home/zuul/zuul-output/logs/ci-framework-data/logs/openstack-k8s-operators-openstack-must-gather/must-gather.logs:

[must-gather ] OUT 2025-10-06T21:12:42.444020819Z Using must-gather plug-in image: quay.io/openstack-k8s-operators/openstack-must-gather:latest
When opening a support case, bugzilla, or issue please include the following summary data along with any other requested information:
ClusterID: a84dabf3-edcf-4828-b6a1-f9d3a6f02304
ClientVersion: 4.19.13
ClusterVersion: Stable at "4.16.0"
ClusterOperators:
  clusteroperator/authentication is not available (WellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: &v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:v1.ObjectMeta{Name:"kubernetes", GenerateName:"", Namespace:"default", SelfLink:"", UID:"9374e9d0-f290-4faa-94c7-262a199a1d45", ResourceVersion:"43093", Generation:0, CreationTimestamp:time.Date(2024, time.June, 26, 12, 38, 3, 0, time.Local), DeletionTimestamp:, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"endpointslice.kubernetes.io/skip-mirror":"true"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:"kube-apiserver", Operation:"Update", APIVersion:"v1", Time:time.Date(2025, time.August, 13, 20, 8, 43, 0, time.Local), FieldsType:"FieldsV1", FieldsV1:(*v1.FieldsV1)(0xc002cb4f90), Subresource:""}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)) because All is well
  clusteroperator/kube-apiserver is progressing: NodeInstallerProgressing: 1 node is at revision 12; 0 nodes have achieved new revision 13
  clusteroperator/machine-config is degraded because Failed to resync 4.16.0 because: error required MachineConfigPool master is paused and cannot sync until it is unpaused
  clusteroperator/network is progressing: DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes)
    DaemonSet "/openshift-multus/multus" is not available (awaiting 1 nodes)
  clusteroperator/cloud-credential is missing
  clusteroperator/cluster-autoscaler is missing
  clusteroperator/insights is missing
  clusteroperator/monitoring is missing
  clusteroperator/storage is missing
[must-gather ] OUT 2025-10-06T21:12:42.543938682Z namespace/openshift-must-gather-9wr6n created
[must-gather ] OUT 2025-10-06T21:12:42.549448092Z clusterrolebinding.rbac.authorization.k8s.io/must-gather-2679m created
[must-gather ] OUT 2025-10-06T21:12:43.589841409Z namespace/openshift-must-gather-9wr6n deleted
Reprinting Cluster State:
When opening a support case, bugzilla, or issue please include the following summary data along with any other requested information:
ClusterID: a84dabf3-edcf-4828-b6a1-f9d3a6f02304
ClientVersion: 4.19.13
ClusterVersion: Stable at "4.16.0"
ClusterOperators:
  clusteroperator/authentication is not available (WellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: &v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:v1.ObjectMeta{Name:"kubernetes", GenerateName:"", Namespace:"default", SelfLink:"", UID:"9374e9d0-f290-4faa-94c7-262a199a1d45", ResourceVersion:"43093", Generation:0, CreationTimestamp:time.Date(2024, time.June, 26, 12, 38, 3, 0, time.Local), DeletionTimestamp:, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"endpointslice.kubernetes.io/skip-mirror":"true"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:"kube-apiserver", Operation:"Update", APIVersion:"v1", Time:time.Date(2025, time.August, 13, 20, 8, 43, 0, time.Local), FieldsType:"FieldsV1", FieldsV1:(*v1.FieldsV1)(0xc002cb4f90), Subresource:""}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)) because All is well
  clusteroperator/kube-apiserver is progressing: NodeInstallerProgressing: 1 node is at revision 12; 0 nodes have achieved new revision 13
  clusteroperator/machine-config is degraded because Failed to resync 4.16.0 because: error required MachineConfigPool master is paused and cannot sync until it is unpaused
  clusteroperator/network is progressing: DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes)
    DaemonSet "/openshift-multus/multus" is not available (awaiting 1 nodes)
  clusteroperator/cloud-credential is missing
  clusteroperator/cluster-autoscaler is missing
  clusteroperator/insights is missing
  clusteroperator/monitoring is missing
  clusteroperator/storage is missing

home/zuul/zuul-output/logs/ci-framework-data/logs/openstack-k8s-operators-openstack-must-gather/timestamp:

2025-10-06 21:12:42.553777357 +0000 UTC m=+0.234743809
2025-10-06 21:12:43.584887996 +0000 UTC m=+1.265854478

home/zuul/zuul-output/logs/ci-framework-data/logs/openstack-k8s-operators-openstack-must-gather/event-filter.html:

Events
Time | Namespace | Component | RelatedObject | Reason | Message
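The must-gather.logs file above is the console output of an `oc adm must-gather` run; the tool prints the client/cluster version summary and any unhealthy ClusterOperators before gathering and reprints the same cluster state at the end, which is why the summary appears twice. A minimal sketch of the invocation implied by the log (the plug-in image is taken from the log itself; the --dest-dir value is an assumption, not something recorded in the artifacts):

  # Sketch only: gather OpenStack operator data with the image named in must-gather.logs.
  oc adm must-gather \
    --image=quay.io/openstack-k8s-operators/openstack-must-gather:latest \
    --dest-dir=openstack-k8s-operators-openstack-must-gather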
home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/ (per-pod, per-container log directories):

  openshift-service-ca_service-ca-666f99b6f-kk8kg_e4a7de23-6134-4044-902a-0900dc04a501/service-ca-controller/
  openshift-operator-lifecycle-manager_collect-profiles-29329740-2jhbz_f2d0e16a-3cb6-4824-91ad-bb8d11be9bed/collect-profiles/
  openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/{openshift-apiserver,fix-audit-permissions,openshift-apiserver-check-endpoints}/
  openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1/console/
  openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/
  openshift-operator-lifecycle-manager_collect-profiles-29251950-x8jjd_ad171c4b-8408-4370-8e86-502999788ddb/collect-profiles/
  openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4/console-operator/
  openshift-kube-controller-manager_installer-10-retry-1-crc_dc02677d-deed-4cc9-bb8c-0dd300f83655/installer/
  openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f/control-plane-machine-set-operator/
  openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/{kube-rbac-proxy-crio,setup}/
  openshift-kube-apiserver_installer-9-crc_2ad657a4-8b02-4373-8d0d-b0e25345dc90/installer/
  openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/{kube-controller-manager,kube-controller-manager-cert-syncer,kube-controller-manager-recovery-controller,cluster-policy-controller}/
  openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e/cluster-image-registry-operator/
  cert-manager_cert-manager-67c98b89c8-4rplv_c282850f-1ba4-47b7-ae36-8e423c6cc9c2/cert-manager-controller/
  openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8/openshift-apiserver-operator/
  openshift-kube-scheduler_installer-7-crc_b57cce81-8ea0-4c4d-aae1-ee024d201c15/installer/
  openshift-kube-controller-manager_revision-pruner-8-crc_72854c1e-5ae2-4ed6-9e50-ff3bccde2635/pruner/
  openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/{package-server-manager,kube-rbac-proxy}/
  openshift-operator-lifecycle-manager_collect-profiles-29251935-d7x6j_51936587-a4af-470d-ad92-8ab9062cbc72/collect-profiles/
  openshift-kube-controller-manager_installer-11-crc_a45bfab9-f78b-4d72-b5b7-903e60401124/installer/
  openshift-dns_node-resolver-dn27q_6a23c0ee-5648-448c-b772-83dced2891ce/dns-node-resolver/
  openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d/etcd-operator/
  openshift-kube-scheduler_installer-8-crc_aca1f9ff-a685-4a78-b461-3931b757f754/installer/
  cert-manager_cert-manager-cainjector-5c5695d979-h44lv_a24f2023-bbe9-44a0-b17c-b662dacc6f34/cert-manager-cainjector/
  openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_7dae59545f22b3fb679a7fbf878a6379/startup-monitor/
  openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/{machine-config-daemon,kube-rbac-proxy}/
  openshift-kube-controller-manager_revision-pruner-11-crc_1784282a-268d-4e44-a766-43281414e2dc/pruner/
  openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/{kube-scheduler,kube-scheduler-cert-syncer,kube-scheduler-recovery-controller,wait-for-host-port}/
  openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905/kube-scheduler-operator-container/
  openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/{cluster-samples-operator,cluster-samples-operator-watch}/
  openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/{openshift-config-operator,openshift-api}/
  openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7/kube-storage-version-migrator-operator/
  openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19/kube-apiserver-operator/
  openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/{network-metrics-daemon,kube-rbac-proxy}/
  openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/{etcd,etcdctl,etcd-metrics,etcd-readyz,etcd-resources-copy,etcd-ensure-env-vars,setup}/
  openshift-ingress-canary_ingress-canary-2vhcn_0b5d722a-1123-4935-9740-52a08d018bc9/serve-healthcheck-canary/
  openshift-kube-apiserver_installer-13-crc_e4ccb32c-914e-4f5c-9d1d-50cee1da7ce8/installer/
  openshift-marketplace_marketplace-operator-8b455464d-bf2z9_58097cde-9416-4150-be4a-25b53f7fb3fc/marketplace-operator/
  openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2/network-operator/
  openshift-operator-lifecycle-manager_packageserver-8464bcc55b-sjnqz_bd556935-a077-45df-ba3f-d42c39326ccd/packageserver/
  openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/{kube-multus-additional-cni-plugins,cni-plugins,bond-cni-plugin,routeoverride-cni,whereabouts-cni,whereabouts-cni-bincopy,egress-router-binary-copy}/
  openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/{machine-config-operator,kube-rbac-proxy}/
  openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/{registry-server,extract-content,extract-utilities}/
  openshift-kube-controller-manager_revision-pruner-10-crc_2f155735-a9be-4621-a5f2-5ab4b6957acd/pruner/
  openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/{registry-server,extract-content,extract-utilities}/
  openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/{kube-apiserver,setup,kube-apiserver-check-endpoints,kube-apiserver-insecure-readyz,kube-apiserver-cert-regeneration-controller,kube-apiserver-cert-syncer}/
  openshift-route-controller-manager_route-controller-manager-776b8b7477-sfpvs_21d29937-debd-4407-b2b1-d1053cb0f342/route-controller-manager/
  openshift-cluster-version_cluster-version-operator-6d5d9649f6-x6d46_9fb762d1-812f-43f1-9eac-68034c1ecec7/cluster-version-operator/
  openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/{registry-server,extract-content}/
  …
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/extract-utilities/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000755000175000017500000000000015071030333033072 5ustar zuulzuul././@LongLink0000644000000000000000000000025500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_image-registry-75b7bb6564-f79cx_58adb7b7-cf6f-4b5a-a15c-00368dc5d229/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-regist0000755000175000017500000000000015071030333033020 5ustar zuulzuul././@LongLink0000644000000000000000000000026600000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_image-registry-75b7bb6564-f79cx_58adb7b7-cf6f-4b5a-a15c-00368dc5d229/registry/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-regist0000755000175000017500000000000015071030333033020 5ustar zuulzuul././@LongLink0000644000000000000000000000026200000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-v65wr_bf1a8b70-3856-486f-9912-a2de1d57c3fb/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-conf0000755000175000017500000000000015071030333032772 5ustar zuulzuul././@LongLink0000644000000000000000000000031000000000000011575 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-v65wr_bf1a8b70-3856-486f-9912-a2de1d57c3fb/machine-config-server/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-conf0000755000175000017500000000000015071030333032772 5ustar zuulzuul././@LongLink0000644000000000000000000000024100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_down0000755000175000017500000000000015071030333033134 5ustar zuulzuul././@LongLink0000644000000000000000000000026100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c/download-server/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_down0000755000175000017500000000000015071030333033134 5ustar zuulzuul././@LongLink0000644000000000000000000000026700000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-6d8474f75f-x54mh_c085412c-b875-46c9-ae3e-e6b0d8067091/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lif0000755000175000017500000000000015071030333033046 5ustar zuulzuul././@LongLink0000644000000000000000000000030400000000000011600 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-6d8474f75f-x54mh_c085412c-b875-46c9-ae3e-e6b0d8067091/olm-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lif0000755000175000017500000000000015071030333033046 5ustar zuulzuul././@LongLink0000644000000000000000000000027000000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-mach0000755000175000017500000000000015071030333033032 5ustar zuulzuul././@LongLink0000644000000000000000000000031000000000000011575 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/kube-rbac-proxy/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-mach0000755000175000017500000000000015071030333033032 5ustar zuulzuul././@LongLink0000644000000000000000000000032400000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/machine-approver-controller/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-mach0000755000175000017500000000000015071030333033032 5ustar zuulzuul././@LongLink0000644000000000000000000000025600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-74fc7c67cc-xqf8b_01feb2e0-a0f4-4573-8335-34e364e0ef40/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authenticati0000755000175000017500000000000015071030333033125 5ustar zuulzuul././@LongLink0000644000000000000000000000027600000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-74fc7c67cc-xqf8b_01feb2e0-a0f4-4573-8335-34e364e0ef40/oauth-openshift/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authenticati0000755000175000017500000000000015071030333033125 5ustar zuulzuul././@LongLink0000644000000000000000000000027200000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_catalog-operator-857456c46-7f5wf_8a5ae51d-d173-4531-8975-f164c975ce1f/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lif0000755000175000017500000000000015071030333033046 5ustar zuulzuul././@LongLink0000644000000000000000000000031300000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_catalog-operator-857456c46-7f5wf_8a5ae51d-d173-4531-8975-f164c975ce1f/catalog-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lif0000755000175000017500000000000015071030333033046 5ustar zuulzuul././@LongLink0000644000000000000000000000022200000000000011577 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000755000175000017500000000000015071030333033222 5ustar zuulzuul././@LongLink0000644000000000000000000000023600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000755000175000017500000000000015071030333033222 5ustar zuulzuul././@LongLink0000644000000000000000000000024200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_c0000755000175000017500000000000015071030333033157 5ustar zuulzuul././@LongLink0000644000000000000000000000026700000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/hostpath-provisioner/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_c0000755000175000017500000000000015071030333033157 5ustar zuulzuul././@LongLink0000644000000000000000000000027000000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/node-driver-registrar/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_c0000755000175000017500000000000015071030333033157 5ustar zuulzuul././@LongLink0000644000000000000000000000026200000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/csi-provisioner/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_c0000755000175000017500000000000015071030333033157 5ustar zuulzuul././@LongLink0000644000000000000000000000026100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/liveness-probe/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_c0000755000175000017500000000000015071030333033157 5ustar zuulzuul././@LongLink0000644000000000000000000000025100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiser0000755000175000017500000000000015071030333033044 5ustar zuulzuul././@LongLink0000644000000000000000000000027100000000000011603 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/oauth-apiserver/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiser0000755000175000017500000000000015071030333033044 5ustar zuulzuul././@LongLink0000644000000000000000000000027700000000000011611 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/fix-audit-permissions/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiser0000755000175000017500000000000015071030333033044 5ustar zuulzuul././@LongLink0000644000000000000000000000026500000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-778975cc4f-x5vcf_1a3e81c3-c292-4130-9436-f94062c91efd/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-m0000755000175000017500000000000015071030333033060 5ustar zuulzuul././@LongLink0000644000000000000000000000031000000000000011575 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-778975cc4f-x5vcf_1a3e81c3-c292-4130-9436-f94062c91efd/controller-manager/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-m0000755000175000017500000000000015071030333033060 5ustar zuulzuul././@LongLink0000644000000000000000000000026100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-oper0000755000175000017500000000000015071030333033060 5ustar zuulzuul././@LongLink0000644000000000000000000000030100000000000011575 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/kube-rbac-proxy/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-oper0000755000175000017500000000000015071030333033060 5ustar zuulzuul././@LongLink0000644000000000000000000000030200000000000011576 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-oper0000755000175000017500000000000015071030333033060 5ustar zuulzuul././@LongLink0000644000000000000000000000023600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-12-crc_3557248c-8f70-4165-aa66-8df983e7e01a/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserv0000755000175000017500000000000015071030333033040 5ustar zuulzuul././@LongLink0000644000000000000000000000025000000000000011600 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-12-crc_3557248c-8f70-4165-aa66-8df983e7e01a/installer/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserv0000755000175000017500000000000015071030333033040 5ustar zuulzuul././@LongLink0000644000000000000000000000024600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-wwpnd_2b6d14a5-ca00-40c7-af7a-051a98a24eed/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-oper0000755000175000017500000000000015071030333033077 5ustar zuulzuul././@LongLink0000644000000000000000000000026700000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-wwpnd_2b6d14a5-ca00-40c7-af7a-051a98a24eed/iptables-alerter/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-oper0000755000175000017500000000000015071030333033077 5ustar zuulzuul././@LongLink0000644000000000000000000000027700000000000011611 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authenticati0000755000175000017500000000000015071030333033125 5ustar zuulzuul././@LongLink0000644000000000000000000000032700000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e/authentication-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authenticati0000755000175000017500000000000015071030333033125 5ustar zuulzuul././@LongLink0000644000000000000000000000024700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-webhook-7f9f8648b9-lnppn_a088e670-59a9-490f-b5df-321b48308e10/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-mana0000755000175000017500000000000015071030333032742 5ustar zuulzuul././@LongLink0000644000000000000000000000027400000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-webhook-7f9f8648b9-lnppn_a088e670-59a9-490f-b5df-321b48308e10/cert-manager-webhook/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-mana0000755000175000017500000000000015071030333032742 5ustar zuulzuul././@LongLink0000644000000000000000000000030100000000000011575 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-conf0000755000175000017500000000000015071030333032772 5ustar zuulzuul././@LongLink0000644000000000000000000000032100000000000011577 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/kube-rbac-proxy/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-conf0000755000175000017500000000000015071030333032772 5ustar zuulzuul././@LongLink0000644000000000000000000000033300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/machine-config-controller/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-conf0000755000175000017500000000000015071030333032772 5ustar zuulzuul././@LongLink0000644000000000000000000000026700000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-service-ca-o0000755000175000017500000000000015071030333032720 5ustar zuulzuul././@LongLink0000644000000000000000000000031300000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17/service-ca-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-service-ca-o0000755000175000017500000000000015071030333032720 5ustar zuulzuul././@LongLink0000644000000000000000000000026400000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015071030333033062 5ustar zuulzuul././@LongLink0000644000000000000000000000031400000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/ovnkube-cluster-manager/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015071030333033062 5ustar zuulzuul././@LongLink0000644000000000000000000000030400000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/kube-rbac-proxy/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015071030333033062 5ustar zuulzuul././@LongLink0000644000000000000000000000024400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000755000175000017500000000000015071030333033072 5ustar zuulzuul././@LongLink0000644000000000000000000000026400000000000011605 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/registry-server/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000755000175000017500000000000015071030333033072 5ustar zuulzuul././@LongLink0000644000000000000000000000026400000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/extract-content/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000755000175000017500000000000015071030333033072 5ustar zuulzuul././@LongLink0000644000000000000000000000026600000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/extract-utilities/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_0000755000175000017500000000000015071030333033072 5ustar zuulzuul././@LongLink0000644000000000000000000000025400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-9-crc_a0453d24-e872-43af-9e7a-86227c26d200/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-control0000755000175000017500000000000015071030333033047 5ustar zuulzuul././@LongLink0000644000000000000000000000026300000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-9-crc_a0453d24-e872-43af-9e7a-86227c26d200/pruner/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-control0000755000175000017500000000000015071030333033047 5ustar zuulzuul././@LongLink0000644000000000000000000000026500000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-f7c6d88df-q2fnv_cf1a8966-f594-490a-9fbb-eec5bafd13d3/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage0000755000175000017500000000000015071030333033033 5ustar zuulzuul././@LongLink0000644000000000000000000000027600000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-f7c6d88df-q2fnv_cf1a8966-f594-490a-9fbb-eec5bafd13d3/migrator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage0000755000175000017500000000000015071030333033033 5ustar zuulzuul././@LongLink0000644000000000000000000000026000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_0000755000175000017500000000000015071030333032755 5ustar zuulzuul././@LongLink0000644000000000000000000000030000000000000011574 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/kube-rbac-proxy/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_0000755000175000017500000000000015071030333032755 5ustar zuulzuul././@LongLink0000644000000000000000000000030500000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/machine-api-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_0000755000175000017500000000000015071030333032755 5ustar zuulzuul././@LongLink0000644000000000000000000000024000000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015071030333033062 5ustar zuulzuul././@LongLink0000644000000000000000000000026300000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovnkube-controller/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015071030333033062 5ustar zuulzuul././@LongLink0000644000000000000000000000025700000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovn-controller/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015071030333033062 5ustar zuulzuul././@LongLink0000644000000000000000000000024500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/sbdb/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015071030333033062 5ustar zuulzuul././@LongLink0000644000000000000000000000025600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kubecfg-setup/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015071030333033062 5ustar zuulzuul././@LongLink0000644000000000000000000000024500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/nbdb/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015071030333033062 5ustar zuulzuul././@LongLink0000644000000000000000000000024700000000000011606 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/northd/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015071030333033062 5ustar zuulzuul././@LongLink0000644000000000000000000000027400000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kube-rbac-proxy-ovn-metrics/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015071030333033062 5ustar zuulzuul././@LongLink0000644000000000000000000000026500000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kube-rbac-proxy-node/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015071030333033062 5ustar zuulzuul././@LongLink0000644000000000000000000000026000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovn-acl-logging/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernet0000755000175000017500000000000015071030333033062 5ustar zuulzuul././@LongLink0000644000000000000000000000025500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-v54bt_34a48baf-1bee-4921-8bb2-9b7320e76f79/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diag0000755000175000017500000000000015071030333033036 5ustar zuulzuul././@LongLink0000644000000000000000000000031400000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-v54bt_34a48baf-1bee-4921-8bb2-9b7320e76f79/network-check-target-container/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diag0000755000175000017500000000000015071030333033036 5ustar zuulzuul././@LongLink0000644000000000000000000000022400000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-defa0000755000175000017500000000000015071030333032770 5ustar zuulzuul././@LongLink0000644000000000000000000000023000000000000011576 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/dns/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-defa0000755000175000017500000000000015071030333032770 5ustar zuulzuul././@LongLink0000644000000000000000000000024400000000000011603 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/kube-rbac-proxy/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-defa0000755000175000017500000000000015071030333032770 5ustar zuulzuul././@LongLink0000644000000000000000000000026000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-node0000755000175000017500000000000015071030333033057 5ustar zuulzuul././@LongLink0000644000000000000000000000027000000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/webhook/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-node0000755000175000017500000000000015071030333033057 5ustar zuulzuul././@LongLink0000644000000000000000000000027100000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/approver/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-node0000755000175000017500000000000015071030333033057 5ustar zuulzuul././@LongLink0000644000000000000000000000026200000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000755000175000017500000000000015071030333033222 5ustar zuulzuul././@LongLink0000644000000000000000000000030200000000000011576 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/kube-rbac-proxy/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000755000175000017500000000000015071030333033222 5ustar zuulzuul././@LongLink0000644000000000000000000000031600000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/multus-admission-controller/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multu0000755000175000017500000000000015071030333033222 5ustar zuulzuul././@LongLink0000644000000000000000000000025100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator0000755000175000017500000000000015071030333033060 5ustar zuulzuul././@LongLink0000644000000000000000000000026600000000000011607 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/dns-operator/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator0000755000175000017500000000000015071030333033060 5ustar zuulzuul././@LongLink0000644000000000000000000000027100000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/kube-rbac-proxy/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator0000755000175000017500000000000015071030333033060 5ustar zuulzuul././@LongLink0000644000000000000000000000024700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-10-crc_79050916-d488-4806-b556-1b0078b31e53/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-control0000755000175000017500000000000015071030333033047 5ustar zuulzuul././@LongLink0000644000000000000000000000026100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-10-crc_79050916-d488-4806-b556-1b0078b31e53/installer/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-control0000755000175000017500000000000015071030333033047 5ustar zuulzuul././@LongLink0000644000000000000000000000026700000000000011610 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-source-5c5478f8c-vqvt7_d0f40333-c860-4c04-8058-a0bf572dcf12/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diag0000755000175000017500000000000015071030333033036 5ustar zuulzuul././@LongLink0000644000000000000000000000030700000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-source-5c5478f8c-vqvt7_d0f40333-c860-4c04-8058-a0bf572dcf12/check-endpoints/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diag0000755000175000017500000000000015071030333033036 5ustar zuulzuul././@LongLink0000644000000000000000000000023300000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_node-ca-l92hr_f8175ef1-0983-4bfe-a64e-fc6f5c5f7d2e/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-regist0000755000175000017500000000000015071030333033020 5ustar zuulzuul././@LongLink0000644000000000000000000000024300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_node-ca-l92hr_f8175ef1-0983-4bfe-a64e-fc6f5c5f7d2e/node-ca/home/zuul/zuul-output/logs/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-regist0000755000175000017500000000000015071030333033020 5ustar zuulzuul././@LongLink0000644000000000000000000000032100000000000011577 Lustar 
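Each pod's logs sit under a namespace_pod_uid/ directory with one subdirectory per container, so a single container's logs can be pulled out of the job artifact without unpacking everything. A minimal sketch with GNU tar, assuming the artifact was downloaded as zuul-output.tar (the real archive name may differ):

  # List every captured CRC pod-log path in the archive
  tar -tf zuul-output.tar | grep 'crc-logs-artifacts/pods/'
  # Extract only the ovnkube-node controller logs, keeping the directory layout
  tar -xf zuul-output.tar --wildcards '*/openshift-ovn-kubernetes_ovnkube-node-*/ovnkube-controller/*'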
2025-10-06 21:09:22,224 p=27534 u=zuul n=ansible | Starting galaxy collection install process
2025-10-06 21:09:22,225 p=27534 u=zuul n=ansible | Process install dependency map
2025-10-06 21:09:36,208 p=27534 u=zuul n=ansible | Starting collection install process
2025-10-06 21:09:36,208 p=27534 u=zuul n=ansible | Installing 'cifmw.general:1.0.0+35b8986b' to '/home/zuul/.ansible/collections/ansible_collections/cifmw/general'
2025-10-06 21:09:36,658 p=27534 u=zuul n=ansible | Created collection for cifmw.general:1.0.0+35b8986b at /home/zuul/.ansible/collections/ansible_collections/cifmw/general
2025-10-06 21:09:36,659 p=27534 u=zuul n=ansible | cifmw.general:1.0.0+35b8986b was installed successfully
2025-10-06 21:09:36,659 p=27534 u=zuul n=ansible | Installing 'containers.podman:1.16.2' to '/home/zuul/.ansible/collections/ansible_collections/containers/podman'
2025-10-06 21:09:36,731 p=27534 u=zuul n=ansible | Created collection for containers.podman:1.16.2 at /home/zuul/.ansible/collections/ansible_collections/containers/podman
2025-10-06 21:09:36,731 p=27534 u=zuul n=ansible | containers.podman:1.16.2 was installed successfully
2025-10-06 21:09:36,731 p=27534 u=zuul n=ansible | Installing 'community.general:10.0.1' to '/home/zuul/.ansible/collections/ansible_collections/community/general'
2025-10-06 21:09:37,480 p=27534 u=zuul n=ansible | Created collection for community.general:10.0.1 at /home/zuul/.ansible/collections/ansible_collections/community/general
2025-10-06 21:09:37,481 p=27534 u=zuul n=ansible | community.general:10.0.1 was installed successfully
2025-10-06 21:09:37,481 p=27534 u=zuul n=ansible | Installing 'ansible.posix:1.6.2' to '/home/zuul/.ansible/collections/ansible_collections/ansible/posix'
2025-10-06 21:09:37,530 p=27534 u=zuul n=ansible | Created collection for ansible.posix:1.6.2 at /home/zuul/.ansible/collections/ansible_collections/ansible/posix
2025-10-06 21:09:37,530 p=27534 u=zuul n=ansible | ansible.posix:1.6.2 was installed successfully
2025-10-06 21:09:37,530 p=27534 u=zuul n=ansible | Installing 'ansible.utils:5.1.2' to '/home/zuul/.ansible/collections/ansible_collections/ansible/utils'
2025-10-06 21:09:37,633 p=27534 u=zuul n=ansible | Created collection for ansible.utils:5.1.2 at /home/zuul/.ansible/collections/ansible_collections/ansible/utils
2025-10-06 21:09:37,634 p=27534 u=zuul n=ansible | ansible.utils:5.1.2 was installed successfully
2025-10-06 21:09:37,634 p=27534 u=zuul n=ansible | Installing 'community.libvirt:1.3.0' to '/home/zuul/.ansible/collections/ansible_collections/community/libvirt'
2025-10-06 21:09:37,658 p=27534 u=zuul n=ansible | Created collection for community.libvirt:1.3.0 at /home/zuul/.ansible/collections/ansible_collections/community/libvirt
2025-10-06 21:09:37,658 p=27534 u=zuul n=ansible | community.libvirt:1.3.0 was installed successfully
2025-10-06 21:09:37,658 p=27534 u=zuul n=ansible | Installing 'community.crypto:2.22.3' to '/home/zuul/.ansible/collections/ansible_collections/community/crypto'
2025-10-06 21:09:37,809 p=27534 u=zuul n=ansible | Created collection for community.crypto:2.22.3 at /home/zuul/.ansible/collections/ansible_collections/community/crypto
2025-10-06 21:09:37,810 p=27534 u=zuul n=ansible | community.crypto:2.22.3 was installed successfully
2025-10-06 21:09:37,810 p=27534 u=zuul n=ansible | Installing 'kubernetes.core:5.0.0' to '/home/zuul/.ansible/collections/ansible_collections/kubernetes/core'
2025-10-06 21:09:37,931 p=27534 u=zuul n=ansible | Created collection for kubernetes.core:5.0.0 at /home/zuul/.ansible/collections/ansible_collections/kubernetes/core
2025-10-06 21:09:37,931 p=27534 u=zuul n=ansible | kubernetes.core:5.0.0 was installed successfully
2025-10-06 21:09:37,931 p=27534 u=zuul n=ansible | Installing 'ansible.netcommon:7.1.0' to '/home/zuul/.ansible/collections/ansible_collections/ansible/netcommon'
2025-10-06 21:09:37,996 p=27534 u=zuul n=ansible | Created collection for ansible.netcommon:7.1.0 at /home/zuul/.ansible/collections/ansible_collections/ansible/netcommon
2025-10-06 21:09:37,996 p=27534 u=zuul n=ansible | ansible.netcommon:7.1.0 was installed successfully
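The install records above and below are what ansible-galaxy prints while resolving a collections requirements file. A minimal sketch of reproducing such an install locally, assuming a hypothetical requirements.yml that pins the same versions:

  # requirements.yml (hypothetical excerpt):
  #   collections:
  #     - name: containers.podman
  #       version: 1.16.2
  #     - name: kubernetes.core
  #       version: 5.0.0
  ansible-galaxy collection install -r requirements.yml -p ~/.ansible/collections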
2025-10-06 21:09:37,996 p=27534 u=zuul n=ansible | Installing 'openstack.config_template:2.1.1' to '/home/zuul/.ansible/collections/ansible_collections/openstack/config_template'
2025-10-06 21:09:38,013 p=27534 u=zuul n=ansible | Created collection for openstack.config_template:2.1.1 at /home/zuul/.ansible/collections/ansible_collections/openstack/config_template
2025-10-06 21:09:38,013 p=27534 u=zuul n=ansible | openstack.config_template:2.1.1 was installed successfully
2025-10-06 21:09:38,013 p=27534 u=zuul n=ansible | Installing 'junipernetworks.junos:9.1.0' to '/home/zuul/.ansible/collections/ansible_collections/junipernetworks/junos'
2025-10-06 21:09:38,233 p=27534 u=zuul n=ansible | Created collection for junipernetworks.junos:9.1.0 at /home/zuul/.ansible/collections/ansible_collections/junipernetworks/junos
2025-10-06 21:09:38,233 p=27534 u=zuul n=ansible | junipernetworks.junos:9.1.0 was installed successfully
2025-10-06 21:09:38,233 p=27534 u=zuul n=ansible | Installing 'cisco.ios:9.0.3' to '/home/zuul/.ansible/collections/ansible_collections/cisco/ios'
2025-10-06 21:09:38,479 p=27534 u=zuul n=ansible | Created collection for cisco.ios:9.0.3 at /home/zuul/.ansible/collections/ansible_collections/cisco/ios
2025-10-06 21:09:38,480 p=27534 u=zuul n=ansible | cisco.ios:9.0.3 was installed successfully
2025-10-06 21:09:38,480 p=27534 u=zuul n=ansible | Installing 'mellanox.onyx:1.0.0' to '/home/zuul/.ansible/collections/ansible_collections/mellanox/onyx'
2025-10-06 21:09:38,511 p=27534 u=zuul n=ansible | Created collection for mellanox.onyx:1.0.0 at /home/zuul/.ansible/collections/ansible_collections/mellanox/onyx
2025-10-06 21:09:38,512 p=27534 u=zuul n=ansible | mellanox.onyx:1.0.0 was installed successfully
2025-10-06 21:09:38,512 p=27534 u=zuul n=ansible | Installing 'community.okd:4.0.0' to '/home/zuul/.ansible/collections/ansible_collections/community/okd'
2025-10-06 21:09:38,540 p=27534 u=zuul n=ansible | Created collection for community.okd:4.0.0 at /home/zuul/.ansible/collections/ansible_collections/community/okd
2025-10-06 21:09:38,540 p=27534 u=zuul n=ansible | community.okd:4.0.0 was installed successfully
2025-10-06 21:09:38,540 p=27534 u=zuul n=ansible | Installing '@NAMESPACE@.@NAME@:3.1.4' to '/home/zuul/.ansible/collections/ansible_collections/@NAMESPACE@/@NAME@'
2025-10-06 21:09:38,633 p=27534 u=zuul n=ansible | Created collection for @NAMESPACE@.@NAME@:3.1.4 at /home/zuul/.ansible/collections/ansible_collections/@NAMESPACE@/@NAME@
2025-10-06 21:09:38,633 p=27534 u=zuul n=ansible | @NAMESPACE@.@NAME@:3.1.4 was installed successfully
2025-10-06 21:09:48,364 p=28169 u=zuul n=ansible | PLAY [Remove status flag] ******************************************************
2025-10-06 21:09:48,383 p=28169 u=zuul n=ansible | TASK [Gathering Facts ] ********************************************************
2025-10-06 21:09:48,383 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:48 +0000 (0:00:00.036) 0:00:00.036 ********
2025-10-06 21:09:49,294 p=28169 u=zuul n=ansible | ok: [localhost]
2025-10-06 21:09:49,309 p=28169 u=zuul n=ansible | TASK [Delete success flag if exists path={{ ansible_user_dir }}/cifmw-success, state=absent] ***
2025-10-06 21:09:49,310 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:49 +0000 (0:00:00.926) 0:00:00.962 ********
2025-10-06 21:09:49,602 p=28169 u=zuul n=ansible | ok: [localhost]
2025-10-06 21:09:49,615 p=28169 u=zuul n=ansible | TASK [Inherit from parent scenarios if needed _raw_params=ci/playbooks/tasks/inherit_parent_scenario.yml] ***
2025-10-06 21:09:49,615 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:49 +0000 (0:00:00.305) 0:00:01.267 ********
2025-10-06 21:09:49,678 p=28169 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/ci/playbooks/tasks/inherit_parent_scenario.yml for localhost
2025-10-06 21:09:49,729 p=28169 u=zuul n=ansible | TASK [Inherit from parent parameter file if instructed file={{ item }}] ********
2025-10-06 21:09:49,729 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:49 +0000 (0:00:00.114) 0:00:01.382 ********
2025-10-06 21:09:49,766 p=28169 u=zuul n=ansible | skipping: [localhost]
2025-10-06 21:09:49,773 p=28169 u=zuul n=ansible | TASK [cifmw_setup : Set custom cifmw PATH reusable fact cifmw_path={{ ansible_user_dir }}/.crc/bin:{{ ansible_user_dir }}/.crc/bin/oc:{{ ansible_user_dir }}/bin:{{ ansible_env.PATH }}, cacheable=True] ***
2025-10-06 21:09:49,773 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:49 +0000 (0:00:00.043) 0:00:01.425 ********
2025-10-06 21:09:49,810 p=28169 u=zuul n=ansible | ok: [localhost]
2025-10-06 21:09:49,816 p=28169 u=zuul n=ansible | TASK [cifmw_setup : Get customized parameters ci_framework_params={{ hostvars[inventory_hostname] | dict2items | selectattr("key", "match", "^(cifmw|pre|post)_(?!install_yamls|openshift_token|openshift_login|openshift_kubeconfig).*") | list | items2dict }}] ***
2025-10-06 21:09:49,816 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:49 +0000 (0:00:00.042) 0:00:01.468 ********
2025-10-06 21:09:49,893 p=28169 u=zuul n=ansible | ok: [localhost]
2025-10-06 21:09:49,899 p=28169 u=zuul n=ansible | TASK [install_ca : Ensure target directory exists path={{ cifmw_install_ca_trust_dir }}, state=directory, mode=0755] ***
2025-10-06 21:09:49,899 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:49 +0000 (0:00:00.083) 0:00:01.552 ********
2025-10-06 21:09:50,090 p=28169 u=zuul n=ansible | ok: [localhost]
2025-10-06 21:09:50,096 p=28169 u=zuul n=ansible | TASK [install_ca : Install internal CA from url url={{ cifmw_install_ca_url }}, dest={{ cifmw_install_ca_trust_dir }}, validate_certs={{ cifmw_install_ca_url_validate_certs | default(omit) }}, mode=0644] ***
2025-10-06 21:09:50,096 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:50 +0000 (0:00:00.197) 0:00:01.749 ********
2025-10-06 21:09:50,115 p=28169 u=zuul n=ansible | skipping: [localhost]
2025-10-06 21:09:50,123 p=28169 u=zuul n=ansible | TASK [install_ca : Install custom CA bundle from inline dest={{ cifmw_install_ca_trust_dir }}/cifmw_inline_ca_bundle.crt, content={{ cifmw_install_ca_bundle_inline }}, mode=0644] ***
2025-10-06 21:09:50,123 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:50 +0000 (0:00:00.026) 0:00:01.776 ********
2025-10-06 21:09:50,144 p=28169 u=zuul n=ansible | skipping: [localhost]
2025-10-06 21:09:50,150 p=28169 u=zuul n=ansible | TASK [install_ca : Install custom CA bundle from file dest={{ cifmw_install_ca_trust_dir }}/{{ cifmw_install_ca_bundle_src | basename }}, src={{ cifmw_install_ca_bundle_src }}, mode=0644] ***
2025-10-06 21:09:50,151 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:50 +0000 (0:00:00.027) 0:00:01.803 ********
2025-10-06 21:09:50,169 p=28169 u=zuul n=ansible | skipping: [localhost]
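The install_ca tasks above copy any provided CA material into a trust directory and then refresh the system bundle (the Update ca bundle task below simply runs update-ca-trust). A sketch of the equivalent manual steps on a RHEL/CentOS host, assuming the default anchors directory and a CA file named my-internal-ca.crt (both assumptions, since the role's actual cifmw_install_ca_trust_dir value is not shown in this excerpt):

  # Drop the CA into the trust anchors directory and rebuild the consolidated bundle
  sudo install -m 0644 my-internal-ca.crt /etc/pki/ca-trust/source/anchors/
  sudo update-ca-trust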
21:09:50 +0000 (0:00:00.025) 0:00:01.828 ******** 2025-10-06 21:09:51,592 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:09:51,603 p=28169 u=zuul n=ansible | TASK [repo_setup : Ensure directories are present path={{ cifmw_repo_setup_basedir }}/{{ item }}, state=directory, mode=0755] *** 2025-10-06 21:09:51,603 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:51 +0000 (0:00:01.427) 0:00:03.256 ******** 2025-10-06 21:09:51,810 p=28169 u=zuul n=ansible | changed: [localhost] => (item=tmp) 2025-10-06 21:09:51,973 p=28169 u=zuul n=ansible | changed: [localhost] => (item=artifacts/repositories) 2025-10-06 21:09:52,186 p=28169 u=zuul n=ansible | changed: [localhost] => (item=venv/repo_setup) 2025-10-06 21:09:52,202 p=28169 u=zuul n=ansible | TASK [repo_setup : Make sure git-core package is installed name=git-core, state=present] *** 2025-10-06 21:09:52,202 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:52 +0000 (0:00:00.598) 0:00:03.854 ******** 2025-10-06 21:09:53,193 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:09:53,200 p=28169 u=zuul n=ansible | TASK [repo_setup : Get repo-setup repository accept_hostkey=True, dest={{ cifmw_repo_setup_basedir }}/tmp/repo-setup, repo={{ cifmw_repo_setup_src }}] *** 2025-10-06 21:09:53,200 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:53 +0000 (0:00:00.997) 0:00:04.852 ******** 2025-10-06 21:09:54,372 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:09:54,380 p=28169 u=zuul n=ansible | TASK [repo_setup : Initialize python venv and install requirements virtualenv={{ cifmw_repo_setup_venv }}, requirements={{ cifmw_repo_setup_basedir }}/tmp/repo-setup/requirements.txt, virtualenv_command=python3 -m venv --system-site-packages --upgrade-deps] *** 2025-10-06 21:09:54,380 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:54 +0000 (0:00:01.179) 0:00:06.032 ******** 2025-10-06 21:10:02,777 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:02,784 p=28169 u=zuul n=ansible | TASK [repo_setup : Install repo-setup package chdir={{ cifmw_repo_setup_basedir }}/tmp/repo-setup, creates={{ cifmw_repo_setup_venv }}/bin/repo-setup, _raw_params={{ cifmw_repo_setup_venv }}/bin/python setup.py install] *** 2025-10-06 21:10:02,784 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:02 +0000 (0:00:08.404) 0:00:14.436 ******** 2025-10-06 21:10:03,629 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:03,636 p=28169 u=zuul n=ansible | TASK [repo_setup : Set cifmw_repo_setup_dlrn_hash_tag from content provider cifmw_repo_setup_dlrn_hash_tag={{ content_provider_dlrn_md5_hash }}] *** 2025-10-06 21:10:03,636 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:03 +0000 (0:00:00.852) 0:00:15.289 ******** 2025-10-06 21:10:03,674 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:03,681 p=28169 u=zuul n=ansible | TASK [repo_setup : Run repo-setup _raw_params={{ cifmw_repo_setup_venv }}/bin/repo-setup {{ cifmw_repo_setup_promotion }} {{ cifmw_repo_setup_additional_repos }} -d {{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }} -b {{ cifmw_repo_setup_branch }} --rdo-mirror {{ cifmw_repo_setup_rdo_mirror }} {% if cifmw_repo_setup_dlrn_hash_tag | length > 0 %} --dlrn-hash-tag {{ cifmw_repo_setup_dlrn_hash_tag }} {% endif %} -o {{ cifmw_repo_setup_output }}] *** 2025-10-06 21:10:03,681 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:03 +0000 (0:00:00.044) 0:00:15.333 ******** 2025-10-06 21:10:04,351 p=28169 u=zuul n=ansible | changed: [localhost] 
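The "Run repo-setup" task above renders a plain CLI call from the cifmw_repo_setup_* variables and runs it with the venv built a few steps earlier. Below is a minimal sketch of how that templated command can expand; the promotion tag, distro, branch, mirror and paths are illustrative assumptions (the paths follow the tmp/venv/artifacts directories created earlier in this log), while the flags themselves (-d, -b, --rdo-mirror, --dlrn-hash-tag, -o) are taken from the task definition logged above.
# Sketch only -- concrete values below are assumed, not read from this job.
VENV=/home/zuul/ci-framework-data/venv/repo_setup            # cifmw_repo_setup_venv (assumed)
REPO_OUT=/home/zuul/ci-framework-data/artifacts/repositories # cifmw_repo_setup_output (assumed)
"$VENV/bin/repo-setup" current-podified \
  -d centos9 \
  -b antelope \
  --rdo-mirror https://trunk.rdoproject.org \
  -o "$REPO_OUT"
# If cifmw_repo_setup_dlrn_hash_tag is non-empty, the task also appends
# --dlrn-hash-tag <hash> so the generated .repo files are pinned to a single DLRN build.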
2025-10-06 21:10:04,357 p=28169 u=zuul n=ansible | TASK [repo_setup : Get component repo url={{ cifmw_repo_setup_dlrn_uri }}/{{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }}-{{ cifmw_repo_setup_branch }}/component/{{ cifmw_repo_setup_component_name }}/{{ cifmw_repo_setup_component_promotion_tag }}/delorean.repo, dest={{ cifmw_repo_setup_output }}/{{ cifmw_repo_setup_component_name }}_{{ cifmw_repo_setup_component_promotion_tag }}_delorean.repo, mode=0644] *** 2025-10-06 21:10:04,357 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:04 +0000 (0:00:00.676) 0:00:16.010 ******** 2025-10-06 21:10:04,386 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:04,393 p=28169 u=zuul n=ansible | TASK [repo_setup : Rename component repo path={{ cifmw_repo_setup_output }}/{{ cifmw_repo_setup_component_name }}_{{ cifmw_repo_setup_component_promotion_tag }}_delorean.repo, regexp=delorean-component-{{ cifmw_repo_setup_component_name }}, replace={{ cifmw_repo_setup_component_name }}-{{ cifmw_repo_setup_component_promotion_tag }}] *** 2025-10-06 21:10:04,393 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:04 +0000 (0:00:00.035) 0:00:16.046 ******** 2025-10-06 21:10:04,426 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:04,435 p=28169 u=zuul n=ansible | TASK [repo_setup : Disable component repo in current-podified dlrn repo path={{ cifmw_repo_setup_output }}/delorean.repo, section=delorean-component-{{ cifmw_repo_setup_component_name }}, option=enabled, value=0, mode=0644] *** 2025-10-06 21:10:04,435 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:04 +0000 (0:00:00.041) 0:00:16.087 ******** 2025-10-06 21:10:04,468 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:04,476 p=28169 u=zuul n=ansible | TASK [repo_setup : Run repo-setup-get-hash _raw_params={{ cifmw_repo_setup_venv }}/bin/repo-setup-get-hash --dlrn-url {{ cifmw_repo_setup_dlrn_uri[:-1] }} --os-version {{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }} --release {{ cifmw_repo_setup_branch }} {% if cifmw_repo_setup_component_name | length > 0 -%} --component {{ cifmw_repo_setup_component_name }} --tag {{ cifmw_repo_setup_component_promotion_tag }} {% else -%} --tag {{cifmw_repo_setup_promotion }} {% endif -%} {% if (cifmw_repo_setup_dlrn_hash_tag | length > 0) and (cifmw_repo_setup_component_name | length <= 0) -%} --dlrn-hash-tag {{ cifmw_repo_setup_dlrn_hash_tag }} {% endif -%} --json] *** 2025-10-06 21:10:04,476 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:04 +0000 (0:00:00.041) 0:00:16.129 ******** 2025-10-06 21:10:04,918 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:04,931 p=28169 u=zuul n=ansible | TASK [repo_setup : Dump full hash in delorean.repo.md5 file content={{ _repo_setup_json['full_hash'] }} , dest={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5, mode=0644] *** 2025-10-06 21:10:04,931 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:04 +0000 (0:00:00.454) 0:00:16.584 ******** 2025-10-06 21:10:05,576 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:05,583 p=28169 u=zuul n=ansible | TASK [repo_setup : Dump current-podified hash url={{ cifmw_repo_setup_dlrn_uri }}/{{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }}-{{ cifmw_repo_setup_branch }}/current-podified/delorean.repo.md5, dest={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5, mode=0644] *** 2025-10-06 21:10:05,583 p=28169 u=zuul n=ansible | 
Monday 06 October 2025 21:10:05 +0000 (0:00:00.651) 0:00:17.235 ******** 2025-10-06 21:10:05,616 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:05,623 p=28169 u=zuul n=ansible | TASK [repo_setup : Slurp current podified hash src={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5] *** 2025-10-06 21:10:05,623 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.040) 0:00:17.275 ******** 2025-10-06 21:10:05,639 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:05,645 p=28169 u=zuul n=ansible | TASK [repo_setup : Update the value of full_hash _repo_setup_json={{ _repo_setup_json | combine({'full_hash': _hash}, recursive=true) }}] *** 2025-10-06 21:10:05,645 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.022) 0:00:17.298 ******** 2025-10-06 21:10:05,662 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:05,668 p=28169 u=zuul n=ansible | TASK [repo_setup : Export hashes facts for further use cifmw_repo_setup_full_hash={{ _repo_setup_json['full_hash'] }}, cifmw_repo_setup_commit_hash={{ _repo_setup_json['commit_hash'] }}, cifmw_repo_setup_distro_hash={{ _repo_setup_json['distro_hash'] }}, cifmw_repo_setup_extended_hash={{ _repo_setup_json['extended_hash'] }}, cifmw_repo_setup_dlrn_api_url={{ _repo_setup_json['dlrn_api_url'] }}, cifmw_repo_setup_dlrn_url={{ _repo_setup_json['dlrn_url'] }}, cifmw_repo_setup_release={{ _repo_setup_json['release'] }}, cacheable=True] *** 2025-10-06 21:10:05,668 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.022) 0:00:17.321 ******** 2025-10-06 21:10:05,695 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:05,701 p=28169 u=zuul n=ansible | TASK [repo_setup : Create download directory path={{ cifmw_repo_setup_rhos_release_path }}, state=directory, mode=0755] *** 2025-10-06 21:10:05,701 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.032) 0:00:17.354 ******** 2025-10-06 21:10:05,715 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:05,722 p=28169 u=zuul n=ansible | TASK [repo_setup : Print the URL to request msg={{ cifmw_repo_setup_rhos_release_rpm }}] *** 2025-10-06 21:10:05,722 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.020) 0:00:17.375 ******** 2025-10-06 21:10:05,736 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:05,743 p=28169 u=zuul n=ansible | TASK [Download the RPM name=krb_request] *************************************** 2025-10-06 21:10:05,743 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.020) 0:00:17.395 ******** 2025-10-06 21:10:05,757 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:05,764 p=28169 u=zuul n=ansible | TASK [repo_setup : Install RHOS Release tool name={{ cifmw_repo_setup_rhos_release_rpm if cifmw_repo_setup_rhos_release_rpm is not url else cifmw_krb_request_out.path }}, state=present, disable_gpg_check={{ cifmw_repo_setup_rhos_release_gpg_check | bool }}] *** 2025-10-06 21:10:05,764 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.021) 0:00:17.416 ******** 2025-10-06 21:10:05,778 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:05,784 p=28169 u=zuul n=ansible | TASK [repo_setup : Get rhos-release tool version _raw_params=rhos-release --version] *** 2025-10-06 21:10:05,785 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.020) 0:00:17.437 ******** 2025-10-06 21:10:05,798 p=28169 u=zuul 
n=ansible | skipping: [localhost] 2025-10-06 21:10:05,806 p=28169 u=zuul n=ansible | TASK [repo_setup : Print rhos-release tool version msg={{ rr_version.stdout }}] *** 2025-10-06 21:10:05,806 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.021) 0:00:17.458 ******** 2025-10-06 21:10:05,819 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:05,827 p=28169 u=zuul n=ansible | TASK [repo_setup : Generate repos using rhos-release {{ cifmw_repo_setup_rhos_release_args }} _raw_params=rhos-release {{ cifmw_repo_setup_rhos_release_args }} \ -t {{ cifmw_repo_setup_output }}] *** 2025-10-06 21:10:05,827 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.021) 0:00:17.480 ******** 2025-10-06 21:10:05,841 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:05,847 p=28169 u=zuul n=ansible | TASK [repo_setup : Check for /etc/ci/mirror_info.sh path=/etc/ci/mirror_info.sh] *** 2025-10-06 21:10:05,847 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.019) 0:00:17.500 ******** 2025-10-06 21:10:06,042 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:06,048 p=28169 u=zuul n=ansible | TASK [repo_setup : Use RDO proxy mirrors chdir={{ cifmw_repo_setup_output }}, _raw_params=set -o pipefail source /etc/ci/mirror_info.sh sed -i -e "s|https://trunk.rdoproject.org|$NODEPOOL_RDO_PROXY|g" *.repo ] *** 2025-10-06 21:10:06,048 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:06 +0000 (0:00:00.201) 0:00:17.701 ******** 2025-10-06 21:10:06,242 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:06,248 p=28169 u=zuul n=ansible | TASK [repo_setup : Use RDO CentOS mirrors (remove CentOS 10 conditional when Nodepool mirrors exist) chdir={{ cifmw_repo_setup_output }}, _raw_params=set -o pipefail source /etc/ci/mirror_info.sh sed -i -e "s|http://mirror.stream.centos.org|$NODEPOOL_CENTOS_MIRROR|g" *.repo ] *** 2025-10-06 21:10:06,248 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:06 +0000 (0:00:00.199) 0:00:17.901 ******** 2025-10-06 21:10:06,452 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:06,458 p=28169 u=zuul n=ansible | TASK [repo_setup : Check for gating.repo file on content provider url=http://{{ content_provider_registry_ip }}:8766/gating.repo] *** 2025-10-06 21:10:06,458 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:06 +0000 (0:00:00.210) 0:00:18.111 ******** 2025-10-06 21:10:06,957 p=28169 u=zuul n=ansible | fatal: [localhost]: FAILED! 
=> changed: false elapsed: 0 msg: 'Status code was -1 and not [200]: Request failed: ' redirected: false status: -1 url: http://38.102.83.53:8766/gating.repo 2025-10-06 21:10:06,957 p=28169 u=zuul n=ansible | ...ignoring 2025-10-06 21:10:06,964 p=28169 u=zuul n=ansible | TASK [repo_setup : Populate gating repo from content provider ip content=[gating-repo] baseurl=http://{{ content_provider_registry_ip }}:8766/ enabled=1 gpgcheck=0 priority=1 , dest={{ cifmw_repo_setup_output }}/gating.repo, mode=0644] *** 2025-10-06 21:10:06,964 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:06 +0000 (0:00:00.505) 0:00:18.616 ******** 2025-10-06 21:10:06,990 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:07,000 p=28169 u=zuul n=ansible | TASK [repo_setup : Check for DLRN repo at the destination path={{ cifmw_repo_setup_output }}/delorean.repo] *** 2025-10-06 21:10:07,000 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:07 +0000 (0:00:00.036) 0:00:18.653 ******** 2025-10-06 21:10:07,028 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:07,035 p=28169 u=zuul n=ansible | TASK [repo_setup : Lower the priority of DLRN repos to allow installation from gating repo path={{ cifmw_repo_setup_output }}/delorean.repo, regexp=priority=1, replace=priority=20] *** 2025-10-06 21:10:07,035 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:07 +0000 (0:00:00.035) 0:00:18.688 ******** 2025-10-06 21:10:07,061 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:07,068 p=28169 u=zuul n=ansible | TASK [repo_setup : Check for DLRN component repo path={{ cifmw_repo_setup_output }}/{{ _comp_repo }}] *** 2025-10-06 21:10:07,068 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:07 +0000 (0:00:00.032) 0:00:18.720 ******** 2025-10-06 21:10:07,094 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:07,103 p=28169 u=zuul n=ansible | TASK [repo_setup : Lower the priority of componennt repos to allow installation from gating repo path={{ cifmw_repo_setup_output }}//{{ _comp_repo }}, regexp=priority=1, replace=priority=2] *** 2025-10-06 21:10:07,103 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:07 +0000 (0:00:00.035) 0:00:18.756 ******** 2025-10-06 21:10:07,127 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:07,135 p=28169 u=zuul n=ansible | TASK [repo_setup : Find existing repos from /etc/yum.repos.d directory paths=/etc/yum.repos.d/, patterns=*.repo, recurse=False] *** 2025-10-06 21:10:07,136 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:07 +0000 (0:00:00.032) 0:00:18.788 ******** 2025-10-06 21:10:07,422 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:07,427 p=28169 u=zuul n=ansible | TASK [repo_setup : Remove existing repos from /etc/yum.repos.d directory path={{ item }}, state=absent] *** 2025-10-06 21:10:07,428 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:07 +0000 (0:00:00.291) 0:00:19.080 ******** 2025-10-06 21:10:07,638 p=28169 u=zuul n=ansible | changed: [localhost] => (item=/etc/yum.repos.d/centos-addons.repo) 2025-10-06 21:10:07,831 p=28169 u=zuul n=ansible | changed: [localhost] => (item=/etc/yum.repos.d/centos.repo) 2025-10-06 21:10:07,839 p=28169 u=zuul n=ansible | TASK [repo_setup : Cleanup existing metadata _raw_params=dnf clean metadata] *** 2025-10-06 21:10:07,839 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:07 +0000 (0:00:00.411) 0:00:19.491 ******** 2025-10-06 21:10:08,285 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:08,299 p=28169 u=zuul 
n=ansible | TASK [repo_setup : Copy generated repos to /etc/yum.repos.d directory mode=0755, remote_src=True, src={{ cifmw_repo_setup_output }}/, dest=/etc/yum.repos.d] *** 2025-10-06 21:10:08,299 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:08 +0000 (0:00:00.460) 0:00:19.951 ******** 2025-10-06 21:10:08,590 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:08,602 p=28169 u=zuul n=ansible | TASK [ci_setup : Gather variables for each operating system _raw_params={{ item }}] *** 2025-10-06 21:10:08,602 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:08 +0000 (0:00:00.303) 0:00:20.255 ******** 2025-10-06 21:10:08,645 p=28169 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/ci_setup/vars/redhat.yml) 2025-10-06 21:10:08,653 p=28169 u=zuul n=ansible | TASK [ci_setup : List packages to install var=cifmw_ci_setup_packages] ********* 2025-10-06 21:10:08,654 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:08 +0000 (0:00:00.051) 0:00:20.306 ******** 2025-10-06 21:10:08,674 p=28169 u=zuul n=ansible | ok: [localhost] => cifmw_ci_setup_packages: - bash-completion - ca-certificates - git-core - make - tar - tmux - python3-pip 2025-10-06 21:10:08,680 p=28169 u=zuul n=ansible | TASK [ci_setup : Install needed packages name={{ cifmw_ci_setup_packages }}, state=latest] *** 2025-10-06 21:10:08,680 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:08 +0000 (0:00:00.026) 0:00:20.332 ******** 2025-10-06 21:10:34,121 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:34,129 p=28169 u=zuul n=ansible | TASK [ci_setup : Gather version of openshift client _raw_params=oc version --client -o yaml] *** 2025-10-06 21:10:34,129 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:34 +0000 (0:00:25.449) 0:00:45.782 ******** 2025-10-06 21:10:34,332 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:34,338 p=28169 u=zuul n=ansible | TASK [ci_setup : Ensure openshift client install path is present path={{ cifmw_ci_setup_oc_install_path }}, state=directory, mode=0755] *** 2025-10-06 21:10:34,338 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:34 +0000 (0:00:00.208) 0:00:45.990 ******** 2025-10-06 21:10:34,526 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:34,534 p=28169 u=zuul n=ansible | TASK [ci_setup : Install openshift client src={{ cifmw_ci_setup_openshift_client_download_uri }}/{{ cifmw_ci_setup_openshift_client_version }}/openshift-client-linux.tar.gz, dest={{ cifmw_ci_setup_oc_install_path }}, remote_src=True, mode=0755, creates={{ cifmw_ci_setup_oc_install_path }}/oc] *** 2025-10-06 21:10:34,534 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:34 +0000 (0:00:00.196) 0:00:46.187 ******** 2025-10-06 21:10:39,633 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:39,647 p=28169 u=zuul n=ansible | TASK [ci_setup : Add the OC path to cifmw_path if needed cifmw_path={{ cifmw_ci_setup_oc_install_path }}:{{ ansible_env.PATH }}, cacheable=True] *** 2025-10-06 21:10:39,648 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:39 +0000 (0:00:05.113) 0:00:51.300 ******** 2025-10-06 21:10:39,674 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:39,681 p=28169 u=zuul n=ansible | TASK [ci_setup : Create completion file] *************************************** 2025-10-06 21:10:39,681 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:39 +0000 (0:00:00.033) 0:00:51.333 ******** 2025-10-06 21:10:39,970 p=28169 u=zuul n=ansible | 
changed: [localhost] 2025-10-06 21:10:39,979 p=28169 u=zuul n=ansible | TASK [ci_setup : Source completion from within .bashrc create=True, mode=0644, path={{ ansible_user_dir }}/.bashrc, block=if [ -f ~/.oc_completion ]; then source ~/.oc_completion fi] *** 2025-10-06 21:10:39,979 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:39 +0000 (0:00:00.298) 0:00:51.632 ******** 2025-10-06 21:10:40,258 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:40,264 p=28169 u=zuul n=ansible | TASK [ci_setup : Check rhsm status _raw_params=subscription-manager status] **** 2025-10-06 21:10:40,265 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:40 +0000 (0:00:00.285) 0:00:51.917 ******** 2025-10-06 21:10:40,278 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:40,285 p=28169 u=zuul n=ansible | TASK [ci_setup : Gather the repos to be enabled _repos={{ cifmw_ci_setup_rhel_rhsm_default_repos + (cifmw_ci_setup_rhel_rhsm_extra_repos | default([])) }}] *** 2025-10-06 21:10:40,285 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:40 +0000 (0:00:00.020) 0:00:51.937 ******** 2025-10-06 21:10:40,298 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:40,304 p=28169 u=zuul n=ansible | TASK [ci_setup : Enabling the required repositories. name={{ item }}, state={{ rhsm_repo_state | default('enabled') }}] *** 2025-10-06 21:10:40,305 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:40 +0000 (0:00:00.019) 0:00:51.957 ******** 2025-10-06 21:10:40,319 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:40,326 p=28169 u=zuul n=ansible | TASK [ci_setup : Get current /etc/redhat-release _raw_params=cat /etc/redhat-release] *** 2025-10-06 21:10:40,326 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:40 +0000 (0:00:00.021) 0:00:51.979 ******** 2025-10-06 21:10:40,340 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:40,347 p=28169 u=zuul n=ansible | TASK [ci_setup : Print current /etc/redhat-release msg={{ _current_rh_release.stdout }}] *** 2025-10-06 21:10:40,347 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:40 +0000 (0:00:00.020) 0:00:51.999 ******** 2025-10-06 21:10:40,361 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:40,367 p=28169 u=zuul n=ansible | TASK [ci_setup : Ensure the repos are enabled in the system using yum name={{ item.name }}, baseurl={{ item.baseurl }}, description={{ item.description | default(item.name) }}, gpgcheck={{ item.gpgcheck | default(false) }}, enabled=True, state={{ yum_repo_state | default('present') }}] *** 2025-10-06 21:10:40,367 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:40 +0000 (0:00:00.020) 0:00:52.020 ******** 2025-10-06 21:10:40,386 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:40,392 p=28169 u=zuul n=ansible | TASK [ci_setup : Manage directories path={{ item }}, state={{ directory_state }}, mode=0755, owner={{ ansible_user_id }}, group={{ ansible_user_id }}] *** 2025-10-06 21:10:40,393 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:40 +0000 (0:00:00.025) 0:00:52.045 ******** 2025-10-06 21:10:40,620 p=28169 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/manifests/openstack/cr) 2025-10-06 21:10:40,812 p=28169 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/logs) 2025-10-06 21:10:41,028 p=28169 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/tmp) 2025-10-06 21:10:41,254 p=28169 u=zuul n=ansible | changed: [localhost] 
=> (item=/home/zuul/ci-framework-data/volumes) 2025-10-06 21:10:41,466 p=28169 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/parameters) 2025-10-06 21:10:41,478 p=28169 u=zuul n=ansible | TASK [Prepare install_yamls make targets name=install_yamls, apply={'tags': ['bootstrap']}] *** 2025-10-06 21:10:41,478 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:41 +0000 (0:00:01.085) 0:00:53.131 ******** 2025-10-06 21:10:41,622 p=28169 u=zuul n=ansible | TASK [install_yamls : Ensure directories exist path={{ item }}, state=directory, mode=0755] *** 2025-10-06 21:10:41,622 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:41 +0000 (0:00:00.143) 0:00:53.275 ******** 2025-10-06 21:10:41,838 p=28169 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts) 2025-10-06 21:10:42,034 p=28169 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/roles/install_yamls_makes/tasks) 2025-10-06 21:10:42,186 p=28169 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/parameters) 2025-10-06 21:10:42,200 p=28169 u=zuul n=ansible | TASK [Create variables with local repos based on Zuul items name=install_yamls, tasks_from=zuul_set_operators_repo.yml] *** 2025-10-06 21:10:42,200 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:42 +0000 (0:00:00.577) 0:00:53.852 ******** 2025-10-06 21:10:42,243 p=28169 u=zuul n=ansible | TASK [install_yamls : Set fact with local repos based on Zuul items cifmw_install_yamls_operators_repo={{ cifmw_install_yamls_operators_repo | default({}) | combine(_repo_operator_info | items2dict) }}] *** 2025-10-06 21:10:42,243 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:42 +0000 (0:00:00.043) 0:00:53.896 ******** 2025-10-06 21:10:42,295 p=28169 u=zuul n=ansible | ok: [localhost] => (item={'branch': 'main', 'change': '287', 'change_url': 'https://github.com/openstack-k8s-operators/watcher-operator/pull/287', 'commit_id': '14377136e67c9cd67507a059bfde2f19f140387d', 'patchset': '14377136e67c9cd67507a059bfde2f19f140387d', 'project': {'canonical_hostname': 'github.com', 'canonical_name': 'github.com/openstack-k8s-operators/watcher-operator', 'name': 'openstack-k8s-operators/watcher-operator', 'short_name': 'watcher-operator', 'src_dir': 'src/github.com/openstack-k8s-operators/watcher-operator'}, 'topic': None}) 2025-10-06 21:10:42,302 p=28169 u=zuul n=ansible | TASK [install_yamls : Print helpful data for debugging msg=_repo_operator_name: {{ _repo_operator_name }} _repo_operator_info: {{ _repo_operator_info }} cifmw_install_yamls_operators_repo: {{ cifmw_install_yamls_operators_repo }} ] *** 2025-10-06 21:10:42,302 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:42 +0000 (0:00:00.058) 0:00:53.954 ******** 2025-10-06 21:10:42,349 p=28169 u=zuul n=ansible | ok: [localhost] => (item={'branch': 'main', 'change': '287', 'change_url': 'https://github.com/openstack-k8s-operators/watcher-operator/pull/287', 'commit_id': '14377136e67c9cd67507a059bfde2f19f140387d', 'patchset': '14377136e67c9cd67507a059bfde2f19f140387d', 'project': {'canonical_hostname': 'github.com', 'canonical_name': 'github.com/openstack-k8s-operators/watcher-operator', 'name': 'openstack-k8s-operators/watcher-operator', 'short_name': 'watcher-operator', 'src_dir': 'src/github.com/openstack-k8s-operators/watcher-operator'}, 'topic': None}) => msg: | _repo_operator_name: watcher _repo_operator_info: [{'key': 'WATCHER_REPO', 'value': 
'/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator'}, {'key': 'WATCHER_BRANCH', 'value': ''}] cifmw_install_yamls_operators_repo: {'WATCHER_REPO': '/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator', 'WATCHER_BRANCH': ''} 2025-10-06 21:10:42,361 p=28169 u=zuul n=ansible | TASK [install_yamls : Compute the cifmw_install_yamls_vars final value _install_yamls_override_vars={{ _install_yamls_override_vars | default({}) | combine(item, recursive=True) }}] *** 2025-10-06 21:10:42,362 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:42 +0000 (0:00:00.059) 0:00:54.014 ******** 2025-10-06 21:10:42,469 p=28169 u=zuul n=ansible | ok: [localhost] => (item={'BMO_SETUP': False, 'INSTALL_CERT_MANAGER': False}) 2025-10-06 21:10:42,477 p=28169 u=zuul n=ansible | TASK [install_yamls : Set environment override cifmw_install_yamls_environment fact cifmw_install_yamls_environment={{ _install_yamls_override_vars.keys() | map('upper') | zip(_install_yamls_override_vars.values()) | items2dict(key_name=0, value_name=1) | combine({ 'OUT': cifmw_install_yamls_manifests_dir, 'OUTPUT_DIR': cifmw_install_yamls_edpm_dir, 'CHECKOUT_FROM_OPENSTACK_REF': cifmw_install_yamls_checkout_openstack_ref, 'OPENSTACK_K8S_BRANCH': (zuul is defined and not zuul.branch |regex_search('master|rhos')) | ternary(zuul.branch, 'main') }) | combine(install_yamls_operators_repos) }}, cacheable=True] *** 2025-10-06 21:10:42,477 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:42 +0000 (0:00:00.115) 0:00:54.129 ******** 2025-10-06 21:10:42,514 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:42,520 p=28169 u=zuul n=ansible | TASK [install_yamls : Get environment structure base_path={{ cifmw_install_yamls_repo }}] *** 2025-10-06 21:10:42,521 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:42 +0000 (0:00:00.043) 0:00:54.173 ******** 2025-10-06 21:10:43,052 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:43,059 p=28169 u=zuul n=ansible | TASK [install_yamls : Ensure Output directory exists path={{ cifmw_install_yamls_out_dir }}, state=directory, mode=0755] *** 2025-10-06 21:10:43,059 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:43 +0000 (0:00:00.538) 0:00:54.712 ******** 2025-10-06 21:10:43,245 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:43,252 p=28169 u=zuul n=ansible | TASK [install_yamls : Ensure user cifmw_install_yamls_vars contains existing Makefile variables that=_cifmw_install_yamls_unmatched_vars | length == 0, msg=cifmw_install_yamls_vars contains a variable that is not defined in install_yamls Makefile nor cifmw_install_yamls_whitelisted_vars: {{ _cifmw_install_yamls_unmatched_vars | join(', ')}}, quiet=True] *** 2025-10-06 21:10:43,252 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:43 +0000 (0:00:00.192) 0:00:54.904 ******** 2025-10-06 21:10:43,283 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:43,296 p=28169 u=zuul n=ansible | TASK [install_yamls : Generate /home/zuul/ci-framework-data/artifacts/install_yamls.sh dest={{ cifmw_install_yamls_out_dir }}/{{ cifmw_install_yamls_envfile }}, content={% for k,v in cifmw_install_yamls_environment.items() %} export {{ k }}={{ v }} {% endfor %}, mode=0644] *** 2025-10-06 21:10:43,296 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:43 +0000 (0:00:00.044) 0:00:54.948 ******** 2025-10-06 21:10:43,723 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:43,735 p=28169 u=zuul n=ansible | TASK [install_yamls : Set install_yamls default values 
cifmw_install_yamls_defaults={{ get_makefiles_env_output.makefiles_values | combine(cifmw_install_yamls_environment) }}, cacheable=True] *** 2025-10-06 21:10:43,735 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:43 +0000 (0:00:00.438) 0:00:55.387 ******** 2025-10-06 21:10:43,757 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:43,764 p=28169 u=zuul n=ansible | TASK [install_yamls : Show the env structure var=cifmw_install_yamls_environment] *** 2025-10-06 21:10:43,764 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:43 +0000 (0:00:00.029) 0:00:55.417 ******** 2025-10-06 21:10:43,778 p=28169 u=zuul n=ansible | ok: [localhost] => cifmw_install_yamls_environment: BMO_SETUP: false CHECKOUT_FROM_OPENSTACK_REF: 'true' INSTALL_CERT_MANAGER: false OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm WATCHER_BRANCH: '' WATCHER_REPO: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator 2025-10-06 21:10:43,785 p=28169 u=zuul n=ansible | TASK [install_yamls : Show the env structure defaults var=cifmw_install_yamls_defaults] *** 2025-10-06 21:10:43,785 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:43 +0000 (0:00:00.021) 0:00:55.438 ******** 2025-10-06 21:10:43,809 p=28169 u=zuul n=ansible | ok: [localhost] => cifmw_install_yamls_defaults: ADOPTED_EXTERNAL_NETWORK: 172.21.1.0/24 ADOPTED_INTERNALAPI_NETWORK: 172.17.1.0/24 ADOPTED_STORAGEMGMT_NETWORK: 172.20.1.0/24 ADOPTED_STORAGE_NETWORK: 172.18.1.0/24 ADOPTED_TENANT_NETWORK: 172.9.1.0/24 ANSIBLEEE: config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_BRANCH: main ANSIBLEEE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-operator-index:latest ANSIBLEEE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/kuttl-test.yaml ANSIBLEEE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/tests/kuttl/tests ANSIBLEEE_KUTTL_NAMESPACE: ansibleee-kuttl-tests ANSIBLEEE_REPO: https://github.com/openstack-k8s-operators/openstack-ansibleee-operator ANSIBLEE_COMMIT_HASH: '' BARBICAN: config/samples/barbican_v1beta1_barbican.yaml BARBICAN_BRANCH: main BARBICAN_COMMIT_HASH: '' BARBICAN_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/config/samples/barbican_v1beta1_barbican.yaml BARBICAN_DEPL_IMG: unused BARBICAN_IMG: quay.io/openstack-k8s-operators/barbican-operator-index:latest BARBICAN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/kuttl-test.yaml BARBICAN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/tests/kuttl/tests BARBICAN_KUTTL_NAMESPACE: barbican-kuttl-tests BARBICAN_REPO: https://github.com/openstack-k8s-operators/barbican-operator.git BARBICAN_SERVICE_ENABLED: 'true' BARBICAN_SIMPLE_CRYPTO_ENCRYPTION_KEY: sEFmdFjDUqRM2VemYslV5yGNWjokioJXsg8Nrlc3drU= BAREMETAL_BRANCH: main BAREMETAL_COMMIT_HASH: '' BAREMETAL_IMG: quay.io/openstack-k8s-operators/openstack-baremetal-operator-index:latest BAREMETAL_OS_CONTAINER_IMG: '' BAREMETAL_OS_IMG: '' BAREMETAL_REPO: https://github.com/openstack-k8s-operators/openstack-baremetal-operator.git BAREMETAL_TIMEOUT: 20m BASH_IMG: quay.io/openstack-k8s-operators/bash:latest BGP_ASN: '64999' BGP_LEAF_1: 100.65.4.1 BGP_LEAF_2: 100.64.4.1 BGP_OVN_ROUTING: 
'false' BGP_PEER_ASN: '64999' BGP_SOURCE_IP: 172.30.4.2 BGP_SOURCE_IP6: f00d:f00d:f00d:f00d:f00d:f00d:f00d:42 BMAAS_BRIDGE_IPV4_PREFIX: 172.20.1.2/24 BMAAS_BRIDGE_IPV6_PREFIX: fd00:bbbb::2/64 BMAAS_INSTANCE_DISK_SIZE: '20' BMAAS_INSTANCE_MEMORY: '4096' BMAAS_INSTANCE_NAME_PREFIX: crc-bmaas BMAAS_INSTANCE_NET_MODEL: virtio BMAAS_INSTANCE_OS_VARIANT: centos-stream9 BMAAS_INSTANCE_VCPUS: '2' BMAAS_INSTANCE_VIRT_TYPE: kvm BMAAS_IPV4: 'true' BMAAS_IPV6: 'false' BMAAS_LIBVIRT_USER: sushyemu BMAAS_METALLB_ADDRESS_POOL: 172.20.1.64/26 BMAAS_METALLB_POOL_NAME: baremetal BMAAS_NETWORK_IPV4_PREFIX: 172.20.1.1/24 BMAAS_NETWORK_IPV6_PREFIX: fd00:bbbb::1/64 BMAAS_NETWORK_NAME: crc-bmaas BMAAS_NODE_COUNT: '1' BMAAS_OCP_INSTANCE_NAME: crc BMAAS_REDFISH_PASSWORD: password BMAAS_REDFISH_USERNAME: admin BMAAS_ROUTE_LIBVIRT_NETWORKS: crc-bmaas,crc,default BMAAS_SUSHY_EMULATOR_DRIVER: libvirt BMAAS_SUSHY_EMULATOR_IMAGE: quay.io/metal3-io/sushy-tools:latest BMAAS_SUSHY_EMULATOR_NAMESPACE: sushy-emulator BMAAS_SUSHY_EMULATOR_OS_CLIENT_CONFIG_FILE: /etc/openstack/clouds.yaml BMAAS_SUSHY_EMULATOR_OS_CLOUD: openstack BMH_NAMESPACE: openstack BMO_BRANCH: release-0.9 BMO_COMMIT_HASH: '' BMO_IPA_BRANCH: stable/2024.1 BMO_IRONIC_HOST: 192.168.122.10 BMO_PROVISIONING_INTERFACE: '' BMO_REPO: https://github.com/metal3-io/baremetal-operator BMO_SETUP: false BMO_SETUP_ROUTE_REPLACE: 'true' BM_CTLPLANE_INTERFACE: enp1s0 BM_INSTANCE_MEMORY: '8192' BM_INSTANCE_NAME_PREFIX: edpm-compute-baremetal BM_INSTANCE_NAME_SUFFIX: '0' BM_NETWORK_NAME: default BM_NODE_COUNT: '1' BM_ROOT_PASSWORD: '' BM_ROOT_PASSWORD_SECRET: '' CEILOMETER_CENTRAL_DEPL_IMG: unused CEILOMETER_NOTIFICATION_DEPL_IMG: unused CEPH_BRANCH: release-1.15 CEPH_CLIENT: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/toolbox.yaml CEPH_COMMON: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/common.yaml CEPH_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/cluster-test.yaml CEPH_CRDS: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/crds.yaml CEPH_IMG: quay.io/ceph/demo:latest-squid CEPH_OP: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/operator-openshift.yaml CEPH_REPO: https://github.com/rook/rook.git CERTMANAGER_TIMEOUT: 300s CHECKOUT_FROM_OPENSTACK_REF: 'true' CINDER: config/samples/cinder_v1beta1_cinder.yaml CINDERAPI_DEPL_IMG: unused CINDERBKP_DEPL_IMG: unused CINDERSCH_DEPL_IMG: unused CINDERVOL_DEPL_IMG: unused CINDER_BRANCH: main CINDER_COMMIT_HASH: '' CINDER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/config/samples/cinder_v1beta1_cinder.yaml CINDER_IMG: quay.io/openstack-k8s-operators/cinder-operator-index:latest CINDER_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/kuttl-test.yaml CINDER_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/test/kuttl/tests CINDER_KUTTL_NAMESPACE: cinder-kuttl-tests CINDER_REPO: https://github.com/openstack-k8s-operators/cinder-operator.git CLEANUP_DIR_CMD: rm -Rf CRC_BGP_NIC_1_MAC: '52:54:00:11:11:11' CRC_BGP_NIC_2_MAC: '52:54:00:11:11:12' CRC_HTTPS_PROXY: '' CRC_HTTP_PROXY: '' CRC_STORAGE_NAMESPACE: crc-storage CRC_STORAGE_RETRIES: '3' CRC_URL: '''https://developers.redhat.com/content-gateway/rest/mirror/pub/openshift-v4/clients/crc/latest/crc-linux-amd64.tar.xz''' CRC_VERSION: latest DATAPLANE_ANSIBLE_SECRET: dataplane-ansible-ssh-private-key-secret 
DATAPLANE_ANSIBLE_USER: '' DATAPLANE_COMPUTE_IP: 192.168.122.100 DATAPLANE_CONTAINER_PREFIX: openstack DATAPLANE_CONTAINER_TAG: current-podified DATAPLANE_CUSTOM_SERVICE_RUNNER_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest DATAPLANE_DEFAULT_GW: 192.168.122.1 DATAPLANE_EXTRA_NOVA_CONFIG_FILE: /dev/null DATAPLANE_GROWVOLS_ARGS: /=8GB /tmp=1GB /home=1GB /var=100% DATAPLANE_KUSTOMIZE_SCENARIO: preprovisioned DATAPLANE_NETWORKER_IP: 192.168.122.200 DATAPLANE_NETWORK_INTERFACE_NAME: eth0 DATAPLANE_NOVA_NFS_PATH: '' DATAPLANE_NTP_SERVER: pool.ntp.org DATAPLANE_PLAYBOOK: osp.edpm.download_cache DATAPLANE_REGISTRY_URL: quay.io/podified-antelope-centos9 DATAPLANE_RUNNER_IMG: '' DATAPLANE_SERVER_ROLE: compute DATAPLANE_SSHD_ALLOWED_RANGES: '[''192.168.122.0/24'']' DATAPLANE_TIMEOUT: 30m DATAPLANE_TLS_ENABLED: 'true' DATAPLANE_TOTAL_NETWORKER_NODES: '1' DATAPLANE_TOTAL_NODES: '1' DBSERVICE: galera DESIGNATE: config/samples/designate_v1beta1_designate.yaml DESIGNATE_BRANCH: main DESIGNATE_COMMIT_HASH: '' DESIGNATE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/config/samples/designate_v1beta1_designate.yaml DESIGNATE_IMG: quay.io/openstack-k8s-operators/designate-operator-index:latest DESIGNATE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/kuttl-test.yaml DESIGNATE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/tests/kuttl/tests DESIGNATE_KUTTL_NAMESPACE: designate-kuttl-tests DESIGNATE_REPO: https://github.com/openstack-k8s-operators/designate-operator.git DNSDATA: config/samples/network_v1beta1_dnsdata.yaml DNSDATA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsdata.yaml DNSMASQ: config/samples/network_v1beta1_dnsmasq.yaml DNSMASQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsmasq.yaml DNS_DEPL_IMG: unused DNS_DOMAIN: localdomain DOWNLOAD_TOOLS_SELECTION: all EDPM_ATTACH_EXTNET: 'true' EDPM_COMPUTE_ADDITIONAL_HOST_ROUTES: '''[]''' EDPM_COMPUTE_ADDITIONAL_NETWORKS: '''[]''' EDPM_COMPUTE_CELLS: '1' EDPM_COMPUTE_CEPH_ENABLED: 'true' EDPM_COMPUTE_CEPH_NOVA: 'true' EDPM_COMPUTE_DHCP_AGENT_ENABLED: 'true' EDPM_COMPUTE_SRIOV_ENABLED: 'true' EDPM_COMPUTE_SUFFIX: '0' EDPM_CONFIGURE_DEFAULT_ROUTE: 'true' EDPM_CONFIGURE_HUGEPAGES: 'false' EDPM_CONFIGURE_NETWORKING: 'true' EDPM_FIRSTBOOT_EXTRA: /tmp/edpm-firstboot-extra EDPM_NETWORKER_SUFFIX: '0' EDPM_TOTAL_NETWORKERS: '1' EDPM_TOTAL_NODES: '1' GALERA_REPLICAS: '' GENERATE_SSH_KEYS: 'true' GIT_CLONE_OPTS: '' GLANCE: config/samples/glance_v1beta1_glance.yaml GLANCEAPI_DEPL_IMG: unused GLANCE_BRANCH: main GLANCE_COMMIT_HASH: '' GLANCE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/config/samples/glance_v1beta1_glance.yaml GLANCE_IMG: quay.io/openstack-k8s-operators/glance-operator-index:latest GLANCE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/kuttl-test.yaml GLANCE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/test/kuttl/tests GLANCE_KUTTL_NAMESPACE: glance-kuttl-tests GLANCE_REPO: https://github.com/openstack-k8s-operators/glance-operator.git HEAT: config/samples/heat_v1beta1_heat.yaml HEATAPI_DEPL_IMG: unused HEATCFNAPI_DEPL_IMG: unused HEATENGINE_DEPL_IMG: unused HEAT_AUTH_ENCRYPTION_KEY: 767c3ed056cbaa3b9dfedb8c6f825bf0 HEAT_BRANCH: main HEAT_COMMIT_HASH: '' HEAT_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/config/samples/heat_v1beta1_heat.yaml HEAT_IMG: quay.io/openstack-k8s-operators/heat-operator-index:latest HEAT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/kuttl-test.yaml HEAT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/tests/kuttl/tests HEAT_KUTTL_NAMESPACE: heat-kuttl-tests HEAT_REPO: https://github.com/openstack-k8s-operators/heat-operator.git HEAT_SERVICE_ENABLED: 'true' HORIZON: config/samples/horizon_v1beta1_horizon.yaml HORIZON_BRANCH: main HORIZON_COMMIT_HASH: '' HORIZON_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/config/samples/horizon_v1beta1_horizon.yaml HORIZON_DEPL_IMG: unused HORIZON_IMG: quay.io/openstack-k8s-operators/horizon-operator-index:latest HORIZON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/kuttl-test.yaml HORIZON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/tests/kuttl/tests HORIZON_KUTTL_NAMESPACE: horizon-kuttl-tests HORIZON_REPO: https://github.com/openstack-k8s-operators/horizon-operator.git INFRA_BRANCH: main INFRA_COMMIT_HASH: '' INFRA_IMG: quay.io/openstack-k8s-operators/infra-operator-index:latest INFRA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/kuttl-test.yaml INFRA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/tests/kuttl/tests INFRA_KUTTL_NAMESPACE: infra-kuttl-tests INFRA_REPO: https://github.com/openstack-k8s-operators/infra-operator.git INSTALL_CERT_MANAGER: false INSTALL_NMSTATE: true || false INSTALL_NNCP: true || false INTERNALAPI_HOST_ROUTES: '' IPV6_LAB_IPV4_NETWORK_IPADDRESS: 172.30.0.1/24 IPV6_LAB_IPV6_NETWORK_IPADDRESS: fd00:abcd:abcd:fc00::1/64 IPV6_LAB_LIBVIRT_STORAGE_POOL: default IPV6_LAB_MANAGE_FIREWALLD: 'true' IPV6_LAB_NAT64_HOST_IPV4: 172.30.0.2/24 IPV6_LAB_NAT64_HOST_IPV6: fd00:abcd:abcd:fc00::2/64 IPV6_LAB_NAT64_INSTANCE_NAME: nat64-router IPV6_LAB_NAT64_IPV6_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_NAT64_TAYGA_DYNAMIC_POOL: 192.168.255.0/24 IPV6_LAB_NAT64_TAYGA_IPV4: 192.168.255.1 IPV6_LAB_NAT64_TAYGA_IPV6: fd00:abcd:abcd:fc00::3 IPV6_LAB_NAT64_TAYGA_IPV6_PREFIX: fd00:abcd:abcd:fcff::/96 IPV6_LAB_NAT64_UPDATE_PACKAGES: 'false' IPV6_LAB_NETWORK_NAME: nat64 IPV6_LAB_SNO_CLUSTER_NETWORK: fd00:abcd:0::/48 IPV6_LAB_SNO_HOST_IP: fd00:abcd:abcd:fc00::11 IPV6_LAB_SNO_HOST_PREFIX: '64' IPV6_LAB_SNO_INSTANCE_NAME: sno IPV6_LAB_SNO_MACHINE_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_SNO_OCP_MIRROR_URL: https://mirror.openshift.com/pub/openshift-v4/clients/ocp IPV6_LAB_SNO_OCP_VERSION: latest-4.14 IPV6_LAB_SNO_SERVICE_NETWORK: fd00:abcd:abcd:fc03::/112 IPV6_LAB_SSH_PUB_KEY: /home/zuul/.ssh/id_rsa.pub IPV6_LAB_WORK_DIR: /home/zuul/.ipv6lab IRONIC: config/samples/ironic_v1beta1_ironic.yaml IRONICAPI_DEPL_IMG: unused IRONICCON_DEPL_IMG: unused IRONICINS_DEPL_IMG: unused IRONICNAG_DEPL_IMG: unused IRONICPXE_DEPL_IMG: unused IRONIC_BRANCH: main IRONIC_COMMIT_HASH: '' IRONIC_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/config/samples/ironic_v1beta1_ironic.yaml IRONIC_IMAGE_TAG: release-24.1 IRONIC_IMG: quay.io/openstack-k8s-operators/ironic-operator-index:latest IRONIC_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/kuttl-test.yaml IRONIC_KUTTL_DIR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/tests/kuttl/tests IRONIC_KUTTL_NAMESPACE: ironic-kuttl-tests IRONIC_REPO: https://github.com/openstack-k8s-operators/ironic-operator.git KEYSTONEAPI: config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_DEPL_IMG: unused KEYSTONE_BRANCH: main KEYSTONE_COMMIT_HASH: '' KEYSTONE_FEDERATION_CLIENT_SECRET: COX8bmlKAWn56XCGMrKQJj7dgHNAOl6f KEYSTONE_FEDERATION_CRYPTO_PASSPHRASE: openstack KEYSTONE_IMG: quay.io/openstack-k8s-operators/keystone-operator-index:latest KEYSTONE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/kuttl-test.yaml KEYSTONE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/tests/kuttl/tests KEYSTONE_KUTTL_NAMESPACE: keystone-kuttl-tests KEYSTONE_REPO: https://github.com/openstack-k8s-operators/keystone-operator.git KUBEADMIN_PWD: '12345678' LIBVIRT_SECRET: libvirt-secret LOKI_DEPLOY_MODE: openshift-network LOKI_DEPLOY_NAMESPACE: netobserv LOKI_DEPLOY_SIZE: 1x.demo LOKI_NAMESPACE: openshift-operators-redhat LOKI_OPERATOR_GROUP: openshift-operators-redhat-loki LOKI_SUBSCRIPTION: loki-operator LVMS_CR: '1' MANILA: config/samples/manila_v1beta1_manila.yaml MANILAAPI_DEPL_IMG: unused MANILASCH_DEPL_IMG: unused MANILASHARE_DEPL_IMG: unused MANILA_BRANCH: main MANILA_COMMIT_HASH: '' MANILA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/config/samples/manila_v1beta1_manila.yaml MANILA_IMG: quay.io/openstack-k8s-operators/manila-operator-index:latest MANILA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/kuttl-test.yaml MANILA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/test/kuttl/tests MANILA_KUTTL_NAMESPACE: manila-kuttl-tests MANILA_REPO: https://github.com/openstack-k8s-operators/manila-operator.git MANILA_SERVICE_ENABLED: 'true' MARIADB: config/samples/mariadb_v1beta1_galera.yaml MARIADB_BRANCH: main MARIADB_CHAINSAW_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/tests/chainsaw/config.yaml MARIADB_CHAINSAW_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/tests/chainsaw/tests MARIADB_CHAINSAW_NAMESPACE: mariadb-chainsaw-tests MARIADB_COMMIT_HASH: '' MARIADB_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/config/samples/mariadb_v1beta1_galera.yaml MARIADB_DEPL_IMG: unused MARIADB_IMG: quay.io/openstack-k8s-operators/mariadb-operator-index:latest MARIADB_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/kuttl-test.yaml MARIADB_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/tests/kuttl/tests MARIADB_KUTTL_NAMESPACE: mariadb-kuttl-tests MARIADB_REPO: https://github.com/openstack-k8s-operators/mariadb-operator.git MEMCACHED: config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_DEPL_IMG: unused METADATA_SHARED_SECRET: '1234567842' METALLB_IPV6_POOL: fd00:aaaa::80-fd00:aaaa::90 METALLB_POOL: 192.168.122.80-192.168.122.90 MICROSHIFT: '0' NAMESPACE: openstack NETCONFIG: config/samples/network_v1beta1_netconfig.yaml NETCONFIG_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_netconfig.yaml NETCONFIG_DEPL_IMG: unused NETOBSERV_DEPLOY_NAMESPACE: netobserv NETOBSERV_NAMESPACE: openshift-netobserv-operator NETOBSERV_OPERATOR_GROUP: openshift-netobserv-operator-net NETOBSERV_SUBSCRIPTION: netobserv-operator NETWORK_BGP: 'false' NETWORK_DESIGNATE_ADDRESS_PREFIX: 172.28.0 NETWORK_DESIGNATE_EXT_ADDRESS_PREFIX: 172.50.0 NETWORK_INTERNALAPI_ADDRESS_PREFIX: 172.17.0 NETWORK_ISOLATION: 'true' NETWORK_ISOLATION_INSTANCE_NAME: crc NETWORK_ISOLATION_IPV4: 'true' NETWORK_ISOLATION_IPV4_ADDRESS: 172.16.1.1/24 NETWORK_ISOLATION_IPV4_NAT: 'true' NETWORK_ISOLATION_IPV6: 'false' NETWORK_ISOLATION_IPV6_ADDRESS: fd00:aaaa::1/64 NETWORK_ISOLATION_IP_ADDRESS: 192.168.122.10 NETWORK_ISOLATION_MAC: '52:54:00:11:11:10' NETWORK_ISOLATION_NETWORK_NAME: net-iso NETWORK_ISOLATION_NET_NAME: default NETWORK_ISOLATION_USE_DEFAULT_NETWORK: 'true' NETWORK_MTU: '1500' NETWORK_STORAGEMGMT_ADDRESS_PREFIX: 172.20.0 NETWORK_STORAGE_ADDRESS_PREFIX: 172.18.0 NETWORK_STORAGE_MACVLAN: '' NETWORK_TENANT_ADDRESS_PREFIX: 172.19.0 NETWORK_VLAN_START: '20' NETWORK_VLAN_STEP: '1' NEUTRONAPI: config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_DEPL_IMG: unused NEUTRON_BRANCH: main NEUTRON_COMMIT_HASH: '' NEUTRON_IMG: quay.io/openstack-k8s-operators/neutron-operator-index:latest NEUTRON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/kuttl-test.yaml NEUTRON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/test/kuttl/tests NEUTRON_KUTTL_NAMESPACE: neutron-kuttl-tests NEUTRON_REPO: https://github.com/openstack-k8s-operators/neutron-operator.git NFS_HOME: /home/nfs NMSTATE_NAMESPACE: openshift-nmstate NMSTATE_OPERATOR_GROUP: openshift-nmstate-tn6k8 NMSTATE_SUBSCRIPTION: kubernetes-nmstate-operator NNCP_ADDITIONAL_HOST_ROUTES: '' NNCP_BGP_1_INTERFACE: enp7s0 NNCP_BGP_1_IP_ADDRESS: 100.65.4.2 NNCP_BGP_2_INTERFACE: enp8s0 NNCP_BGP_2_IP_ADDRESS: 100.64.4.2 NNCP_BRIDGE: ospbr NNCP_CLEANUP_TIMEOUT: 120s NNCP_CTLPLANE_IPV6_ADDRESS_PREFIX: 'fd00:aaaa::' NNCP_CTLPLANE_IPV6_ADDRESS_SUFFIX: '10' NNCP_CTLPLANE_IP_ADDRESS_PREFIX: 192.168.122 NNCP_CTLPLANE_IP_ADDRESS_SUFFIX: '10' NNCP_DNS_SERVER: 192.168.122.1 NNCP_DNS_SERVER_IPV6: fd00:aaaa::1 NNCP_GATEWAY: 192.168.122.1 NNCP_GATEWAY_IPV6: fd00:aaaa::1 NNCP_INTERFACE: enp6s0 NNCP_NODES: '' NNCP_TIMEOUT: 240s NOVA: config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_BRANCH: main NOVA_COMMIT_HASH: '' NOVA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/nova-operator/config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_IMG: quay.io/openstack-k8s-operators/nova-operator-index:latest NOVA_REPO: https://github.com/openstack-k8s-operators/nova-operator.git NUMBER_OF_INSTANCES: '1' OCP_NETWORK_NAME: crc OCTAVIA: config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_BRANCH: main OCTAVIA_COMMIT_HASH: '' OCTAVIA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_IMG: quay.io/openstack-k8s-operators/octavia-operator-index:latest OCTAVIA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/kuttl-test.yaml OCTAVIA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/tests/kuttl/tests OCTAVIA_KUTTL_NAMESPACE: 
octavia-kuttl-tests OCTAVIA_REPO: https://github.com/openstack-k8s-operators/octavia-operator.git OKD: 'false' OPENSTACK_BRANCH: main OPENSTACK_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-bundle:latest OPENSTACK_COMMIT_HASH: '' OPENSTACK_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_CRDS_DIR: openstack_crds OPENSTACK_CTLPLANE: config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_IMG: quay.io/openstack-k8s-operators/openstack-operator-index:latest OPENSTACK_K8S_BRANCH: main OPENSTACK_K8S_TAG: latest OPENSTACK_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/kuttl-test.yaml OPENSTACK_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/tests/kuttl/tests OPENSTACK_KUTTL_NAMESPACE: openstack-kuttl-tests OPENSTACK_NEUTRON_CUSTOM_CONF: '' OPENSTACK_REPO: https://github.com/openstack-k8s-operators/openstack-operator.git OPENSTACK_STORAGE_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-storage-bundle:latest OPERATOR_BASE_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator OPERATOR_CHANNEL: '' OPERATOR_NAMESPACE: openstack-operators OPERATOR_SOURCE: '' OPERATOR_SOURCE_NAMESPACE: '' OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm OVNCONTROLLER: config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_NMAP: 'true' OVNDBS: config/samples/ovn_v1beta1_ovndbcluster.yaml OVNDBS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovndbcluster.yaml OVNNORTHD: config/samples/ovn_v1beta1_ovnnorthd.yaml OVNNORTHD_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovnnorthd.yaml OVN_BRANCH: main OVN_COMMIT_HASH: '' OVN_IMG: quay.io/openstack-k8s-operators/ovn-operator-index:latest OVN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/kuttl-test.yaml OVN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/tests/kuttl/tests OVN_KUTTL_NAMESPACE: ovn-kuttl-tests OVN_REPO: https://github.com/openstack-k8s-operators/ovn-operator.git PASSWORD: '12345678' PLACEMENTAPI: config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_DEPL_IMG: unused PLACEMENT_BRANCH: main PLACEMENT_COMMIT_HASH: '' PLACEMENT_IMG: quay.io/openstack-k8s-operators/placement-operator-index:latest PLACEMENT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/kuttl-test.yaml PLACEMENT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/tests/kuttl/tests PLACEMENT_KUTTL_NAMESPACE: placement-kuttl-tests PLACEMENT_REPO: https://github.com/openstack-k8s-operators/placement-operator.git PULL_SECRET: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/pull-secret.txt RABBITMQ: docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_BRANCH: patches RABBITMQ_COMMIT_HASH: '' RABBITMQ_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/rabbitmq-operator/docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_DEPL_IMG: unused RABBITMQ_IMG: quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest RABBITMQ_REPO: https://github.com/openstack-k8s-operators/rabbitmq-cluster-operator.git REDHAT_OPERATORS: 'false' REDIS: config/samples/redis_v1beta1_redis.yaml REDIS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator-redis/config/samples/redis_v1beta1_redis.yaml REDIS_DEPL_IMG: unused RH_REGISTRY_PWD: '' RH_REGISTRY_USER: '' SECRET: osp-secret SG_CORE_DEPL_IMG: unused STANDALONE_COMPUTE_DRIVER: libvirt STANDALONE_EXTERNAL_NET_PREFFIX: 172.21.0 STANDALONE_INTERNALAPI_NET_PREFIX: 172.17.0 STANDALONE_STORAGEMGMT_NET_PREFIX: 172.20.0 STANDALONE_STORAGE_NET_PREFIX: 172.18.0 STANDALONE_TENANT_NET_PREFIX: 172.19.0 STORAGEMGMT_HOST_ROUTES: '' STORAGE_CLASS: local-storage STORAGE_HOST_ROUTES: '' SWIFT: config/samples/swift_v1beta1_swift.yaml SWIFT_BRANCH: main SWIFT_COMMIT_HASH: '' SWIFT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/config/samples/swift_v1beta1_swift.yaml SWIFT_IMG: quay.io/openstack-k8s-operators/swift-operator-index:latest SWIFT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/kuttl-test.yaml SWIFT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/tests/kuttl/tests SWIFT_KUTTL_NAMESPACE: swift-kuttl-tests SWIFT_REPO: https://github.com/openstack-k8s-operators/swift-operator.git TELEMETRY: config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_BRANCH: main TELEMETRY_COMMIT_HASH: '' TELEMETRY_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_IMG: quay.io/openstack-k8s-operators/telemetry-operator-index:latest TELEMETRY_KUTTL_BASEDIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator TELEMETRY_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/kuttl-test.yaml TELEMETRY_KUTTL_NAMESPACE: telemetry-kuttl-tests TELEMETRY_KUTTL_RELPATH: tests/kuttl/suites TELEMETRY_REPO: https://github.com/openstack-k8s-operators/telemetry-operator.git TENANT_HOST_ROUTES: '' TIMEOUT: 300s TLS_ENABLED: 'false' WATCHER_BRANCH: '' WATCHER_REPO: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator tripleo_deploy: 'export REGISTRY_USER:' 2025-10-06 21:10:43,816 p=28169 u=zuul n=ansible | TASK [install_yamls : Generate make targets install_yamls_path={{ cifmw_install_yamls_repo }}, output_directory={{ cifmw_install_yamls_tasks_out }}] *** 2025-10-06 21:10:43,816 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:43 +0000 (0:00:00.030) 0:00:55.468 ******** 2025-10-06 21:10:44,118 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:44,128 p=28169 u=zuul n=ansible | TASK [install_yamls : Debug generate_make module var=cifmw_generate_makes] ***** 2025-10-06 21:10:44,128 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:44 +0000 (0:00:00.312) 0:00:55.780 ******** 2025-10-06 21:10:44,149 p=28169 u=zuul n=ansible | ok: [localhost] => cifmw_generate_makes: changed: false debug: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/Makefile: - all - help - cleanup - deploy_cleanup - wait - crc_storage - crc_storage_cleanup - crc_storage_release - crc_storage_with_retries - crc_storage_cleanup_with_retries - operator_namespace - namespace - namespace_cleanup - 
input - input_cleanup - crc_bmo_setup - crc_bmo_cleanup - openstack_prep - openstack - openstack_wait - openstack_init - openstack_cleanup - openstack_repo - openstack_deploy_prep - openstack_deploy - openstack_wait_deploy - openstack_deploy_cleanup - openstack_update_run - update_services - update_system - openstack_patch_version - edpm_deploy_generate_keys - edpm_patch_ansible_runner_image - edpm_deploy_prep - edpm_deploy_cleanup - edpm_deploy - edpm_deploy_baremetal_prep - edpm_deploy_baremetal - edpm_wait_deploy_baremetal - edpm_wait_deploy - edpm_register_dns - edpm_nova_discover_hosts - openstack_crds - openstack_crds_cleanup - edpm_deploy_networker_prep - edpm_deploy_networker_cleanup - edpm_deploy_networker - infra_prep - infra - infra_cleanup - dns_deploy_prep - dns_deploy - dns_deploy_cleanup - netconfig_deploy_prep - netconfig_deploy - netconfig_deploy_cleanup - memcached_deploy_prep - memcached_deploy - memcached_deploy_cleanup - keystone_prep - keystone - keystone_cleanup - keystone_deploy_prep - keystone_deploy - keystone_deploy_cleanup - barbican_prep - barbican - barbican_cleanup - barbican_deploy_prep - barbican_deploy - barbican_deploy_validate - barbican_deploy_cleanup - mariadb - mariadb_cleanup - mariadb_deploy_prep - mariadb_deploy - mariadb_deploy_cleanup - placement_prep - placement - placement_cleanup - placement_deploy_prep - placement_deploy - placement_deploy_cleanup - glance_prep - glance - glance_cleanup - glance_deploy_prep - glance_deploy - glance_deploy_cleanup - ovn_prep - ovn - ovn_cleanup - ovn_deploy_prep - ovn_deploy - ovn_deploy_cleanup - neutron_prep - neutron - neutron_cleanup - neutron_deploy_prep - neutron_deploy - neutron_deploy_cleanup - cinder_prep - cinder - cinder_cleanup - cinder_deploy_prep - cinder_deploy - cinder_deploy_cleanup - rabbitmq_prep - rabbitmq - rabbitmq_cleanup - rabbitmq_deploy_prep - rabbitmq_deploy - rabbitmq_deploy_cleanup - ironic_prep - ironic - ironic_cleanup - ironic_deploy_prep - ironic_deploy - ironic_deploy_cleanup - octavia_prep - octavia - octavia_cleanup - octavia_deploy_prep - octavia_deploy - octavia_deploy_cleanup - designate_prep - designate - designate_cleanup - designate_deploy_prep - designate_deploy - designate_deploy_cleanup - nova_prep - nova - nova_cleanup - nova_deploy_prep - nova_deploy - nova_deploy_cleanup - mariadb_kuttl_run - mariadb_kuttl - kuttl_db_prep - kuttl_db_cleanup - kuttl_common_prep - kuttl_common_cleanup - keystone_kuttl_run - keystone_kuttl - barbican_kuttl_run - barbican_kuttl - placement_kuttl_run - placement_kuttl - cinder_kuttl_run - cinder_kuttl - neutron_kuttl_run - neutron_kuttl - octavia_kuttl_run - octavia_kuttl - designate_kuttl - designate_kuttl_run - ovn_kuttl_run - ovn_kuttl - infra_kuttl_run - infra_kuttl - ironic_kuttl_run - ironic_kuttl - ironic_kuttl_crc - heat_kuttl_run - heat_kuttl - heat_kuttl_crc - ansibleee_kuttl_run - ansibleee_kuttl_cleanup - ansibleee_kuttl_prep - ansibleee_kuttl - glance_kuttl_run - glance_kuttl - manila_kuttl_run - manila_kuttl - swift_kuttl_run - swift_kuttl - horizon_kuttl_run - horizon_kuttl - openstack_kuttl_run - openstack_kuttl - mariadb_chainsaw_run - mariadb_chainsaw - horizon_prep - horizon - horizon_cleanup - horizon_deploy_prep - horizon_deploy - horizon_deploy_cleanup - heat_prep - heat - heat_cleanup - heat_deploy_prep - heat_deploy - heat_deploy_cleanup - ansibleee_prep - ansibleee - ansibleee_cleanup - baremetal_prep - baremetal - baremetal_cleanup - ceph_help - ceph - ceph_cleanup - rook_prep - rook - rook_deploy_prep - 
rook_deploy - rook_crc_disk - rook_cleanup - lvms - nmstate - nncp - nncp_cleanup - netattach - netattach_cleanup - metallb - metallb_config - metallb_config_cleanup - metallb_cleanup - loki - loki_cleanup - loki_deploy - loki_deploy_cleanup - netobserv - netobserv_cleanup - netobserv_deploy - netobserv_deploy_cleanup - manila_prep - manila - manila_cleanup - manila_deploy_prep - manila_deploy - manila_deploy_cleanup - telemetry_prep - telemetry - telemetry_cleanup - telemetry_deploy_prep - telemetry_deploy - telemetry_deploy_cleanup - telemetry_kuttl_run - telemetry_kuttl - swift_prep - swift - swift_cleanup - swift_deploy_prep - swift_deploy - swift_deploy_cleanup - certmanager - certmanager_cleanup - validate_marketplace - redis_deploy_prep - redis_deploy - redis_deploy_cleanup - set_slower_etcd_profile /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/Makefile: - help - download_tools - nfs - nfs_cleanup - crc - crc_cleanup - crc_scrub - crc_attach_default_interface - crc_attach_default_interface_cleanup - ipv6_lab_network - ipv6_lab_network_cleanup - ipv6_lab_nat64_router - ipv6_lab_nat64_router_cleanup - ipv6_lab_sno - ipv6_lab_sno_cleanup - ipv6_lab - ipv6_lab_cleanup - attach_default_interface - attach_default_interface_cleanup - network_isolation_bridge - network_isolation_bridge_cleanup - edpm_baremetal_compute - edpm_compute - edpm_compute_bootc - edpm_ansible_runner - edpm_computes_bgp - edpm_compute_repos - edpm_compute_cleanup - edpm_networker - edpm_networker_cleanup - edpm_deploy_instance - tripleo_deploy - standalone_deploy - standalone_sync - standalone - standalone_cleanup - standalone_snapshot - standalone_revert - cifmw_prepare - cifmw_cleanup - bmaas_network - bmaas_network_cleanup - bmaas_route_crc_and_crc_bmaas_networks - bmaas_route_crc_and_crc_bmaas_networks_cleanup - bmaas_crc_attach_network - bmaas_crc_attach_network_cleanup - bmaas_crc_baremetal_bridge - bmaas_crc_baremetal_bridge_cleanup - bmaas_baremetal_net_nad - bmaas_baremetal_net_nad_cleanup - bmaas_metallb - bmaas_metallb_cleanup - bmaas_virtual_bms - bmaas_virtual_bms_cleanup - bmaas_sushy_emulator - bmaas_sushy_emulator_cleanup - bmaas_sushy_emulator_wait - bmaas_generate_nodes_yaml - bmaas - bmaas_cleanup failed: false success: true 2025-10-06 21:10:44,159 p=28169 u=zuul n=ansible | TASK [install_yamls : Create the install_yamls parameters file dest={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/install-yamls-params.yml, content={{ { 'cifmw_install_yamls_environment': cifmw_install_yamls_environment, 'cifmw_install_yamls_defaults': cifmw_install_yamls_defaults } | to_nice_yaml }}, mode=0644] *** 2025-10-06 21:10:44,159 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:44 +0000 (0:00:00.030) 0:00:55.811 ******** 2025-10-06 21:10:44,564 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:44,579 p=28169 u=zuul n=ansible | TASK [install_yamls : Create empty cifmw_install_yamls_environment if needed cifmw_install_yamls_environment={}] *** 2025-10-06 21:10:44,579 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:44 +0000 (0:00:00.420) 0:00:56.232 ******** 2025-10-06 21:10:44,602 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:44,630 p=28169 u=zuul n=ansible | TASK [discover_latest_image : Get latest image url={{ cifmw_discover_latest_image_base_url }}, image_prefix={{ cifmw_discover_latest_image_qcow_prefix }}, images_file={{ cifmw_discover_latest_image_images_file }}] *** 2025-10-06 21:10:44,630 
p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:44 +0000 (0:00:00.051) 0:00:56.283 ******** 2025-10-06 21:10:45,288 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:45,295 p=28169 u=zuul n=ansible | TASK [discover_latest_image : Export facts accordingly cifmw_discovered_image_name={{ discovered_image['data']['image_name'] }}, cifmw_discovered_image_url={{ discovered_image['data']['image_url'] }}, cifmw_discovered_hash={{ discovered_image['data']['hash'] }}, cifmw_discovered_hash_algorithm={{ discovered_image['data']['hash_algorithm'] }}, cacheable=True] *** 2025-10-06 21:10:45,295 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:45 +0000 (0:00:00.664) 0:00:56.948 ******** 2025-10-06 21:10:45,318 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:45,330 p=28169 u=zuul n=ansible | TASK [cifmw_setup : Create artifacts with custom params mode=0644, dest={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/custom-params.yml, content={{ ci_framework_params | to_nice_yaml }}] *** 2025-10-06 21:10:45,330 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:45 +0000 (0:00:00.034) 0:00:56.982 ******** 2025-10-06 21:10:45,728 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:45,740 p=28169 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-10-06 21:10:45,740 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:45 +0000 (0:00:00.410) 0:00:57.393 ******** 2025-10-06 21:10:45,821 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:45,828 p=28169 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] 
*** 2025-10-06 21:10:45,828 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:45 +0000 (0:00:00.087) 0:00:57.481 ******** 2025-10-06 21:10:45,931 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:45,939 p=28169 u=zuul n=ansible | TASK [run_hook : Loop on hooks for pre_infra _raw_params={{ hook.type }}.yml] *** 2025-10-06 21:10:45,939 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:45 +0000 (0:00:00.110) 0:00:57.591 ******** 2025-10-06 21:10:46,054 p=28169 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/run_hook/tasks/playbook.yml for localhost => (item={'name': 'Download needed tools', 'inventory': 'localhost,', 'connection': 'local', 'type': 'playbook', 'source': '/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/download_tools.yaml'}) 2025-10-06 21:10:46,064 p=28169 u=zuul n=ansible | TASK [run_hook : Set playbook path for Download needed tools cifmw_basedir={{ _bdir }}, hook_name={{ _hook_name }}, playbook_path={{ _play | realpath }}, log_path={{ _bdir }}/logs/{{ step }}_{{ _hook_name }}.log, extra_vars=-e operator_namespace={{ _operator_namespace }} -e namespace={{ _namespace}} {%- if hook.extra_vars is defined and hook.extra_vars|length > 0 -%} {% for key,value in hook.extra_vars.items() -%} {%- if key == 'file' %} -e "@{{ value }}" {%- else %} -e "{{ key }}={{ value }}" {%- endif %} {%- endfor %} {%- endif %}] *** 2025-10-06 21:10:46,064 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:46 +0000 (0:00:00.125) 0:00:57.717 ******** 2025-10-06 21:10:46,111 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:46,119 p=28169 u=zuul n=ansible | TASK [run_hook : Get file stat path={{ playbook_path }}] *********************** 2025-10-06 21:10:46,119 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:46 +0000 (0:00:00.054) 0:00:57.772 ******** 2025-10-06 21:10:46,305 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:46,320 p=28169 u=zuul n=ansible | TASK [run_hook : Fail if playbook doesn't exist msg=Playbook {{ playbook_path }} doesn't seem to exist.] 
*** 2025-10-06 21:10:46,320 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:46 +0000 (0:00:00.200) 0:00:57.973 ******** 2025-10-06 21:10:46,339 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:46,353 p=28169 u=zuul n=ansible | TASK [run_hook : Get parameters files paths={{ (cifmw_basedir, 'artifacts/parameters') | path_join }}, file_type=file, patterns=*.yml] *** 2025-10-06 21:10:46,353 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:46 +0000 (0:00:00.033) 0:00:58.006 ******** 2025-10-06 21:10:46,557 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:46,568 p=28169 u=zuul n=ansible | TASK [run_hook : Add parameters artifacts as extra variables extra_vars={{ extra_vars }} {% for file in cifmw_run_hook_parameters_files.files %} -e "@{{ file.path }}" {%- endfor %}] *** 2025-10-06 21:10:46,568 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:46 +0000 (0:00:00.214) 0:00:58.220 ******** 2025-10-06 21:10:46,586 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:46,596 p=28169 u=zuul n=ansible | TASK [run_hook : Ensure log directory exists path={{ log_path | dirname }}, state=directory, mode=0755] *** 2025-10-06 21:10:46,597 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:46 +0000 (0:00:00.028) 0:00:58.249 ******** 2025-10-06 21:10:46,775 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:46,782 p=28169 u=zuul n=ansible | TASK [run_hook : Ensure artifacts directory exists path={{ cifmw_basedir }}/artifacts, state=directory, mode=0755] *** 2025-10-06 21:10:46,782 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:46 +0000 (0:00:00.185) 0:00:58.434 ******** 2025-10-06 21:10:46,950 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:46,960 p=28169 u=zuul n=ansible | TASK [run_hook : Run hook without retry - Download needed tools] *************** 2025-10-06 21:10:46,960 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:46 +0000 (0:00:00.178) 0:00:58.613 ******** 2025-10-06 21:10:47,025 p=28169 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_000_run_hook_without_retry.log 2025-10-06 21:11:17,763 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:17,772 p=28169 u=zuul n=ansible | TASK [run_hook : Run hook with retry - Download needed tools] ****************** 2025-10-06 21:11:17,772 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:17 +0000 (0:00:30.811) 0:01:29.424 ******** 2025-10-06 21:11:17,799 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:17,810 p=28169 u=zuul n=ansible | TASK [run_hook : Check if we have a file path={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml] *** 2025-10-06 21:11:17,810 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:17 +0000 (0:00:00.038) 0:01:29.462 ******** 2025-10-06 21:11:17,973 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:17,990 p=28169 u=zuul n=ansible | TASK [run_hook : Load generated content in main playbook file={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml] *** 2025-10-06 21:11:17,991 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:17 +0000 (0:00:00.180) 0:01:29.643 ******** 2025-10-06 21:11:18,007 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:18,068 p=28169 u=zuul n=ansible | PLAY [Prepare host virtualization] ********************************************* 2025-10-06 21:11:18,094 p=28169 u=zuul n=ansible | TASK [cifmw_setup : Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] *** 2025-10-06 
21:11:18,094 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.103) 0:01:29.746 ******** 2025-10-06 21:11:18,155 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:18,162 p=28169 u=zuul n=ansible | TASK [Ensure libvirt is present/configured name=libvirt_manager] *************** 2025-10-06 21:11:18,162 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.068) 0:01:29.814 ******** 2025-10-06 21:11:18,186 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:18,193 p=28169 u=zuul n=ansible | TASK [Prepare OpenShift provisioner node name=openshift_provisioner_node] ****** 2025-10-06 21:11:18,193 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.030) 0:01:29.845 ******** 2025-10-06 21:11:18,215 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:18,246 p=28169 u=zuul n=ansible | PLAY [Run cifmw_setup infra, build package, container and operators, deploy EDPM] *** 2025-10-06 21:11:18,280 p=28169 u=zuul n=ansible | TASK [cifmw_setup : Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] *** 2025-10-06 21:11:18,280 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.087) 0:01:29.933 ******** 2025-10-06 21:11:18,336 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:18,344 p=28169 u=zuul n=ansible | TASK [networking_mapper : Check for Networking Environment Definition file existence path={{ cifmw_networking_mapper_networking_env_def_path }}] *** 2025-10-06 21:11:18,344 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.063) 0:01:29.996 ******** 2025-10-06 21:11:18,524 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:18,531 p=28169 u=zuul n=ansible | TASK [networking_mapper : Check for Networking Definition file existence that=['_net_env_def_stat.stat.exists'], msg=Ensure that the Networking Environment Definition file exists in {{ cifmw_networking_mapper_networking_env_def_path }}, quiet=True] *** 2025-10-06 21:11:18,531 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.187) 0:01:30.184 ******** 2025-10-06 21:11:18,551 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:18,559 p=28169 u=zuul n=ansible | TASK [networking_mapper : Load the Networking Definition from file path={{ cifmw_networking_mapper_networking_env_def_path }}] *** 2025-10-06 21:11:18,559 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.027) 0:01:30.211 ******** 2025-10-06 21:11:18,579 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:18,587 p=28169 u=zuul n=ansible | TASK [networking_mapper : Set cifmw_networking_env_definition is present cifmw_networking_env_definition={{ _net_env_def_slurp['content'] | b64decode | from_yaml }}, cacheable=True] *** 2025-10-06 21:11:18,587 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.027) 0:01:30.239 ******** 2025-10-06 21:11:18,606 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:18,620 p=28169 u=zuul n=ansible | TASK [Deploy OCP using Hive name=hive] ***************************************** 2025-10-06 21:11:18,620 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.033) 0:01:30.273 ******** 2025-10-06 21:11:18,641 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:18,649 p=28169 u=zuul n=ansible | TASK [Prepare CRC name=rhol_crc] *********************************************** 2025-10-06 21:11:18,649 p=28169 u=zuul n=ansible | Monday 06 October 2025
21:11:18 +0000 (0:00:00.028) 0:01:30.301 ******** 2025-10-06 21:11:18,672 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:18,679 p=28169 u=zuul n=ansible | TASK [Deploy OpenShift cluster using dev-scripts name=devscripts] ************** 2025-10-06 21:11:18,680 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.030) 0:01:30.332 ******** 2025-10-06 21:11:18,699 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:18,707 p=28169 u=zuul n=ansible | TASK [openshift_login : Ensure output directory exists path={{ cifmw_openshift_login_basedir }}/artifacts, state=directory, mode=0755] *** 2025-10-06 21:11:18,707 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.027) 0:01:30.359 ******** 2025-10-06 21:11:18,887 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:18,903 p=28169 u=zuul n=ansible | TASK [openshift_login : OpenShift login _raw_params=login.yml] ***************** 2025-10-06 21:11:18,904 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.197) 0:01:30.556 ******** 2025-10-06 21:11:18,954 p=28169 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/openshift_login/tasks/login.yml for localhost 2025-10-06 21:11:18,971 p=28169 u=zuul n=ansible | TASK [openshift_login : Check if the password file is present path={{ cifmw_openshift_login_password_file | default(cifmw_openshift_password_file) }}] *** 2025-10-06 21:11:18,971 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.067) 0:01:30.623 ******** 2025-10-06 21:11:18,992 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:19,002 p=28169 u=zuul n=ansible | TASK [openshift_login : Fetch user password content src={{ cifmw_openshift_login_password_file | default(cifmw_openshift_password_file) }}] *** 2025-10-06 21:11:19,002 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.031) 0:01:30.655 ******** 2025-10-06 21:11:19,023 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:19,034 p=28169 u=zuul n=ansible | TASK [openshift_login : Set user password as a fact cifmw_openshift_login_password={{ cifmw_openshift_login_password_file_slurp.content | b64decode }}, cacheable=True] *** 2025-10-06 21:11:19,034 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.031) 0:01:30.686 ******** 2025-10-06 21:11:19,055 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:19,066 p=28169 u=zuul n=ansible | TASK [openshift_login : Set role variables cifmw_openshift_login_kubeconfig={{ cifmw_openshift_login_kubeconfig | default(cifmw_openshift_kubeconfig) | default( ansible_env.KUBECONFIG if 'KUBECONFIG' in ansible_env else cifmw_openshift_login_kubeconfig_default_path ) | trim }}, cifmw_openshift_login_user={{ cifmw_openshift_login_user | default(cifmw_openshift_user) | default(omit) }}, cifmw_openshift_login_password={{ cifmw_openshift_login_password | default(cifmw_openshift_password) | default(omit) }}, cifmw_openshift_login_api={{ cifmw_openshift_login_api | default(cifmw_openshift_api) | default(omit) }}, cifmw_openshift_login_cert_login={{ cifmw_openshift_login_cert_login | default(false)}}, cifmw_openshift_login_provided_token={{ cifmw_openshift_provided_token | default(omit) }}, cacheable=True] *** 2025-10-06 21:11:19,066 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.032) 0:01:30.718 ******** 2025-10-06 21:11:19,101 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 
21:11:19,110 p=28169 u=zuul n=ansible | TASK [openshift_login : Check if kubeconfig exists path={{ cifmw_openshift_login_kubeconfig }}] *** 2025-10-06 21:11:19,110 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.044) 0:01:30.763 ******** 2025-10-06 21:11:19,277 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:19,285 p=28169 u=zuul n=ansible | TASK [openshift_login : Assert that enough data is provided to log in to OpenShift that=cifmw_openshift_login_kubeconfig_stat.stat.exists or (cifmw_openshift_login_provided_token is defined and cifmw_openshift_login_provided_token != '') or ( (cifmw_openshift_login_user is defined) and (cifmw_openshift_login_password is defined) and (cifmw_openshift_login_api is defined) ), msg=If an existing kubeconfig is not provided user/pwd or provided/initial token and API URL must be given] *** 2025-10-06 21:11:19,285 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.174) 0:01:30.938 ******** 2025-10-06 21:11:19,317 p=28169 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2025-10-06 21:11:19,328 p=28169 u=zuul n=ansible | TASK [openshift_login : Fetch kubeconfig content src={{ cifmw_openshift_login_kubeconfig }}] *** 2025-10-06 21:11:19,328 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.043) 0:01:30.981 ******** 2025-10-06 21:11:19,349 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:19,360 p=28169 u=zuul n=ansible | TASK [openshift_login : Fetch x509 key based users cifmw_openshift_login_key_based_users={{ ( cifmw_openshift_login_kubeconfig_content_b64.content | b64decode | from_yaml ). users | default([]) | selectattr('user.client-certificate-data', 'defined') | map(attribute="name") | map("split", "/") | map("first") }}, cacheable=True] *** 2025-10-06 21:11:19,360 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.031) 0:01:31.012 ******** 2025-10-06 21:11:19,385 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:19,396 p=28169 u=zuul n=ansible | TASK [openshift_login : Assign key based user if not provided and available cifmw_openshift_login_user={{ (cifmw_openshift_login_assume_cert_system_user | ternary('system:', '')) + (cifmw_openshift_login_key_based_users | map('replace', 'system:', '') | unique | first) }}, cifmw_openshift_login_cert_login=True, cacheable=True] *** 2025-10-06 21:11:19,396 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.036) 0:01:31.049 ******** 2025-10-06 21:11:19,422 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:19,433 p=28169 u=zuul n=ansible | TASK [openshift_login : Set the retry count cifmw_openshift_login_retries_cnt={{ 0 if cifmw_openshift_login_retries_cnt is undefined else cifmw_openshift_login_retries_cnt|int + 1 }}] *** 2025-10-06 21:11:19,433 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.037) 0:01:31.086 ******** 2025-10-06 21:11:19,465 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:19,477 p=28169 u=zuul n=ansible | TASK [openshift_login : Fetch token _raw_params=try_login.yml] ***************** 2025-10-06 21:11:19,477 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.043) 0:01:31.129 ******** 2025-10-06 21:11:19,507 p=28169 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/openshift_login/tasks/try_login.yml for localhost 2025-10-06 21:11:19,523 p=28169 u=zuul n=ansible | TASK [openshift_login : 
Try get OpenShift access token _raw_params=oc whoami -t] *** 2025-10-06 21:11:19,523 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.046) 0:01:31.175 ******** 2025-10-06 21:11:19,539 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:19,548 p=28169 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift token output_dir={{ cifmw_openshift_login_basedir }}/artifacts, script=oc login {%- if cifmw_openshift_login_provided_token is not defined %} {%- if cifmw_openshift_login_user is defined %} -u {{ cifmw_openshift_login_user }} {%- endif %} {%- if cifmw_openshift_login_password is defined %} -p {{ cifmw_openshift_login_password }} {%- endif %} {% else %} --token={{ cifmw_openshift_login_provided_token }} {%- endif %} {%- if cifmw_openshift_login_skip_tls_verify|bool %} --insecure-skip-tls-verify=true {%- endif %} {%- if cifmw_openshift_login_api is defined %} {{ cifmw_openshift_login_api }} {%- endif %}] *** 2025-10-06 21:11:19,548 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.024) 0:01:31.200 ******** 2025-10-06 21:11:19,605 p=28169 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_001_fetch_openshift.log 2025-10-06 21:11:20,140 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:20,156 p=28169 u=zuul n=ansible | TASK [openshift_login : Ensure kubeconfig is provided that=cifmw_openshift_login_kubeconfig != ""] *** 2025-10-06 21:11:20,156 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:20 +0000 (0:00:00.608) 0:01:31.808 ******** 2025-10-06 21:11:20,191 p=28169 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2025-10-06 21:11:20,242 p=28169 u=zuul n=ansible | TASK [openshift_login : Fetch new OpenShift access token _raw_params=oc whoami -t] *** 2025-10-06 21:11:20,242 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:20 +0000 (0:00:00.085) 0:01:31.894 ******** 2025-10-06 21:11:20,531 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:20,539 p=28169 u=zuul n=ansible | TASK [openshift_login : Set new OpenShift token cifmw_openshift_login_token={{ (not cifmw_openshift_login_new_token_out.skipped | default(false)) | ternary(cifmw_openshift_login_new_token_out.stdout, cifmw_openshift_login_whoami_out.stdout) }}, cacheable=True] *** 2025-10-06 21:11:20,540 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:20 +0000 (0:00:00.297) 0:01:32.192 ******** 2025-10-06 21:11:20,566 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:20,573 p=28169 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift API URL _raw_params=oc whoami --show-server=true] *** 2025-10-06 21:11:20,573 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:20 +0000 (0:00:00.033) 0:01:32.225 ******** 2025-10-06 21:11:20,899 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:20,907 p=28169 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift kubeconfig context _raw_params=oc whoami -c] *** 2025-10-06 21:11:20,908 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:20 +0000 (0:00:00.334) 0:01:32.560 ******** 2025-10-06 21:11:21,203 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:21,213 p=28169 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift current user _raw_params=oc whoami] **** 2025-10-06 21:11:21,213 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:21 +0000 (0:00:00.305) 0:01:32.865 ******** 2025-10-06 21:11:21,524 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 
21:11:21,533 p=28169 u=zuul n=ansible | TASK [openshift_login : Set OpenShift user, context and API facts cifmw_openshift_login_api={{ cifmw_openshift_login_api_out.stdout }}, cifmw_openshift_login_context={{ cifmw_openshift_login_context_out.stdout }}, cifmw_openshift_login_user={{ _oauth_user }}, cifmw_openshift_kubeconfig={{ cifmw_openshift_login_kubeconfig }}, cifmw_openshift_api={{ cifmw_openshift_login_api_out.stdout }}, cifmw_openshift_context={{ cifmw_openshift_login_context_out.stdout }}, cifmw_openshift_user={{ _oauth_user }}, cifmw_openshift_token={{ cifmw_openshift_login_token | default(omit) }}, cifmw_install_yamls_environment={{ ( cifmw_install_yamls_environment | combine({'KUBECONFIG': cifmw_openshift_login_kubeconfig}) ) if cifmw_install_yamls_environment is defined else omit }}, cacheable=True] *** 2025-10-06 21:11:21,534 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:21 +0000 (0:00:00.320) 0:01:33.186 ******** 2025-10-06 21:11:21,577 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:21,584 p=28169 u=zuul n=ansible | TASK [openshift_login : Create the openshift_login parameters file dest={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/openshift-login-params.yml, content={{ cifmw_openshift_login_params_content | from_yaml | to_nice_yaml }}, mode=0600] *** 2025-10-06 21:11:21,584 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:21 +0000 (0:00:00.050) 0:01:33.237 ******** 2025-10-06 21:11:22,016 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:22,023 p=28169 u=zuul n=ansible | TASK [openshift_login : Read the install yamls parameters file path={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/install-yamls-params.yml] *** 2025-10-06 21:11:22,024 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:22 +0000 (0:00:00.439) 0:01:33.676 ******** 2025-10-06 21:11:22,351 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:22,364 p=28169 u=zuul n=ansible | TASK [openshift_login : Append the KUBECONFIG to the install yamls parameters content={{ cifmw_openshift_login_install_yamls_artifacts_slurp['content'] | b64decode | from_yaml | combine( { 'cifmw_install_yamls_environment': { 'KUBECONFIG': cifmw_openshift_login_kubeconfig } }, recursive=true) | to_nice_yaml }}, dest={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/install-yamls-params.yml, mode=0600] *** 2025-10-06 21:11:22,364 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:22 +0000 (0:00:00.340) 0:01:34.016 ******** 2025-10-06 21:11:22,812 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:22,842 p=28169 u=zuul n=ansible | TASK [openshift_setup : Ensure output directory exists path={{ cifmw_openshift_setup_basedir }}/artifacts, state=directory, mode=0755] *** 2025-10-06 21:11:22,842 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:22 +0000 (0:00:00.478) 0:01:34.495 ******** 2025-10-06 21:11:23,046 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:23,056 p=28169 u=zuul n=ansible | TASK [openshift_setup : Fetch namespaces to create cifmw_openshift_setup_namespaces={{ (( ([cifmw_install_yamls_defaults['NAMESPACE']] + ([cifmw_install_yamls_defaults['OPERATOR_NAMESPACE']] if 'OPERATOR_NAMESPACE' is in cifmw_install_yamls_defaults else []) ) if cifmw_install_yamls_defaults is defined else [] ) + cifmw_openshift_setup_create_namespaces) | unique }}] *** 2025-10-06 21:11:23,056 p=28169 u=zuul n=ansible | Monday 06 October 2025 
21:11:23 +0000 (0:00:00.213) 0:01:34.709 ******** 2025-10-06 21:11:23,081 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:23,097 p=28169 u=zuul n=ansible | TASK [openshift_setup : Create required namespaces kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit) }}, name={{ item }}, kind=Namespace, state=present] *** 2025-10-06 21:11:23,097 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:23 +0000 (0:00:00.041) 0:01:34.750 ******** 2025-10-06 21:11:23,993 p=28169 u=zuul n=ansible | changed: [localhost] => (item=openstack) 2025-10-06 21:11:24,667 p=28169 u=zuul n=ansible | changed: [localhost] => (item=openstack-operators) 2025-10-06 21:11:24,679 p=28169 u=zuul n=ansible | TASK [openshift_setup : Get internal OpenShift registry route kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, kind=Route, name=default-route, namespace=openshift-image-registry] *** 2025-10-06 21:11:24,679 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:24 +0000 (0:00:01.581) 0:01:36.331 ******** 2025-10-06 21:11:24,694 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:24,704 p=28169 u=zuul n=ansible | TASK [openshift_setup : Allow anonymous image-pulls in CRC registry for targeted namespaces state=present, kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'kind': 'RoleBinding', 'apiVersion': 'rbac.authorization.k8s.io/v1', 'metadata': {'name': 'system:image-puller', 'namespace': '{{ item }}'}, 'subjects': [{'kind': 'User', 'name': 'system:anonymous'}, {'kind': 'User', 'name': 'system:unauthenticated'}], 'roleRef': {'kind': 'ClusterRole', 'name': 'system:image-puller'}}] *** 2025-10-06 21:11:24,705 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:24 +0000 (0:00:00.025) 0:01:36.357 ******** 2025-10-06 21:11:24,725 p=28169 u=zuul n=ansible | skipping: [localhost] => (item=openstack) 2025-10-06 21:11:24,726 p=28169 u=zuul n=ansible | skipping: [localhost] => (item=openstack-operators) 2025-10-06 21:11:24,726 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:24,735 p=28169 u=zuul n=ansible | TASK [openshift_setup : Wait for the image registry to be ready kind=Deployment, name=image-registry, namespace=openshift-image-registry, kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, wait=True, wait_sleep=10, wait_timeout=600, wait_condition={'type': 'Available', 'status': 'True'}] *** 2025-10-06 21:11:24,735 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:24 +0000 (0:00:00.030) 0:01:36.387 ******** 2025-10-06 21:11:24,755 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:24,763 p=28169 u=zuul n=ansible | TASK [openshift_setup : Login into OpenShift internal registry output_dir={{ cifmw_openshift_setup_basedir }}/artifacts, script=podman login -u {{ cifmw_openshift_user }} -p {{ cifmw_openshift_token }} {%- if cifmw_openshift_setup_skip_internal_registry_tls_verify|bool %} --tls-verify=false {%- endif %} {{ cifmw_openshift_setup_registry_default_route.resources[0].spec.host }}] *** 2025-10-06 21:11:24,764 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:24 +0000 (0:00:00.028) 0:01:36.416 ******** 2025-10-06 21:11:24,783 p=28169 u=zuul 
n=ansible | skipping: [localhost] 2025-10-06 21:11:24,791 p=28169 u=zuul n=ansible | TASK [Ensure we have custom CA installed on host role=install_ca] ************** 2025-10-06 21:11:24,791 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:24 +0000 (0:00:00.027) 0:01:36.444 ******** 2025-10-06 21:11:24,809 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:24,818 p=28169 u=zuul n=ansible | TASK [openshift_setup : Update ca bundle _raw_params=update-ca-trust extract] *** 2025-10-06 21:11:24,818 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:24 +0000 (0:00:00.026) 0:01:36.470 ******** 2025-10-06 21:11:24,839 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:24,848 p=28169 u=zuul n=ansible | TASK [openshift_setup : Slurp CAs file src={{ cifmw_openshift_setup_ca_bundle_path }}] *** 2025-10-06 21:11:24,848 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:24 +0000 (0:00:00.030) 0:01:36.501 ******** 2025-10-06 21:11:24,870 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:24,883 p=28169 u=zuul n=ansible | TASK [openshift_setup : Create config map with registry CAs kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'apiVersion': 'v1', 'kind': 'ConfigMap', 'metadata': {'namespace': 'openshift-config', 'name': 'registry-cas'}, 'data': '{{ _config_map_data | items2dict }}'}] *** 2025-10-06 21:11:24,883 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:24 +0000 (0:00:00.034) 0:01:36.535 ******** 2025-10-06 21:11:24,905 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:24,914 p=28169 u=zuul n=ansible | TASK [openshift_setup : Install Red Hat CA for pulling images from internal registry kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, merge_type=merge, definition={'apiVersion': 'config.openshift.io/v1', 'kind': 'Image', 'metadata': {'name': 'cluster'}, 'spec': {'additionalTrustedCA': {'name': 'registry-cas'}}}] *** 2025-10-06 21:11:24,914 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:24 +0000 (0:00:00.030) 0:01:36.566 ******** 2025-10-06 21:11:24,936 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:24,944 p=28169 u=zuul n=ansible | TASK [openshift_setup : Add insecure registry kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, merge_type=merge, definition={'apiVersion': 'config.openshift.io/v1', 'kind': 'Image', 'metadata': {'name': 'cluster'}, 'spec': {'registrySources': {'insecureRegistries': ['{{ cifmw_update_containers_registry }}'], 'allowedRegistries': '{{ all_registries }}'}}}] *** 2025-10-06 21:11:24,944 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:24 +0000 (0:00:00.030) 0:01:36.596 ******** 2025-10-06 21:11:25,645 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:25,653 p=28169 u=zuul n=ansible | TASK [openshift_setup : Create a ICSP with repository digest mirrors kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'apiVersion': 'operator.openshift.io/v1alpha1', 'kind': 'ImageContentSourcePolicy', 'metadata': {'name': 'registry-digest-mirrors'}, 'spec': {'repositoryDigestMirrors': '{{ cifmw_openshift_setup_digest_mirrors }}'}}] *** 
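For context on the two registry-related openshift_setup tasks recorded just above: both merge cluster-wide image configuration, apparently through the kubernetes.core.k8s module judging by the kubeconfig/api_key/merge_type parameters. Written out as a plain manifest, and with placeholder registry names standing in for the templated cifmw_update_containers_registry and all_registries values (they are not taken from this job), the "Add insecure registry" definition corresponds roughly to the following sketch:

    apiVersion: config.openshift.io/v1
    kind: Image
    metadata:
      name: cluster
    spec:
      registrySources:
        # placeholder value; the job templates this from cifmw_update_containers_registry
        insecureRegistries:
          - registry.example.internal:5000
        # placeholder list; the job expands all_registries dynamically
        allowedRegistries:
          - quay.io
          - registry.redhat.io
          - registry.example.internal:5000

Because the task passes merge_type=merge, the definition is applied as a JSON merge patch against the existing cluster-scoped Image object named "cluster" rather than created as a new resource; the ICSP task that follows applies an ImageContentSourcePolicy named registry-digest-mirrors whose repositoryDigestMirrors list is rendered from cifmw_openshift_setup_digest_mirrors.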
2025-10-06 21:11:25,653 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:25 +0000 (0:00:00.708) 0:01:37.305 ******** 2025-10-06 21:11:25,679 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:25,689 p=28169 u=zuul n=ansible | TASK [openshift_setup : Metal3 tweaks _raw_params=metal3_config.yml] *********** 2025-10-06 21:11:25,689 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:25 +0000 (0:00:00.036) 0:01:37.341 ******** 2025-10-06 21:11:25,716 p=28169 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/openshift_setup/tasks/metal3_config.yml for localhost 2025-10-06 21:11:25,731 p=28169 u=zuul n=ansible | TASK [openshift_setup : Fetch Metal3 configuration name _raw_params=oc get Provisioning -o name] *** 2025-10-06 21:11:25,731 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:25 +0000 (0:00:00.041) 0:01:37.383 ******** 2025-10-06 21:11:25,747 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:25,756 p=28169 u=zuul n=ansible | TASK [openshift_setup : Apply the patch to Metal3 Provisioning _raw_params=oc patch {{ _cifmw_openshift_setup_provisioning_name.stdout }} --type='json' -p='[{"op": "replace", "path": "/spec/watchAllNamespaces", "value": true}]'] *** 2025-10-06 21:11:25,757 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:25 +0000 (0:00:00.025) 0:01:37.409 ******** 2025-10-06 21:11:25,769 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:25,778 p=28169 u=zuul n=ansible | TASK [openshift_setup : Gather network.operator info kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, api_version=operator.openshift.io/v1, kind=Network, name=cluster] *** 2025-10-06 21:11:25,778 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:25 +0000 (0:00:00.021) 0:01:37.430 ******** 2025-10-06 21:11:26,688 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:26,707 p=28169 u=zuul n=ansible | TASK [openshift_setup : Patch network operator api_version=operator.openshift.io/v1, kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Network, name=cluster, persist_config=True, patch=[{'path': '/spec/defaultNetwork/ovnKubernetesConfig/gatewayConfig/routingViaHost', 'value': True, 'op': 'replace'}, {'path': '/spec/defaultNetwork/ovnKubernetesConfig/gatewayConfig/ipForwarding', 'value': 'Global', 'op': 'replace'}]] *** 2025-10-06 21:11:26,707 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:26 +0000 (0:00:00.928) 0:01:38.359 ******** 2025-10-06 21:11:27,657 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:27,665 p=28169 u=zuul n=ansible | TASK [openshift_setup : Patch samples registry configuration kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, api_version=samples.operator.openshift.io/v1, kind=Config, name=cluster, patch=[{'op': 'replace', 'path': '/spec/samplesRegistry', 'value': 'registry.redhat.io'}]] *** 2025-10-06 21:11:27,665 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:27 +0000 (0:00:00.958) 0:01:39.318 ******** 2025-10-06 21:11:28,419 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:28,438 p=28169 u=zuul n=ansible | TASK [openshift_setup : Delete the pods from openshift-marketplace namespace kind=Pod, state=absent, delete_all=True, kubeconfig={{ cifmw_openshift_kubeconfig }}, namespace=openshift-marketplace] *** 2025-10-06 21:11:28,438 
p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:28 +0000 (0:00:00.772) 0:01:40.091 ******** 2025-10-06 21:11:28,459 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:28,477 p=28169 u=zuul n=ansible | TASK [openshift_setup : Wait for openshift-marketplace pods to be running _raw_params=oc wait pod --all --for=condition=Ready -n openshift-marketplace --timeout=1m] *** 2025-10-06 21:11:28,477 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:28 +0000 (0:00:00.038) 0:01:40.129 ******** 2025-10-06 21:11:28,496 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:28,515 p=28169 u=zuul n=ansible | TASK [Deploy Observability operator. name=openshift_obs] *********************** 2025-10-06 21:11:28,515 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:28 +0000 (0:00:00.038) 0:01:40.168 ******** 2025-10-06 21:11:28,537 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:28,548 p=28169 u=zuul n=ansible | TASK [Deploy Metal3 BMHs name=deploy_bmh] ************************************** 2025-10-06 21:11:28,548 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:28 +0000 (0:00:00.032) 0:01:40.200 ******** 2025-10-06 21:11:28,576 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:28,588 p=28169 u=zuul n=ansible | TASK [Install certmanager operator role name=cert_manager] ********************* 2025-10-06 21:11:28,588 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:28 +0000 (0:00:00.040) 0:01:40.240 ******** 2025-10-06 21:11:28,694 p=28169 u=zuul n=ansible | TASK [cert_manager : Create role needed directories path={{ cifmw_cert_manager_manifests_dir }}, state=directory, mode=0755] *** 2025-10-06 21:11:28,694 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:28 +0000 (0:00:00.106) 0:01:40.347 ******** 2025-10-06 21:11:28,908 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:28,915 p=28169 u=zuul n=ansible | TASK [cert_manager : Create the cifmw_cert_manager_operator_namespace namespace kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit) }}, name={{ cifmw_cert_manager_operator_namespace }}, kind=Namespace, state=present] *** 2025-10-06 21:11:28,915 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:28 +0000 (0:00:00.220) 0:01:40.567 ******** 2025-10-06 21:11:29,624 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:29,632 p=28169 u=zuul n=ansible | TASK [cert_manager : Install from Release Manifest _raw_params=release_manifest.yml] *** 2025-10-06 21:11:29,632 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:29 +0000 (0:00:00.717) 0:01:41.285 ******** 2025-10-06 21:11:29,662 p=28169 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/cert_manager/tasks/release_manifest.yml for localhost 2025-10-06 21:11:29,674 p=28169 u=zuul n=ansible | TASK [cert_manager : Download release manifests url={{ cifmw_cert_manager_release_manifest }}, dest={{ cifmw_cert_manager_manifests_dir }}/cert_manager_manifest.yml, mode=0664] *** 2025-10-06 21:11:29,674 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:29 +0000 (0:00:00.041) 0:01:41.326 ******** 2025-10-06 21:11:30,233 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:30,241 p=28169 u=zuul n=ansible | TASK [cert_manager : Install cert-manager from release manifest kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ 
cifmw_openshift_context | default(omit) }}, state=present, src={{ cifmw_cert_manager_manifests_dir }}/cert_manager_manifest.yml] *** 2025-10-06 21:11:30,241 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:30 +0000 (0:00:00.566) 0:01:41.893 ******** 2025-10-06 21:11:32,722 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:32,740 p=28169 u=zuul n=ansible | TASK [cert_manager : Install from OLM Manifest _raw_params=olm_manifest.yml] *** 2025-10-06 21:11:32,740 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:32 +0000 (0:00:02.499) 0:01:44.393 ******** 2025-10-06 21:11:32,754 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:32,764 p=28169 u=zuul n=ansible | TASK [cert_manager : Check for cert-manager namespace existence kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit) }}, name=cert-manager, kind=Namespace, field_selectors=['status.phase=Active']] *** 2025-10-06 21:11:32,764 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:32 +0000 (0:00:00.024) 0:01:44.417 ******** 2025-10-06 21:11:33,434 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:33,442 p=28169 u=zuul n=ansible | TASK [cert_manager : Wait for cert-manager pods to be ready kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit) }}, namespace=cert-manager, kind=Pod, wait=True, wait_sleep=10, wait_timeout=600, wait_condition={'type': 'Ready', 'status': 'True'}, label_selectors=['app = {{ item }}']] *** 2025-10-06 21:11:33,442 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:33 +0000 (0:00:00.677) 0:01:45.095 ******** 2025-10-06 21:11:44,189 p=28169 u=zuul n=ansible | ok: [localhost] => (item=cainjector) 2025-10-06 21:11:44,912 p=28169 u=zuul n=ansible | ok: [localhost] => (item=webhook) 2025-10-06 21:11:45,592 p=28169 u=zuul n=ansible | ok: [localhost] => (item=cert-manager) 2025-10-06 21:11:45,607 p=28169 u=zuul n=ansible | TASK [cert_manager : Create $HOME/bin dir path={{ lookup('env', 'HOME') }}/bin, state=directory, mode=0755] *** 2025-10-06 21:11:45,607 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:45 +0000 (0:00:12.165) 0:01:57.260 ******** 2025-10-06 21:11:45,793 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:45,801 p=28169 u=zuul n=ansible | TASK [cert_manager : Install cert-manager cmctl CLI url=https://github.com/cert-manager/cmctl/releases/{{ cifmw_cert_manager_version }}/download/cmctl_{{ _os }}_{{ _arch }}, dest={{ lookup('env', 'HOME') }}/bin/cmctl, mode=0755] *** 2025-10-06 21:11:45,801 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:45 +0000 (0:00:00.194) 0:01:57.454 ******** 2025-10-06 21:11:47,045 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:47,055 p=28169 u=zuul n=ansible | TASK [cert_manager : Verify cert_manager api _raw_params={{ lookup('env', 'HOME') }}/bin/cmctl check api --wait=2m] *** 2025-10-06 21:11:47,055 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:01.253) 0:01:58.707 ******** 2025-10-06 21:11:47,363 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:47,377 p=28169 u=zuul n=ansible | TASK [Configure hosts networking using nmstate name=ci_nmstate] **************** 2025-10-06 21:11:47,377 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:00.321) 0:01:59.029 ******** 2025-10-06 21:11:47,401 p=28169 u=zuul n=ansible | skipping: [localhost] 
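The cert-manager bring-up recorded above gates on per-component pod readiness (cainjector, webhook, cert-manager) before installing cmctl and running "cmctl check api --wait=2m". The parameters logged for the wait step (kind=Pod, label_selectors, wait_condition, wait_timeout) match kubernetes.core.k8s_info; as a sketch only, with an illustrative kubeconfig variable rather than the role's actual one, an equivalent task would look like:

    - name: Wait for cert-manager pods to be ready (illustrative sketch)
      kubernetes.core.k8s_info:
        kubeconfig: "{{ my_kubeconfig }}"  # placeholder for the job's kubeconfig path
        namespace: cert-manager
        kind: Pod
        label_selectors:
          - "app = {{ item }}"
        wait: true
        wait_sleep: 10
        wait_timeout: 600
        wait_condition:
          type: Ready
          status: "True"
      loop:
        - cainjector
        - webhook
        - cert-manager

The roughly 12 seconds recorded for that step are almost entirely the first loop item: cainjector reports Ready at 21:11:44, webhook and cert-manager follow within about a second each, and the subsequent cmctl API check returns almost immediately.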
2025-10-06 21:11:47,412 p=28169 u=zuul n=ansible | TASK [Configure multus networks name=ci_multus] ******************************** 2025-10-06 21:11:47,412 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:00.035) 0:01:59.065 ******** 2025-10-06 21:11:47,433 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:47,445 p=28169 u=zuul n=ansible | TASK [Deploy Sushy Emulator service pod name=sushy_emulator] ******************* 2025-10-06 21:11:47,446 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:00.033) 0:01:59.098 ******** 2025-10-06 21:11:47,468 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:47,479 p=28169 u=zuul n=ansible | TASK [Setup Libvirt on controller name=libvirt_manager] ************************ 2025-10-06 21:11:47,479 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:00.033) 0:01:59.131 ******** 2025-10-06 21:11:47,499 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:47,507 p=28169 u=zuul n=ansible | TASK [Prepare container package builder name=pkg_build] ************************ 2025-10-06 21:11:47,507 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:00.028) 0:01:59.160 ******** 2025-10-06 21:11:47,535 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:47,543 p=28169 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-10-06 21:11:47,543 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:00.036) 0:01:59.196 ******** 2025-10-06 21:11:47,607 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:47,618 p=28169 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] 
*** 2025-10-06 21:11:47,618 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:00.074) 0:01:59.271 ******** 2025-10-06 21:11:47,707 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:47,716 p=28169 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_infra _raw_params={{ hook.type }}.yml] *** 2025-10-06 21:11:47,716 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:00.097) 0:01:59.368 ******** 2025-10-06 21:11:47,828 p=28169 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/run_hook/tasks/playbook.yml for localhost => (item={'name': 'Fetch nodes facts and save them as parameters', 'type': 'playbook', 'inventory': '/home/zuul/ci-framework-data/artifacts/zuul_inventory.yml', 'source': 'fetch_compute_facts.yml'}) 2025-10-06 21:11:47,839 p=28169 u=zuul n=ansible | TASK [run_hook : Set playbook path for Fetch nodes facts and save them as parameters cifmw_basedir={{ _bdir }}, hook_name={{ _hook_name }}, playbook_path={{ _play | realpath }}, log_path={{ _bdir }}/logs/{{ step }}_{{ _hook_name }}.log, extra_vars=-e operator_namespace={{ _operator_namespace }} -e namespace={{ _namespace}} {%- if hook.extra_vars is defined and hook.extra_vars|length > 0 -%} {% for key,value in hook.extra_vars.items() -%} {%- if key == 'file' %} -e "@{{ value }}" {%- else %} -e "{{ key }}={{ value }}" {%- endif %} {%- endfor %} {%- endif %}] *** 2025-10-06 21:11:47,839 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:00.123) 0:01:59.492 ******** 2025-10-06 21:11:47,880 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:47,888 p=28169 u=zuul n=ansible | TASK [run_hook : Get file stat path={{ playbook_path }}] *********************** 2025-10-06 21:11:47,888 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:00.048) 0:01:59.540 ******** 2025-10-06 21:11:48,069 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:48,077 p=28169 u=zuul n=ansible | TASK [run_hook : Fail if playbook doesn't exist msg=Playbook {{ playbook_path }} doesn't seem to exist.] 
*** 2025-10-06 21:11:48,078 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:48 +0000 (0:00:00.189) 0:01:59.730 ******** 2025-10-06 21:11:48,091 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:48,101 p=28169 u=zuul n=ansible | TASK [run_hook : Get parameters files paths={{ (cifmw_basedir, 'artifacts/parameters') | path_join }}, file_type=file, patterns=*.yml] *** 2025-10-06 21:11:48,101 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:48 +0000 (0:00:00.023) 0:01:59.754 ******** 2025-10-06 21:11:48,293 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:48,304 p=28169 u=zuul n=ansible | TASK [run_hook : Add parameters artifacts as extra variables extra_vars={{ extra_vars }} {% for file in cifmw_run_hook_parameters_files.files %} -e "@{{ file.path }}" {%- endfor %}] *** 2025-10-06 21:11:48,304 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:48 +0000 (0:00:00.202) 0:01:59.957 ******** 2025-10-06 21:11:48,322 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:48,333 p=28169 u=zuul n=ansible | TASK [run_hook : Ensure log directory exists path={{ log_path | dirname }}, state=directory, mode=0755] *** 2025-10-06 21:11:48,333 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:48 +0000 (0:00:00.028) 0:01:59.986 ******** 2025-10-06 21:11:48,514 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:48,522 p=28169 u=zuul n=ansible | TASK [run_hook : Ensure artifacts directory exists path={{ cifmw_basedir }}/artifacts, state=directory, mode=0755] *** 2025-10-06 21:11:48,522 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:48 +0000 (0:00:00.188) 0:02:00.174 ******** 2025-10-06 21:11:48,693 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:48,705 p=28169 u=zuul n=ansible | TASK [run_hook : Run hook without retry - Fetch nodes facts and save them as parameters] *** 2025-10-06 21:11:48,706 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:48 +0000 (0:00:00.183) 0:02:00.358 ******** 2025-10-06 21:11:48,760 p=28169 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_002_run_hook_without_retry_fetch.log 2025-10-06 21:11:59,247 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:59,259 p=28169 u=zuul n=ansible | TASK [run_hook : Run hook with retry - Fetch nodes facts and save them as parameters] *** 2025-10-06 21:11:59,260 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:10.553) 0:02:10.912 ******** 2025-10-06 21:11:59,278 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:59,286 p=28169 u=zuul n=ansible | TASK [run_hook : Check if we have a file path={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml] *** 2025-10-06 21:11:59,286 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.026) 0:02:10.938 ******** 2025-10-06 21:11:59,456 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:59,463 p=28169 u=zuul n=ansible | TASK [run_hook : Load generated content in main playbook file={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml] *** 2025-10-06 21:11:59,463 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.177) 0:02:11.116 ******** 2025-10-06 21:11:59,483 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:59,502 p=28169 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not 
mapping', '(hooks | default([])) is iterable']] *** 2025-10-06 21:11:59,502 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.038) 0:02:11.154 ******** 2025-10-06 21:11:59,553 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:59,560 p=28169 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2025-10-06 21:11:59,560 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.058) 0:02:11.213 ******** 2025-10-06 21:11:59,655 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:59,663 p=28169 u=zuul n=ansible | TASK [run_hook : Loop on hooks for pre_package_build _raw_params={{ hook.type }}.yml] *** 2025-10-06 21:11:59,663 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.102) 0:02:11.316 ******** 2025-10-06 21:11:59,749 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:59,762 p=28169 u=zuul n=ansible | TASK [cifmw_setup : Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] *** 2025-10-06 21:11:59,762 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.098) 0:02:11.414 ******** 2025-10-06 21:11:59,840 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:59,848 p=28169 u=zuul n=ansible | TASK [pkg_build : Generate volume list build_volumes={% for pkg in cifmw_pkg_build_list -%} - "{{ pkg.src|default(cifmw_pkg_build_pkg_basedir ~ '/' ~ pkg.name) }}:/root/src/{{ pkg.name }}:z" - "{{ cifmw_pkg_build_basedir }}/volumes/packages/{{ pkg.name }}:/root/{{ pkg.name }}:z" - "{{ cifmw_pkg_build_basedir }}/logs/build_{{ pkg.name }}:/root/logs:z" {% endfor -%} - "{{ cifmw_pkg_build_basedir }}/volumes/packages/gating_repo:/root/gating_repo:z" - "{{ cifmw_pkg_build_basedir }}/artifacts/repositories:/root/yum.repos.d:z,ro" - "{{ cifmw_pkg_build_basedir }}/artifacts/build-packages.yml:/root/playbook.yml:z,ro" ] *** 2025-10-06 21:11:59,849 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.086) 0:02:11.501 ******** 2025-10-06 21:11:59,870 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:59,878 p=28169 u=zuul n=ansible | TASK [pkg_build : Build package using container name={{ pkg.name }}-builder, auto_remove=True, detach=False, privileged=True, log_driver=k8s-file, log_level=info, log_opt={'path': '{{ cifmw_pkg_build_basedir }}/logs/{{ pkg.name }}-builder.log'}, image={{ cifmw_pkg_build_ctx_name }}, volume={{ build_volumes | from_yaml }}, security_opt=['label=disable', 'seccomp=unconfined', 'apparmor=unconfined'], env={'PROJECT': '{{ pkg.name }}'}, command=ansible-playbook -i localhost, -c local playbook.yml] *** 2025-10-06 21:11:59,878 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.029) 0:02:11.531 ******** 2025-10-06 21:11:59,890 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:59,903 p=28169 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-10-06 21:11:59,903 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.024) 0:02:11.556 ******** 2025-10-06 21:11:59,965 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:59,972 p=28169 u=zuul n=ansible | TASK [run_hook : Assert single hooks 
are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2025-10-06 21:11:59,973 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.069) 0:02:11.625 ******** 2025-10-06 21:12:00,064 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:00,072 p=28169 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_package_build _raw_params={{ hook.type }}.yml] *** 2025-10-06 21:12:00,072 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:00 +0000 (0:00:00.099) 0:02:11.725 ******** 2025-10-06 21:12:00,161 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:00,203 p=28169 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-10-06 21:12:00,203 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:00 +0000 (0:00:00.130) 0:02:11.855 ******** 2025-10-06 21:12:00,253 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:00,266 p=28169 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2025-10-06 21:12:00,266 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:00 +0000 (0:00:00.062) 0:02:11.918 ******** 2025-10-06 21:12:00,362 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:00,380 p=28169 u=zuul n=ansible | TASK [run_hook : Loop on hooks for pre_container_build _raw_params={{ hook.type }}.yml] *** 2025-10-06 21:12:00,380 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:00 +0000 (0:00:00.114) 0:02:12.032 ******** 2025-10-06 21:12:00,476 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:00,489 p=28169 u=zuul n=ansible | TASK [cifmw_setup : Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] *** 2025-10-06 21:12:00,489 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:00 +0000 (0:00:00.109) 0:02:12.142 ******** 2025-10-06 21:12:00,602 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:00,610 p=28169 u=zuul n=ansible | TASK [cifmw_setup : Nothing to do yet msg=No support for that step yet] ******** 2025-10-06 21:12:00,610 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:00 +0000 (0:00:00.120) 0:02:12.262 ******** 2025-10-06 21:12:00,625 p=28169 u=zuul n=ansible | ok: [localhost] => msg: No support for that step yet 2025-10-06 21:12:00,633 p=28169 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-10-06 21:12:00,633 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:00 +0000 (0:00:00.023) 0:02:12.285 ******** 2025-10-06 21:12:00,684 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:00,693 p=28169 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] 
*** 2025-10-06 21:12:00,693 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:00 +0000 (0:00:00.059) 0:02:12.345 ******** 2025-10-06 21:12:00,824 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:00,832 p=28169 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_container_build _raw_params={{ hook.type }}.yml] *** 2025-10-06 21:12:00,832 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:00 +0000 (0:00:00.139) 0:02:12.485 ******** 2025-10-06 21:12:00,924 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:00,943 p=28169 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-10-06 21:12:00,943 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:00 +0000 (0:00:00.110) 0:02:12.596 ******** 2025-10-06 21:12:00,996 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:01,004 p=28169 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2025-10-06 21:12:01,004 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.061) 0:02:12.657 ******** 2025-10-06 21:12:01,102 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:01,110 p=28169 u=zuul n=ansible | TASK [run_hook : Loop on hooks for pre_operator_build _raw_params={{ hook.type }}.yml] *** 2025-10-06 21:12:01,110 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.105) 0:02:12.763 ******** 2025-10-06 21:12:01,203 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,230 p=28169 u=zuul n=ansible | TASK [cifmw_setup : Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] *** 2025-10-06 21:12:01,231 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.120) 0:02:12.883 ******** 2025-10-06 21:12:01,283 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:01,291 p=28169 u=zuul n=ansible | TASK [operator_build : Ensure mandatory directories exist path={{ cifmw_operator_build_basedir }}/{{ item }}, state=directory, mode=0755] *** 2025-10-06 21:12:01,291 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.060) 0:02:12.943 ******** 2025-10-06 21:12:01,316 p=28169 u=zuul n=ansible | skipping: [localhost] => (item=artifacts) 2025-10-06 21:12:01,320 p=28169 u=zuul n=ansible | skipping: [localhost] => (item=logs) 2025-10-06 21:12:01,321 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,331 p=28169 u=zuul n=ansible | TASK [operator_build : Initialize role output cifmw_operator_build_output={{ cifmw_operator_build_output }}, cifmw_operator_build_meta_name={{ cifmw_operator_build_meta_name }}] *** 2025-10-06 21:12:01,331 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.039) 0:02:12.983 ******** 2025-10-06 21:12:01,351 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,359 p=28169 u=zuul n=ansible | TASK [operator_build : Populate operators list with zuul info _raw_params=zuul_info.yml] *** 2025-10-06 21:12:01,360 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.028) 0:02:13.012 ******** 2025-10-06 21:12:01,385 p=28169 u=zuul n=ansible | skipping: [localhost] => (item={'branch': 'main', 'change': '287', 'change_url': 
'https://github.com/openstack-k8s-operators/watcher-operator/pull/287', 'commit_id': '14377136e67c9cd67507a059bfde2f19f140387d', 'patchset': '14377136e67c9cd67507a059bfde2f19f140387d', 'project': {'canonical_hostname': 'github.com', 'canonical_name': 'github.com/openstack-k8s-operators/watcher-operator', 'name': 'openstack-k8s-operators/watcher-operator', 'short_name': 'watcher-operator', 'src_dir': 'src/github.com/openstack-k8s-operators/watcher-operator'}, 'topic': None}) 2025-10-06 21:12:01,386 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,394 p=28169 u=zuul n=ansible | TASK [operator_build : Merge lists of operators operators_list={{ [cifmw_operator_build_operators, zuul_info_operators | default([])] | community.general.lists_mergeby('name') }}] *** 2025-10-06 21:12:01,394 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.034) 0:02:13.046 ******** 2025-10-06 21:12:01,418 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,426 p=28169 u=zuul n=ansible | TASK [operator_build : Get meta_operator src dir from operators_list cifmw_operator_build_meta_src={{ (operators_list | selectattr('name', 'eq', cifmw_operator_build_meta_name) | map(attribute='src') | first ) | default(cifmw_operator_build_meta_src, true) }}] *** 2025-10-06 21:12:01,427 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.032) 0:02:13.079 ******** 2025-10-06 21:12:01,449 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,456 p=28169 u=zuul n=ansible | TASK [operator_build : Adds meta-operator to the list operators_list={{ [operators_list, meta_operator_info] | community.general.lists_mergeby('name') }}] *** 2025-10-06 21:12:01,456 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.029) 0:02:13.109 ******** 2025-10-06 21:12:01,478 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,486 p=28169 u=zuul n=ansible | TASK [operator_build : Clone operator's code when src dir is empty _raw_params=clone.yml] *** 2025-10-06 21:12:01,486 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.029) 0:02:13.138 ******** 2025-10-06 21:12:01,508 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,516 p=28169 u=zuul n=ansible | TASK [operator_build : Building operators _raw_params=build.yml] *************** 2025-10-06 21:12:01,516 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.029) 0:02:13.168 ******** 2025-10-06 21:12:01,538 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,546 p=28169 u=zuul n=ansible | TASK [operator_build : Building meta operator _raw_params=build.yml] *********** 2025-10-06 21:12:01,546 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.030) 0:02:13.199 ******** 2025-10-06 21:12:01,570 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,578 p=28169 u=zuul n=ansible | TASK [operator_build : Gather role output dest={{ cifmw_operator_build_basedir }}/artifacts/custom-operators.yml, content={{ cifmw_operator_build_output | to_nice_yaml }}, mode=0644] *** 2025-10-06 21:12:01,578 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.031) 0:02:13.230 ******** 2025-10-06 21:12:01,599 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,613 p=28169 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | 
default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-10-06 21:12:01,613 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.035) 0:02:13.266 ******** 2025-10-06 21:12:01,674 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:01,684 p=28169 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2025-10-06 21:12:01,684 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.071) 0:02:13.337 ******** 2025-10-06 21:12:01,783 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:01,795 p=28169 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_operator_build _raw_params={{ hook.type }}.yml] *** 2025-10-06 21:12:01,795 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.110) 0:02:13.447 ******** 2025-10-06 21:12:01,890 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,910 p=28169 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-10-06 21:12:01,910 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.115) 0:02:13.563 ******** 2025-10-06 21:12:01,965 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:01,975 p=28169 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] 
*** 2025-10-06 21:12:01,975 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.064) 0:02:13.627 ******** 2025-10-06 21:12:02,072 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:02,081 p=28169 u=zuul n=ansible | TASK [run_hook : Loop on hooks for pre_deploy _raw_params={{ hook.type }}.yml] *** 2025-10-06 21:12:02,082 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:02 +0000 (0:00:00.106) 0:02:13.734 ******** 2025-10-06 21:12:02,204 p=28169 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/run_hook/tasks/playbook.yml for localhost => (item={'name': '80 Kustomize OpenStack CR', 'type': 'playbook', 'source': 'control_plane_horizon.yml'}) 2025-10-06 21:12:02,214 p=28169 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/run_hook/tasks/playbook.yml for localhost => (item={'name': 'Create coo subscription', 'type': 'playbook', 'source': '/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/playbooks/deploy_cluster_observability_operator.yaml'}) 2025-10-06 21:12:02,229 p=28169 u=zuul n=ansible | TASK [run_hook : Set playbook path for 80 Kustomize OpenStack CR cifmw_basedir={{ _bdir }}, hook_name={{ _hook_name }}, playbook_path={{ _play | realpath }}, log_path={{ _bdir }}/logs/{{ step }}_{{ _hook_name }}.log, extra_vars=-e operator_namespace={{ _operator_namespace }} -e namespace={{ _namespace}} {%- if hook.extra_vars is defined and hook.extra_vars|length > 0 -%} {% for key,value in hook.extra_vars.items() -%} {%- if key == 'file' %} -e "@{{ value }}" {%- else %} -e "{{ key }}={{ value }}" {%- endif %} {%- endfor %} {%- endif %}] *** 2025-10-06 21:12:02,229 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:02 +0000 (0:00:00.147) 0:02:13.882 ******** 2025-10-06 21:12:02,274 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:02,283 p=28169 u=zuul n=ansible | TASK [run_hook : Get file stat path={{ playbook_path }}] *********************** 2025-10-06 21:12:02,283 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:02 +0000 (0:00:00.053) 0:02:13.936 ******** 2025-10-06 21:12:02,481 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:02,490 p=28169 u=zuul n=ansible | TASK [run_hook : Fail if playbook doesn't exist msg=Playbook {{ playbook_path }} doesn't seem to exist.] 
*** 2025-10-06 21:12:02,490 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:02 +0000 (0:00:00.206) 0:02:14.142 ******** 2025-10-06 21:12:02,521 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:02,531 p=28169 u=zuul n=ansible | TASK [run_hook : Get parameters files paths={{ (cifmw_basedir, 'artifacts/parameters') | path_join }}, file_type=file, patterns=*.yml] *** 2025-10-06 21:12:02,531 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:02 +0000 (0:00:00.040) 0:02:14.183 ******** 2025-10-06 21:12:02,717 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:02,725 p=28169 u=zuul n=ansible | TASK [run_hook : Add parameters artifacts as extra variables extra_vars={{ extra_vars }} {% for file in cifmw_run_hook_parameters_files.files %} -e "@{{ file.path }}" {%- endfor %}] *** 2025-10-06 21:12:02,725 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:02 +0000 (0:00:00.194) 0:02:14.378 ******** 2025-10-06 21:12:02,766 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:02,774 p=28169 u=zuul n=ansible | TASK [run_hook : Ensure log directory exists path={{ log_path | dirname }}, state=directory, mode=0755] *** 2025-10-06 21:12:02,774 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:02 +0000 (0:00:00.048) 0:02:14.427 ******** 2025-10-06 21:12:02,971 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:02,984 p=28169 u=zuul n=ansible | TASK [run_hook : Ensure artifacts directory exists path={{ cifmw_basedir }}/artifacts, state=directory, mode=0755] *** 2025-10-06 21:12:02,985 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:02 +0000 (0:00:00.210) 0:02:14.637 ******** 2025-10-06 21:12:03,166 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:03,179 p=28169 u=zuul n=ansible | TASK [run_hook : Run hook without retry - 80 Kustomize OpenStack CR] *********** 2025-10-06 21:12:03,179 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:03 +0000 (0:00:00.194) 0:02:14.831 ******** 2025-10-06 21:12:03,242 p=28169 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_003_run_hook_without_retry_80.log 2025-10-06 21:12:04,924 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:12:04,941 p=28169 u=zuul n=ansible | TASK [run_hook : Run hook with retry - 80 Kustomize OpenStack CR] ************** 2025-10-06 21:12:04,942 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:04 +0000 (0:00:01.762) 0:02:16.594 ******** 2025-10-06 21:12:04,965 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:04,973 p=28169 u=zuul n=ansible | TASK [run_hook : Check if we have a file path={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml] *** 2025-10-06 21:12:04,974 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:04 +0000 (0:00:00.031) 0:02:16.626 ******** 2025-10-06 21:12:05,165 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:05,177 p=28169 u=zuul n=ansible | TASK [run_hook : Load generated content in main playbook file={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml] *** 2025-10-06 21:12:05,177 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:05 +0000 (0:00:00.203) 0:02:16.829 ******** 2025-10-06 21:12:05,198 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:05,212 p=28169 u=zuul n=ansible | TASK [run_hook : Set playbook path for Create coo subscription cifmw_basedir={{ _bdir }}, hook_name={{ _hook_name }}, playbook_path={{ _play | realpath }}, log_path={{ _bdir }}/logs/{{ step }}_{{ _hook_name }}.log, extra_vars=-e 
operator_namespace={{ _operator_namespace }} -e namespace={{ _namespace}} {%- if hook.extra_vars is defined and hook.extra_vars|length > 0 -%} {% for key,value in hook.extra_vars.items() -%} {%- if key == 'file' %} -e "@{{ value }}" {%- else %} -e "{{ key }}={{ value }}" {%- endif %} {%- endfor %} {%- endif %}] *** 2025-10-06 21:12:05,212 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:05 +0000 (0:00:00.035) 0:02:16.864 ******** 2025-10-06 21:12:05,255 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:05,267 p=28169 u=zuul n=ansible | TASK [run_hook : Get file stat path={{ playbook_path }}] *********************** 2025-10-06 21:12:05,267 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:05 +0000 (0:00:00.055) 0:02:16.919 ******** 2025-10-06 21:12:05,451 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:05,459 p=28169 u=zuul n=ansible | TASK [run_hook : Fail if playbook doesn't exist msg=Playbook {{ playbook_path }} doesn't seem to exist.] *** 2025-10-06 21:12:05,459 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:05 +0000 (0:00:00.191) 0:02:17.111 ******** 2025-10-06 21:12:05,479 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:05,487 p=28169 u=zuul n=ansible | TASK [run_hook : Get parameters files paths={{ (cifmw_basedir, 'artifacts/parameters') | path_join }}, file_type=file, patterns=*.yml] *** 2025-10-06 21:12:05,487 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:05 +0000 (0:00:00.028) 0:02:17.140 ******** 2025-10-06 21:12:05,662 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:05,682 p=28169 u=zuul n=ansible | TASK [run_hook : Add parameters artifacts as extra variables extra_vars={{ extra_vars }} {% for file in cifmw_run_hook_parameters_files.files %} -e "@{{ file.path }}" {%- endfor %}] *** 2025-10-06 21:12:05,682 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:05 +0000 (0:00:00.194) 0:02:17.334 ******** 2025-10-06 21:12:05,707 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:05,719 p=28169 u=zuul n=ansible | TASK [run_hook : Ensure log directory exists path={{ log_path | dirname }}, state=directory, mode=0755] *** 2025-10-06 21:12:05,720 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:05 +0000 (0:00:00.037) 0:02:17.372 ******** 2025-10-06 21:12:05,901 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:05,924 p=28169 u=zuul n=ansible | TASK [run_hook : Ensure artifacts directory exists path={{ cifmw_basedir }}/artifacts, state=directory, mode=0755] *** 2025-10-06 21:12:05,924 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:05 +0000 (0:00:00.204) 0:02:17.576 ******** 2025-10-06 21:12:06,112 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:06,127 p=28169 u=zuul n=ansible | TASK [run_hook : Run hook without retry - Create coo subscription] ************* 2025-10-06 21:12:06,127 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:06 +0000 (0:00:00.202) 0:02:17.779 ******** 2025-10-06 21:12:06,184 p=28169 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_004_run_hook_without_retry_create.log 2025-10-06 21:12:07,321 p=28169 u=zuul n=ansible | fatal: [localhost]: FAILED! 
=> censored: 'the output has been hidden due to the fact that ''no_log: true'' was specified for this result' changed: true 2025-10-06 21:12:07,322 p=28169 u=zuul n=ansible | NO MORE HOSTS LEFT ************************************************************* 2025-10-06 21:12:07,323 p=28169 u=zuul n=ansible | PLAY RECAP ********************************************************************* 2025-10-06 21:12:07,323 p=28169 u=zuul n=ansible | localhost : ok=147 changed=44 unreachable=0 failed=1 skipped=95 rescued=0 ignored=1 2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:07 +0000 (0:00:01.196) 0:02:18.976 ******** 2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | =============================================================================== 2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | run_hook : Run hook without retry - Download needed tools -------------- 30.81s 2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | ci_setup : Install needed packages ------------------------------------- 25.45s 2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | cert_manager : Wait for cert-manager pods to be ready ------------------ 12.17s 2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | run_hook : Run hook without retry - Fetch nodes facts and save them as parameters -- 10.55s 2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | repo_setup : Initialize python venv and install requirements ------------ 8.40s 2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | ci_setup : Install openshift client ------------------------------------- 5.11s 2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | cert_manager : Install cert-manager from release manifest --------------- 2.50s 2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | run_hook : Run hook without retry - 80 Kustomize OpenStack CR ----------- 1.76s 2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | openshift_setup : Create required namespaces ---------------------------- 1.58s 2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | install_ca : Update ca bundle ------------------------------------------- 1.43s 2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | cert_manager : Install cert-manager cmctl CLI --------------------------- 1.25s 2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | run_hook : Run hook without retry - Create coo subscription ------------- 1.20s 2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | repo_setup : Get repo-setup repository ---------------------------------- 1.18s 2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | ci_setup : Manage directories ------------------------------------------- 1.09s 2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | repo_setup : Make sure git-core package is installed -------------------- 1.00s 2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | openshift_setup : Patch network operator -------------------------------- 0.96s 2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | openshift_setup : Gather network.operator info -------------------------- 0.93s 2025-10-06 21:12:07,325 p=28169 u=zuul n=ansible | Gathering Facts --------------------------------------------------------- 0.93s 2025-10-06 21:12:07,325 p=28169 u=zuul n=ansible | repo_setup : Install repo-setup package --------------------------------- 0.85s 2025-10-06 21:12:07,325 p=28169 u=zuul n=ansible | openshift_setup : Patch samples registry configuration ------------------ 0.77s home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_000_run_hook_without_retry.log0000644000175000017500000001737215071030165031314 
0ustar zuulzuul[WARNING]: Found variable using reserved name: namespace PLAY [Download tools] ********************************************************** TASK [download_tools : Install build dependencies name=['jq', 'skopeo', 'sqlite', 'httpd-tools', 'virt-install', 'gcc', 'python3-jinja2', 'xmlstarlet', 'openssl']] *** Monday 06 October 2025 21:10:47 +0000 (0:00:00.037) 0:00:00.037 ******** changed: [localhost] TASK [download_tools : Set opm download url suffix opm_url_suffix=latest/download] *** Monday 06 October 2025 21:10:51 +0000 (0:00:04.371) 0:00:04.409 ******** skipping: [localhost] TASK [download_tools : Set opm download url suffix opm_url_suffix=download/{{ opm_version }}] *** Monday 06 October 2025 21:10:52 +0000 (0:00:00.028) 0:00:04.437 ******** ok: [localhost] TASK [download_tools : Create $HOME/bin dir path={{ lookup('env', 'HOME') }}/bin, state=directory, mode=0755] *** Monday 06 October 2025 21:10:52 +0000 (0:00:00.030) 0:00:04.467 ******** ok: [localhost] TASK [download_tools : Download opm url=https://github.com/operator-framework/operator-registry/releases/{{ opm_url_suffix }}/linux-amd64-opm, dest={{ lookup('env', 'HOME') }}/bin/opm, mode=0755, timeout=30] *** Monday 06 October 2025 21:10:52 +0000 (0:00:00.298) 0:00:04.766 ******** changed: [localhost] TASK [download_tools : Get version from sdk_version _sdk_version={{ sdk_version | regex_search('v(.*)', '\1') | first }}] *** Monday 06 October 2025 21:10:53 +0000 (0:00:01.000) 0:00:05.766 ******** ok: [localhost] TASK [download_tools : Set operator-sdk file for version < 1.3.0 _operator_sdk_file=operator-sdk-{{ sdk_version }}-x86_64-linux-gnu] *** Monday 06 October 2025 21:10:53 +0000 (0:00:00.040) 0:00:05.807 ******** skipping: [localhost] TASK [download_tools : Set operator-sdk file for version >= 1.3.0 _operator_sdk_file=operator-sdk_linux_amd64] *** Monday 06 October 2025 21:10:53 +0000 (0:00:00.038) 0:00:05.846 ******** ok: [localhost] TASK [download_tools : Download operator-sdk url=https://github.com/operator-framework/operator-sdk/releases/download/{{ sdk_version }}/{{ _operator_sdk_file }}, dest={{ lookup('env', 'HOME') }}/bin/operator-sdk, mode=0755, force=True, timeout=30] *** Monday 06 October 2025 21:10:53 +0000 (0:00:00.060) 0:00:05.906 ******** changed: [localhost] TASK [download_tools : Download and extract kustomize src=https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2F{{ kustomize_version }}/kustomize_{{ kustomize_version }}_linux_amd64.tar.gz, dest={{ lookup('env', 'HOME') }}/bin/, remote_src=True] *** Monday 06 October 2025 21:10:54 +0000 (0:00:01.169) 0:00:07.076 ******** changed: [localhost] TASK [download_tools : Download kubectl url=https://dl.k8s.io/release/{{ kubectl_version }}/bin/linux/amd64/kubectl, dest={{ lookup('env', 'HOME') }}/bin/kubectl, mode=0755, timeout=30] *** Monday 06 October 2025 21:10:56 +0000 (0:00:01.408) 0:00:08.484 ******** ok: [localhost] TASK [download_tools : Download kuttl url=https://github.com/kudobuilder/kuttl/releases/download/v{{ kuttl_version }}/kubectl-kuttl_{{ kuttl_version }}_linux_x86_64, dest={{ lookup('env', 'HOME') }}/bin/kubectl-kuttl, mode=0755, timeout=30] *** Monday 06 October 2025 21:10:56 +0000 (0:00:00.465) 0:00:08.950 ******** changed: [localhost] TASK [download_tools : Download chainsaw src=https://github.com/kyverno/chainsaw/releases/download/v{{ chainsaw_version }}/chainsaw_linux_amd64.tar.gz, dest={{ lookup('env', 'HOME') }}/bin/, remote_src=True, extra_opts=['--exclude', 'README.md', '--exclude', 'LICENSE']] *** Monday 
06 October 2025 21:10:57 +0000 (0:00:00.839) 0:00:09.790 ******** changed: [localhost] TASK [download_tools : Download and extract yq src=https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64.tar.gz, dest={{ lookup('env', 'HOME') }}/bin/, remote_src=True, mode=0755] *** Monday 06 October 2025 21:11:00 +0000 (0:00:03.318) 0:00:13.108 ******** changed: [localhost] TASK [download_tools : Link yq_linux_amd64 as yq src={{ lookup('env', 'HOME') }}/bin/yq_linux_amd64, dest={{ lookup('env', 'HOME') }}/bin/yq, state=link] *** Monday 06 October 2025 21:11:01 +0000 (0:00:01.185) 0:00:14.294 ******** changed: [localhost] TASK [download_tools : Deinstall golang state=absent, name=['golang-bin', 'golang-src', 'golang']] *** Monday 06 October 2025 21:11:02 +0000 (0:00:00.190) 0:00:14.484 ******** ok: [localhost] TASK [download_tools : Delete old go version installed from upstream path={{ item }}, state=absent] *** Monday 06 October 2025 21:11:03 +0000 (0:00:01.101) 0:00:15.585 ******** ok: [localhost] => (item=/usr/local/go) ok: [localhost] => (item=/home/zuul/bin/go) ok: [localhost] => (item=/home/zuul/bin/gofmt) ok: [localhost] => (item=/usr/local/bin/go) ok: [localhost] => (item=/usr/local/bin/gofmt) TASK [download_tools : Download and extract golang src=https://golang.org/dl/go{{ go_version }}.linux-amd64.tar.gz, dest=/usr/local, remote_src=True, extra_opts=['--exclude', 'go/misc', '--exclude', 'go/pkg/linux_amd64_race', '--exclude', 'go/test']] *** Monday 06 October 2025 21:11:04 +0000 (0:00:01.042) 0:00:16.628 ******** changed: [localhost] TASK [download_tools : Set alternatives link to installed go version _raw_params=set -e update-alternatives --install /usr/local/bin/{{ item }} {{ item }} /usr/local/go/bin/{{ item }} 1 ] *** Monday 06 October 2025 21:11:17 +0000 (0:00:12.881) 0:00:29.509 ******** changed: [localhost] => (item=go) changed: [localhost] => (item=gofmt) TASK [download_tools : Clean bash cache msg=When move from rpm to upstream version, make sure to clean bash cache using `hash -d go`] *** Monday 06 October 2025 21:11:17 +0000 (0:00:00.515) 0:00:30.025 ******** ok: [localhost] => msg: When move from rpm to upstream version, make sure to clean bash cache using `hash -d go` PLAY RECAP ********************************************************************* localhost : ok=18 changed=10 unreachable=0 failed=0 skipped=2 rescued=0 ignored=0 Monday 06 October 2025 21:11:17 +0000 (0:00:00.058) 0:00:30.083 ******** =============================================================================== download_tools : Download and extract golang --------------------------- 12.88s download_tools : Install build dependencies ----------------------------- 4.37s download_tools : Download chainsaw -------------------------------------- 3.32s download_tools : Download and extract kustomize ------------------------- 1.41s download_tools : Download and extract yq -------------------------------- 1.19s download_tools : Download operator-sdk ---------------------------------- 1.17s download_tools : Deinstall golang --------------------------------------- 1.10s download_tools : Delete old go version installed from upstream ---------- 1.04s download_tools : Download opm ------------------------------------------- 1.00s download_tools : Download kuttl ----------------------------------------- 0.84s download_tools : Set alternatives link to installed go version ---------- 0.52s download_tools : Download kubectl --------------------------------------- 0.47s download_tools : Create $HOME/bin dir 
----------------------------------- 0.30s download_tools : Link yq_linux_amd64 as yq ------------------------------ 0.19s download_tools : Set operator-sdk file for version >= 1.3.0 ------------- 0.06s download_tools : Clean bash cache --------------------------------------- 0.06s download_tools : Get version from sdk_version --------------------------- 0.04s download_tools : Set operator-sdk file for version < 1.3.0 -------------- 0.04s download_tools : Set opm download url suffix ---------------------------- 0.03s download_tools : Set opm download url suffix ---------------------------- 0.03s home/zuul/zuul-output/logs/ci-framework-data/logs/pre_infra_download_needed_tools.log0000644000175000017500000003040615071030165030413 0ustar zuulzuul2025-10-06 21:10:47,587 p=29044 u=zuul n=ansible | [WARNING]: Found variable using reserved name: namespace 2025-10-06 21:10:47,588 p=29044 u=zuul n=ansible | PLAY [Download tools] ********************************************************** 2025-10-06 21:10:47,622 p=29044 u=zuul n=ansible | TASK [download_tools : Install build dependencies name=['jq', 'skopeo', 'sqlite', 'httpd-tools', 'virt-install', 'gcc', 'python3-jinja2', 'xmlstarlet', 'openssl']] *** 2025-10-06 21:10:47,622 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:10:47 +0000 (0:00:00.037) 0:00:00.037 ******** 2025-10-06 21:10:51,981 p=29044 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:51,993 p=29044 u=zuul n=ansible | TASK [download_tools : Set opm download url suffix opm_url_suffix=latest/download] *** 2025-10-06 21:10:51,993 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:10:51 +0000 (0:00:04.371) 0:00:04.409 ******** 2025-10-06 21:10:52,012 p=29044 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:52,021 p=29044 u=zuul n=ansible | TASK [download_tools : Set opm download url suffix opm_url_suffix=download/{{ opm_version }}] *** 2025-10-06 21:10:52,021 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:10:52 +0000 (0:00:00.028) 0:00:04.437 ******** 2025-10-06 21:10:52,042 p=29044 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:52,051 p=29044 u=zuul n=ansible | TASK [download_tools : Create $HOME/bin dir path={{ lookup('env', 'HOME') }}/bin, state=directory, mode=0755] *** 2025-10-06 21:10:52,052 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:10:52 +0000 (0:00:00.030) 0:00:04.467 ******** 2025-10-06 21:10:52,341 p=29044 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:52,350 p=29044 u=zuul n=ansible | TASK [download_tools : Download opm url=https://github.com/operator-framework/operator-registry/releases/{{ opm_url_suffix }}/linux-amd64-opm, dest={{ lookup('env', 'HOME') }}/bin/opm, mode=0755, timeout=30] *** 2025-10-06 21:10:52,350 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:10:52 +0000 (0:00:00.298) 0:00:04.766 ******** 2025-10-06 21:10:53,330 p=29044 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:53,351 p=29044 u=zuul n=ansible | TASK [download_tools : Get version from sdk_version _sdk_version={{ sdk_version | regex_search('v(.*)', '\1') | first }}] *** 2025-10-06 21:10:53,351 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:10:53 +0000 (0:00:01.000) 0:00:05.766 ******** 2025-10-06 21:10:53,381 p=29044 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:53,392 p=29044 u=zuul n=ansible | TASK [download_tools : Set operator-sdk file for version < 1.3.0 _operator_sdk_file=operator-sdk-{{ sdk_version }}-x86_64-linux-gnu] *** 2025-10-06 21:10:53,392 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:10:53 +0000 
(0:00:00.040) 0:00:05.807 ******** 2025-10-06 21:10:53,420 p=29044 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:53,431 p=29044 u=zuul n=ansible | TASK [download_tools : Set operator-sdk file for version >= 1.3.0 _operator_sdk_file=operator-sdk_linux_amd64] *** 2025-10-06 21:10:53,431 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:10:53 +0000 (0:00:00.038) 0:00:05.846 ******** 2025-10-06 21:10:53,463 p=29044 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:53,491 p=29044 u=zuul n=ansible | TASK [download_tools : Download operator-sdk url=https://github.com/operator-framework/operator-sdk/releases/download/{{ sdk_version }}/{{ _operator_sdk_file }}, dest={{ lookup('env', 'HOME') }}/bin/operator-sdk, mode=0755, force=True, timeout=30] *** 2025-10-06 21:10:53,491 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:10:53 +0000 (0:00:00.060) 0:00:05.906 ******** 2025-10-06 21:10:54,647 p=29044 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:54,660 p=29044 u=zuul n=ansible | TASK [download_tools : Download and extract kustomize src=https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2F{{ kustomize_version }}/kustomize_{{ kustomize_version }}_linux_amd64.tar.gz, dest={{ lookup('env', 'HOME') }}/bin/, remote_src=True] *** 2025-10-06 21:10:54,660 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:10:54 +0000 (0:00:01.169) 0:00:07.076 ******** 2025-10-06 21:10:56,060 p=29044 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:56,069 p=29044 u=zuul n=ansible | TASK [download_tools : Download kubectl url=https://dl.k8s.io/release/{{ kubectl_version }}/bin/linux/amd64/kubectl, dest={{ lookup('env', 'HOME') }}/bin/kubectl, mode=0755, timeout=30] *** 2025-10-06 21:10:56,069 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:10:56 +0000 (0:00:01.408) 0:00:08.484 ******** 2025-10-06 21:10:56,525 p=29044 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:56,534 p=29044 u=zuul n=ansible | TASK [download_tools : Download kuttl url=https://github.com/kudobuilder/kuttl/releases/download/v{{ kuttl_version }}/kubectl-kuttl_{{ kuttl_version }}_linux_x86_64, dest={{ lookup('env', 'HOME') }}/bin/kubectl-kuttl, mode=0755, timeout=30] *** 2025-10-06 21:10:56,534 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:10:56 +0000 (0:00:00.465) 0:00:08.950 ******** 2025-10-06 21:10:57,365 p=29044 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:57,374 p=29044 u=zuul n=ansible | TASK [download_tools : Download chainsaw src=https://github.com/kyverno/chainsaw/releases/download/v{{ chainsaw_version }}/chainsaw_linux_amd64.tar.gz, dest={{ lookup('env', 'HOME') }}/bin/, remote_src=True, extra_opts=['--exclude', 'README.md', '--exclude', 'LICENSE']] *** 2025-10-06 21:10:57,374 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:10:57 +0000 (0:00:00.839) 0:00:09.790 ******** 2025-10-06 21:11:00,684 p=29044 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:00,693 p=29044 u=zuul n=ansible | TASK [download_tools : Download and extract yq src=https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64.tar.gz, dest={{ lookup('env', 'HOME') }}/bin/, remote_src=True, mode=0755] *** 2025-10-06 21:11:00,693 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:11:00 +0000 (0:00:03.318) 0:00:13.108 ******** 2025-10-06 21:11:01,870 p=29044 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:01,878 p=29044 u=zuul n=ansible | TASK [download_tools : Link yq_linux_amd64 as yq src={{ lookup('env', 'HOME') }}/bin/yq_linux_amd64, dest={{ lookup('env', 
'HOME') }}/bin/yq, state=link] *** 2025-10-06 21:11:01,878 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:11:01 +0000 (0:00:01.185) 0:00:14.294 ******** 2025-10-06 21:11:02,059 p=29044 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:02,069 p=29044 u=zuul n=ansible | TASK [download_tools : Deinstall golang state=absent, name=['golang-bin', 'golang-src', 'golang']] *** 2025-10-06 21:11:02,069 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:11:02 +0000 (0:00:00.190) 0:00:14.484 ******** 2025-10-06 21:11:03,162 p=29044 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:03,170 p=29044 u=zuul n=ansible | TASK [download_tools : Delete old go version installed from upstream path={{ item }}, state=absent] *** 2025-10-06 21:11:03,170 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:11:03 +0000 (0:00:01.101) 0:00:15.585 ******** 2025-10-06 21:11:03,367 p=29044 u=zuul n=ansible | ok: [localhost] => (item=/usr/local/go) 2025-10-06 21:11:03,582 p=29044 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/bin/go) 2025-10-06 21:11:03,773 p=29044 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/bin/gofmt) 2025-10-06 21:11:03,987 p=29044 u=zuul n=ansible | ok: [localhost] => (item=/usr/local/bin/go) 2025-10-06 21:11:04,192 p=29044 u=zuul n=ansible | ok: [localhost] => (item=/usr/local/bin/gofmt) 2025-10-06 21:11:04,212 p=29044 u=zuul n=ansible | TASK [download_tools : Download and extract golang src=https://golang.org/dl/go{{ go_version }}.linux-amd64.tar.gz, dest=/usr/local, remote_src=True, extra_opts=['--exclude', 'go/misc', '--exclude', 'go/pkg/linux_amd64_race', '--exclude', 'go/test']] *** 2025-10-06 21:11:04,212 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:11:04 +0000 (0:00:01.042) 0:00:16.628 ******** 2025-10-06 21:11:17,084 p=29044 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:17,093 p=29044 u=zuul n=ansible | TASK [download_tools : Set alternatives link to installed go version _raw_params=set -e update-alternatives --install /usr/local/bin/{{ item }} {{ item }} /usr/local/go/bin/{{ item }} 1 ] *** 2025-10-06 21:11:17,093 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:11:17 +0000 (0:00:12.881) 0:00:29.509 ******** 2025-10-06 21:11:17,420 p=29044 u=zuul n=ansible | changed: [localhost] => (item=go) 2025-10-06 21:11:17,600 p=29044 u=zuul n=ansible | changed: [localhost] => (item=gofmt) 2025-10-06 21:11:17,609 p=29044 u=zuul n=ansible | TASK [download_tools : Clean bash cache msg=When move from rpm to upstream version, make sure to clean bash cache using `hash -d go`] *** 2025-10-06 21:11:17,609 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:11:17 +0000 (0:00:00.515) 0:00:30.025 ******** 2025-10-06 21:11:17,624 p=29044 u=zuul n=ansible | ok: [localhost] => msg: When move from rpm to upstream version, make sure to clean bash cache using `hash -d go` 2025-10-06 21:11:17,667 p=29044 u=zuul n=ansible | PLAY RECAP ********************************************************************* 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | localhost : ok=18 changed=10 unreachable=0 failed=0 skipped=2 rescued=0 ignored=0 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | Monday 06 October 2025 21:11:17 +0000 (0:00:00.058) 0:00:30.083 ******** 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | =============================================================================== 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | download_tools : Download and extract golang --------------------------- 12.88s 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | 
download_tools : Install build dependencies ----------------------------- 4.37s 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | download_tools : Download chainsaw -------------------------------------- 3.32s 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | download_tools : Download and extract kustomize ------------------------- 1.41s 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | download_tools : Download and extract yq -------------------------------- 1.19s 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | download_tools : Download operator-sdk ---------------------------------- 1.17s 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | download_tools : Deinstall golang --------------------------------------- 1.10s 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | download_tools : Delete old go version installed from upstream ---------- 1.04s 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | download_tools : Download opm ------------------------------------------- 1.00s 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | download_tools : Download kuttl ----------------------------------------- 0.84s 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | download_tools : Set alternatives link to installed go version ---------- 0.52s 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | download_tools : Download kubectl --------------------------------------- 0.47s 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | download_tools : Create $HOME/bin dir ----------------------------------- 0.30s 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | download_tools : Link yq_linux_amd64 as yq ------------------------------ 0.19s 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | download_tools : Set operator-sdk file for version >= 1.3.0 ------------- 0.06s 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | download_tools : Clean bash cache --------------------------------------- 0.06s 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | download_tools : Get version from sdk_version --------------------------- 0.04s 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | download_tools : Set operator-sdk file for version < 1.3.0 -------------- 0.04s 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | download_tools : Set opm download url suffix ---------------------------- 0.03s 2025-10-06 21:11:17,668 p=29044 u=zuul n=ansible | download_tools : Set opm download url suffix ---------------------------- 0.03s home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_001_fetch_openshift.log0000644000175000017500000000035215071030170027613 0ustar zuulzuulWARNING: Using insecure TLS client config. Setting this option is not supported! Login successful. You have access to 64 projects, the list has been suppressed. You can list all projects with 'oc projects' Using project "default". 
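The download_tools task banners above (opm, operator-sdk, kustomize, kubectl, kuttl, chainsaw, yq, golang) all follow the same pattern: a versioned GitHub release URL rendered from a role variable, fetched with get_url or unarchive into $HOME/bin or /usr/local. A minimal sketch of that pattern, assuming opm_version and kustomize_version are supplied by the role defaults; the variable names mirror the task banners in the log but this is not the role's literal task file:

    # Sketch only: reproduces the download pattern visible in the
    # pre_infra_download_needed_tools.log task banners above.
    - name: Download opm
      ansible.builtin.get_url:
        url: "https://github.com/operator-framework/operator-registry/releases/download/{{ opm_version }}/linux-amd64-opm"
        dest: "{{ lookup('env', 'HOME') }}/bin/opm"
        mode: "0755"
        timeout: 30

    - name: Download and extract kustomize
      ansible.builtin.unarchive:
        src: "https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2F{{ kustomize_version }}/kustomize_{{ kustomize_version }}_linux_amd64.tar.gz"
        dest: "{{ lookup('env', 'HOME') }}/bin/"
        remote_src: true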
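The run_hook tasks earlier in this log ("Loop on hooks for post_infra", "Loop on hooks for pre_deploy") iterate over a list of hook mappings; each item carries a name, a type (playbook), a source playbook, and optionally an inventory or extra_vars, and the role resolves the playbook path, appends -e arguments from the parameters artifacts, and runs it with or without retry. A sketch of that hook list, reconstructed from the loop items shown above; the top-level variable name is an assumption, since only the per-hook fields appear in the log:

    # Illustrative shape of the hooks consumed by run_hook; field values are
    # taken from the "Loop on hooks for pre_deploy" items in the log above,
    # the "pre_deploy_hooks" key name is assumed for illustration only.
    pre_deploy_hooks:
      - name: 80 Kustomize OpenStack CR
        type: playbook
        source: control_plane_horizon.yml
      - name: Create coo subscription
        type: playbook
        source: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/playbooks/deploy_cluster_observability_operator.yaml

The fatal "Run hook without retry - Create coo subscription" task at the end of the deploy log corresponds to the second entry; its output is censored in the PLAY RECAP because the hook runs with no_log: true, so the failure details have to be read from the per-hook log file referenced just before it (ci_script_004_run_hook_without_retry_create.log).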
home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_002_run_hook_without_retry_fetch.log0000644000175000017500000002714715071030237032470 0ustar zuulzuul[WARNING]: Found variable using reserved name: namespace PLAY [Sync repos for controller to compute for periodic jobs and gating repo] *** TASK [Gathering Facts ] ******************************************************** Monday 06 October 2025 21:11:49 +0000 (0:00:00.010) 0:00:00.010 ******** ok: [compute-1] ok: [compute-0] TASK [Check for gating repo on controller path={{ cifmw_basedir }}/artifacts/repositories/gating.repo] *** Monday 06 October 2025 21:11:50 +0000 (0:00:01.412) 0:00:01.423 ******** ok: [compute-1 -> controller(38.102.83.51)] ok: [compute-0 -> controller(38.102.83.51)] TASK [Copy repositories from controller to computes dest=/etc/yum.repos.d/, src={{ cifmw_basedir }}/artifacts/repositories/, mode=0755] *** Monday 06 October 2025 21:11:51 +0000 (0:00:00.655) 0:00:02.078 ******** changed: [compute-1] changed: [compute-0] PLAY [Build dataset hook] ****************************************************** TASK [Load parameters dir={{ item }}, ignore_unknown_extensions=True, extensions=['yaml', 'yml']] *** Monday 06 October 2025 21:11:56 +0000 (0:00:05.026) 0:00:07.104 ******** ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/parameters) ok: [localhost] => (item=/etc/ci/env) TASK [Ensure CRC hostname is set _crc_hostname={{ cifmw_crc_hostname | default('crc') }}] *** Monday 06 October 2025 21:11:56 +0000 (0:00:00.096) 0:00:07.200 ******** ok: [localhost] TASK [Check we have some compute in inventory computes_len={{ groups['computes'] | default([]) | length }}] *** Monday 06 October 2025 21:11:56 +0000 (0:00:00.066) 0:00:07.267 ******** ok: [localhost] TASK [Ensure that the isolated net was configured for crc that=['crc_ci_bootstrap_networks_out is defined', 'crc_ci_bootstrap_networks_out[_crc_hostname] is defined', "crc_ci_bootstrap_networks_out[_crc_hostname]['default'] is defined"]] *** Monday 06 October 2025 21:11:56 +0000 (0:00:00.045) 0:00:07.313 ******** ok: [localhost] => changed: false msg: All assertions passed TASK [Ensure we have needed bits for compute when needed that=['crc_ci_bootstrap_networks_out[_first_compute] is defined', "crc_ci_bootstrap_networks_out[_first_compute]['default'] is defined"]] *** Monday 06 October 2025 21:11:56 +0000 (0:00:00.041) 0:00:07.354 ******** ok: [localhost] => changed: false msg: All assertions passed TASK [Set facts for further usage within the framework cifmw_edpm_prepare_extra_vars={'NNCP_INTERFACE': '{{ crc_ci_bootstrap_networks_out[_crc_hostname].default.iface }}', 'NNCP_DNS_SERVER': "{{\n cifmw_nncp_dns_server |\n default(crc_ci_bootstrap_networks_out[_crc_hostname].default.ip) |\n split('/') | first\n}}", 'NETWORK_MTU': '{{ crc_ci_bootstrap_networks_out[_crc_hostname].default.mtu }}'}] *** Monday 06 October 2025 21:11:56 +0000 (0:00:00.049) 0:00:07.404 ******** ok: [localhost] TASK [Ensure the kustomizations dirs exists path={{ cifmw_basedir }}/artifacts/manifests/kustomizations/{{ item }}, state=directory, mode=0755] *** Monday 06 October 2025 21:11:56 +0000 (0:00:00.041) 0:00:07.445 ******** changed: [localhost] => (item=dataplane) changed: [localhost] => (item=controlplane) TASK [Create OpenStackControlPlane CR Kustomization dest={{ cifmw_basedir }}/artifacts/manifests/kustomizations/controlplane/99-kustomization.yaml, content=apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization resources: namespace: {{ namespace }} patches: - target: kind: 
OpenStackControlPlane patch: |- - op: replace path: /spec/dns/template/options value: [ { "key": "server", "values": [ "192.168.122.10" ] }, { "key": "no-negcache", "values": [] } ], mode=0644] *** Monday 06 October 2025 21:11:57 +0000 (0:00:00.442) 0:00:07.887 ******** changed: [localhost] TASK [Set specific fact for compute accesses cifmw_edpm_deploy_extra_vars={{ edpm_install_yamls_vars }}] *** Monday 06 October 2025 21:11:57 +0000 (0:00:00.447) 0:00:08.334 ******** ok: [localhost] TASK [Create EDPM CR Kustomization mode=0644, dest={{ cifmw_basedir }}/artifacts/manifests/kustomizations/dataplane/99-kustomization.yaml, content=apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization resources: namespace: {{ namespace }} patches: - target: kind: OpenStackDataPlaneNodeSet patch: |- {% for compute_node in groups['computes'] %} - op: replace path: /spec/nodes/edpm-{{ compute_node }}/hostName value: "{{compute_node}}" {% endfor %} - op: replace path: /spec/nodeTemplate/ansible/ansibleVars/neutron_public_interface_name value: "{{ crc_ci_bootstrap_networks_out[_first_compute].default.iface | default('') }}" {% for compute_node in groups['computes'] %} - op: replace path: /spec/nodes/edpm-{{ compute_node }}/networks/0/defaultRoute value: false {% endfor %} {% for compute_node in groups['computes'] if compute_node != _first_compute %} - op: replace path: /spec/nodes/edpm-{{ compute_node }}/ansible/ansibleHost value: >- {{ crc_ci_bootstrap_networks_out[compute_node].default.ip4 | default(crc_ci_bootstrap_networks_out[compute_node].default.ip) | ansible.utils.ipaddr('address') }} - op: replace path: /spec/nodes/edpm-{{ compute_node }}/networks/0/fixedIP value: >- {{ crc_ci_bootstrap_networks_out[compute_node].default.ip4 | default(crc_ci_bootstrap_networks_out[compute_node].default.ip) | ansible.utils.ipaddr('address') }} {% endfor %} - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_os_net_config_mappings value: net_config_data_lookup: edpm-compute: nic2: "{{ crc_ci_bootstrap_networks_out[_first_compute].default.iface | default('ens7') }}" - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_network_config_debug value: true - op: add path: /spec/env value: {} - op: add path: /spec/env value: - name: "ANSIBLE_VERBOSITY" value: "2" - op: replace path: /spec/nodeTemplate/ansible/ansibleVars/edpm_network_config_template value: |- {%- raw %} --- {% set mtu_list = [ctlplane_mtu] %} {% for network in nodeset_networks %} {% set _ = mtu_list.append(lookup('vars', networks_lower[network] ~ '_mtu')) %} {%- endfor %} {% set min_viable_mtu = mtu_list | max %} network_config: - type: interface name: nic1 use_dhcp: true mtu: {{ min_viable_mtu }} - type: ovs_bridge name: {{ neutron_physical_bridge_name }} mtu: {{ min_viable_mtu }} use_dhcp: false dns_servers: {{ ctlplane_dns_nameservers }} domain: {{ dns_search_domains }} addresses: - ip_netmask: {{ ctlplane_ip }}/{{ ctlplane_cidr }} routes: {{ ctlplane_host_routes }} members: - type: interface name: nic2 mtu: {{ min_viable_mtu }} # force the MAC address of the bridge to this interface primary: true {% if edpm_network_config_nmstate | bool %} # this ovs_extra configuration fixes OSPRH-17551, but it will be not needed when FDP-1472 is resolved ovs_extra: - "set interface eth1 external-ids:ovn-egress-iface=true" {% endif %} {% for network in nodeset_networks %} - type: vlan mtu: {{ lookup('vars', networks_lower[network] ~ '_mtu') }} vlan_id: {{ lookup('vars', networks_lower[network] ~ '_vlan_id') }} addresses: - ip_netmask: {{ lookup('vars', 
networks_lower[network] ~ '_ip') }}/{{ lookup('vars', networks_lower[network] ~ '_cidr') }} routes: {{ lookup('vars', networks_lower[network] ~ '_host_routes') }} {% endfor %} {% endraw %} - op: replace path: /spec/nodeTemplate/ansible/ansibleUser value: "{{ hostvars[_first_compute].ansible_user | default('zuul') }}" - op: replace path: /spec/nodeTemplate/ansible/ansibleVars/ctlplane_dns_nameservers value: {% for dns_server in dns_servers %} - "{{ dns_server }}" {% endfor %} {% if content_provider_registry_ip is defined %} - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_container_registry_insecure_registries value: ["{{ content_provider_registry_ip }}:5001"] {% endif %} - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_sshd_allowed_ranges value: ["0.0.0.0/0"] {% if cifmw_hook_fetch_compute_facts_edpm_cmd is defined %} - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_bootstrap_command value: |- {{ cifmw_hook_fetch_compute_facts_edpm_cmd | indent( width=8) }} {% endif %} {% if cifmw_edpm_telemetry_enabled_exporters is defined and cifmw_edpm_telemetry_enabled_exporters | length > 0 %} - op: replace path: /spec/nodeTemplate/ansible/ansibleVars/edpm_telemetry_enabled_exporters value: {% for exporter in cifmw_edpm_telemetry_enabled_exporters %} - "{{ exporter }}" {% endfor %} {% endif %}] *** Monday 06 October 2025 21:11:57 +0000 (0:00:00.131) 0:00:08.465 ******** changed: [localhost] TASK [Ensure we know about the private host keys _raw_params=ssh-keyscan {{ cifmw_edpm_deploy_extra_vars.DATAPLANE_COMPUTE_IP }} >> ~/.ssh/known_hosts ] *** Monday 06 October 2025 21:11:58 +0000 (0:00:00.537) 0:00:09.003 ******** changed: [localhost] TASK [Save compute info dest={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml, content={{ file_content | to_nice_yaml }}, mode=0644] *** Monday 06 October 2025 21:11:58 +0000 (0:00:00.372) 0:00:09.376 ******** changed: [localhost] PLAY RECAP ********************************************************************* compute-0 : ok=3 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 compute-1 : ok=3 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 localhost : ok=12 changed=5 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 Monday 06 October 2025 21:11:59 +0000 (0:00:00.418) 0:00:09.794 ******** =============================================================================== Copy repositories from controller to computes --------------------------- 5.03s Gathering Facts --------------------------------------------------------- 1.41s Check for gating repo on controller ------------------------------------- 0.66s Create EDPM CR Kustomization -------------------------------------------- 0.54s Create OpenStackControlPlane CR Kustomization --------------------------- 0.45s Ensure the kustomizations dirs exists ----------------------------------- 0.44s Save compute info ------------------------------------------------------- 0.42s Ensure we know about the private host keys ------------------------------ 0.37s Set specific fact for compute accesses ---------------------------------- 0.13s Load parameters --------------------------------------------------------- 0.10s Ensure CRC hostname is set ---------------------------------------------- 0.07s Ensure we have needed bits for compute when needed ---------------------- 0.05s Check we have some compute in inventory --------------------------------- 0.05s Ensure that the isolated net was configured for crc --------------------- 0.04s Set facts for further usage 
within the framework ------------------------ 0.04s home/zuul/zuul-output/logs/ci-framework-data/logs/post_infra_fetch_nodes_facts_and_save_the.log0000644000175000017500000003642515071030237032427 0ustar zuulzuul2025-10-06 21:11:49,369 p=30221 u=zuul n=ansible | [WARNING]: Found variable using reserved name: namespace 2025-10-06 21:11:49,369 p=30221 u=zuul n=ansible | PLAY [Sync repos for controller to compute for periodic jobs and gating repo] *** 2025-10-06 21:11:49,378 p=30221 u=zuul n=ansible | TASK [Gathering Facts ] ******************************************************** 2025-10-06 21:11:49,378 p=30221 u=zuul n=ansible | Monday 06 October 2025 21:11:49 +0000 (0:00:00.010) 0:00:00.010 ******** 2025-10-06 21:11:50,739 p=30221 u=zuul n=ansible | ok: [compute-1] 2025-10-06 21:11:50,750 p=30221 u=zuul n=ansible | ok: [compute-0] 2025-10-06 21:11:50,790 p=30221 u=zuul n=ansible | TASK [Check for gating repo on controller path={{ cifmw_basedir }}/artifacts/repositories/gating.repo] *** 2025-10-06 21:11:50,790 p=30221 u=zuul n=ansible | Monday 06 October 2025 21:11:50 +0000 (0:00:01.412) 0:00:01.423 ******** 2025-10-06 21:11:51,425 p=30221 u=zuul n=ansible | ok: [compute-1 -> controller(38.102.83.51)] 2025-10-06 21:11:51,440 p=30221 u=zuul n=ansible | ok: [compute-0 -> controller(38.102.83.51)] 2025-10-06 21:11:51,445 p=30221 u=zuul n=ansible | TASK [Copy repositories from controller to computes dest=/etc/yum.repos.d/, src={{ cifmw_basedir }}/artifacts/repositories/, mode=0755] *** 2025-10-06 21:11:51,446 p=30221 u=zuul n=ansible | Monday 06 October 2025 21:11:51 +0000 (0:00:00.655) 0:00:02.078 ******** 2025-10-06 21:11:55,560 p=30221 u=zuul n=ansible | changed: [compute-1] 2025-10-06 21:11:56,400 p=30221 u=zuul n=ansible | changed: [compute-0] 2025-10-06 21:11:56,442 p=30221 u=zuul n=ansible | PLAY [Build dataset hook] ****************************************************** 2025-10-06 21:11:56,472 p=30221 u=zuul n=ansible | TASK [Load parameters dir={{ item }}, ignore_unknown_extensions=True, extensions=['yaml', 'yml']] *** 2025-10-06 21:11:56,472 p=30221 u=zuul n=ansible | Monday 06 October 2025 21:11:56 +0000 (0:00:05.026) 0:00:07.104 ******** 2025-10-06 21:11:56,550 p=30221 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/parameters) 2025-10-06 21:11:56,553 p=30221 u=zuul n=ansible | ok: [localhost] => (item=/etc/ci/env) 2025-10-06 21:11:56,568 p=30221 u=zuul n=ansible | TASK [Ensure CRC hostname is set _crc_hostname={{ cifmw_crc_hostname | default('crc') }}] *** 2025-10-06 21:11:56,568 p=30221 u=zuul n=ansible | Monday 06 October 2025 21:11:56 +0000 (0:00:00.096) 0:00:07.200 ******** 2025-10-06 21:11:56,605 p=30221 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:56,634 p=30221 u=zuul n=ansible | TASK [Check we have some compute in inventory computes_len={{ groups['computes'] | default([]) | length }}] *** 2025-10-06 21:11:56,634 p=30221 u=zuul n=ansible | Monday 06 October 2025 21:11:56 +0000 (0:00:00.066) 0:00:07.267 ******** 2025-10-06 21:11:56,671 p=30221 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:56,680 p=30221 u=zuul n=ansible | TASK [Ensure that the isolated net was configured for crc that=['crc_ci_bootstrap_networks_out is defined', 'crc_ci_bootstrap_networks_out[_crc_hostname] is defined', "crc_ci_bootstrap_networks_out[_crc_hostname]['default'] is defined"]] *** 2025-10-06 21:11:56,680 p=30221 u=zuul n=ansible | Monday 06 October 2025 21:11:56 +0000 (0:00:00.045) 0:00:07.313 ******** 2025-10-06 21:11:56,712 p=30221 u=zuul n=ansible 
| ok: [localhost] => changed: false msg: All assertions passed 2025-10-06 21:11:56,721 p=30221 u=zuul n=ansible | TASK [Ensure we have needed bits for compute when needed that=['crc_ci_bootstrap_networks_out[_first_compute] is defined', "crc_ci_bootstrap_networks_out[_first_compute]['default'] is defined"]] *** 2025-10-06 21:11:56,722 p=30221 u=zuul n=ansible | Monday 06 October 2025 21:11:56 +0000 (0:00:00.041) 0:00:07.354 ******** 2025-10-06 21:11:56,761 p=30221 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2025-10-06 21:11:56,771 p=30221 u=zuul n=ansible | TASK [Set facts for further usage within the framework cifmw_edpm_prepare_extra_vars={'NNCP_INTERFACE': '{{ crc_ci_bootstrap_networks_out[_crc_hostname].default.iface }}', 'NNCP_DNS_SERVER': "{{\n cifmw_nncp_dns_server |\n default(crc_ci_bootstrap_networks_out[_crc_hostname].default.ip) |\n split('/') | first\n}}", 'NETWORK_MTU': '{{ crc_ci_bootstrap_networks_out[_crc_hostname].default.mtu }}'}] *** 2025-10-06 21:11:56,771 p=30221 u=zuul n=ansible | Monday 06 October 2025 21:11:56 +0000 (0:00:00.049) 0:00:07.404 ******** 2025-10-06 21:11:56,803 p=30221 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:56,812 p=30221 u=zuul n=ansible | TASK [Ensure the kustomizations dirs exists path={{ cifmw_basedir }}/artifacts/manifests/kustomizations/{{ item }}, state=directory, mode=0755] *** 2025-10-06 21:11:56,812 p=30221 u=zuul n=ansible | Monday 06 October 2025 21:11:56 +0000 (0:00:00.041) 0:00:07.445 ******** 2025-10-06 21:11:57,089 p=30221 u=zuul n=ansible | changed: [localhost] => (item=dataplane) 2025-10-06 21:11:57,244 p=30221 u=zuul n=ansible | changed: [localhost] => (item=controlplane) 2025-10-06 21:11:57,254 p=30221 u=zuul n=ansible | TASK [Create OpenStackControlPlane CR Kustomization dest={{ cifmw_basedir }}/artifacts/manifests/kustomizations/controlplane/99-kustomization.yaml, content=apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization resources: namespace: {{ namespace }} patches: - target: kind: OpenStackControlPlane patch: |- - op: replace path: /spec/dns/template/options value: [ { "key": "server", "values": [ "192.168.122.10" ] }, { "key": "no-negcache", "values": [] } ], mode=0644] *** 2025-10-06 21:11:57,254 p=30221 u=zuul n=ansible | Monday 06 October 2025 21:11:57 +0000 (0:00:00.442) 0:00:07.887 ******** 2025-10-06 21:11:57,680 p=30221 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:57,701 p=30221 u=zuul n=ansible | TASK [Set specific fact for compute accesses cifmw_edpm_deploy_extra_vars={{ edpm_install_yamls_vars }}] *** 2025-10-06 21:11:57,701 p=30221 u=zuul n=ansible | Monday 06 October 2025 21:11:57 +0000 (0:00:00.447) 0:00:08.334 ******** 2025-10-06 21:11:57,798 p=30221 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:57,833 p=30221 u=zuul n=ansible | TASK [Create EDPM CR Kustomization mode=0644, dest={{ cifmw_basedir }}/artifacts/manifests/kustomizations/dataplane/99-kustomization.yaml, content=apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization resources: namespace: {{ namespace }} patches: - target: kind: OpenStackDataPlaneNodeSet patch: |- {% for compute_node in groups['computes'] %} - op: replace path: /spec/nodes/edpm-{{ compute_node }}/hostName value: "{{compute_node}}" {% endfor %} - op: replace path: /spec/nodeTemplate/ansible/ansibleVars/neutron_public_interface_name value: "{{ crc_ci_bootstrap_networks_out[_first_compute].default.iface | default('') }}" {% for compute_node in groups['computes'] %} - op: replace path: 
/spec/nodes/edpm-{{ compute_node }}/networks/0/defaultRoute value: false {% endfor %} {% for compute_node in groups['computes'] if compute_node != _first_compute %} - op: replace path: /spec/nodes/edpm-{{ compute_node }}/ansible/ansibleHost value: >- {{ crc_ci_bootstrap_networks_out[compute_node].default.ip4 | default(crc_ci_bootstrap_networks_out[compute_node].default.ip) | ansible.utils.ipaddr('address') }} - op: replace path: /spec/nodes/edpm-{{ compute_node }}/networks/0/fixedIP value: >- {{ crc_ci_bootstrap_networks_out[compute_node].default.ip4 | default(crc_ci_bootstrap_networks_out[compute_node].default.ip) | ansible.utils.ipaddr('address') }} {% endfor %} - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_os_net_config_mappings value: net_config_data_lookup: edpm-compute: nic2: "{{ crc_ci_bootstrap_networks_out[_first_compute].default.iface | default('ens7') }}" - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_network_config_debug value: true - op: add path: /spec/env value: {} - op: add path: /spec/env value: - name: "ANSIBLE_VERBOSITY" value: "2" - op: replace path: /spec/nodeTemplate/ansible/ansibleVars/edpm_network_config_template value: |- {%- raw %} --- {% set mtu_list = [ctlplane_mtu] %} {% for network in nodeset_networks %} {% set _ = mtu_list.append(lookup('vars', networks_lower[network] ~ '_mtu')) %} {%- endfor %} {% set min_viable_mtu = mtu_list | max %} network_config: - type: interface name: nic1 use_dhcp: true mtu: {{ min_viable_mtu }} - type: ovs_bridge name: {{ neutron_physical_bridge_name }} mtu: {{ min_viable_mtu }} use_dhcp: false dns_servers: {{ ctlplane_dns_nameservers }} domain: {{ dns_search_domains }} addresses: - ip_netmask: {{ ctlplane_ip }}/{{ ctlplane_cidr }} routes: {{ ctlplane_host_routes }} members: - type: interface name: nic2 mtu: {{ min_viable_mtu }} # force the MAC address of the bridge to this interface primary: true {% if edpm_network_config_nmstate | bool %} # this ovs_extra configuration fixes OSPRH-17551, but it will be not needed when FDP-1472 is resolved ovs_extra: - "set interface eth1 external-ids:ovn-egress-iface=true" {% endif %} {% for network in nodeset_networks %} - type: vlan mtu: {{ lookup('vars', networks_lower[network] ~ '_mtu') }} vlan_id: {{ lookup('vars', networks_lower[network] ~ '_vlan_id') }} addresses: - ip_netmask: {{ lookup('vars', networks_lower[network] ~ '_ip') }}/{{ lookup('vars', networks_lower[network] ~ '_cidr') }} routes: {{ lookup('vars', networks_lower[network] ~ '_host_routes') }} {% endfor %} {% endraw %} - op: replace path: /spec/nodeTemplate/ansible/ansibleUser value: "{{ hostvars[_first_compute].ansible_user | default('zuul') }}" - op: replace path: /spec/nodeTemplate/ansible/ansibleVars/ctlplane_dns_nameservers value: {% for dns_server in dns_servers %} - "{{ dns_server }}" {% endfor %} {% if content_provider_registry_ip is defined %} - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_container_registry_insecure_registries value: ["{{ content_provider_registry_ip }}:5001"] {% endif %} - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_sshd_allowed_ranges value: ["0.0.0.0/0"] {% if cifmw_hook_fetch_compute_facts_edpm_cmd is defined %} - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_bootstrap_command value: |- {{ cifmw_hook_fetch_compute_facts_edpm_cmd | indent( width=8) }} {% endif %} {% if cifmw_edpm_telemetry_enabled_exporters is defined and cifmw_edpm_telemetry_enabled_exporters | length > 0 %} - op: replace path: 
/spec/nodeTemplate/ansible/ansibleVars/edpm_telemetry_enabled_exporters value: {% for exporter in cifmw_edpm_telemetry_enabled_exporters %} - "{{ exporter }}" {% endfor %} {% endif %}] *** 2025-10-06 21:11:57,833 p=30221 u=zuul n=ansible | Monday 06 October 2025 21:11:57 +0000 (0:00:00.131) 0:00:08.465 ******** 2025-10-06 21:11:58,361 p=30221 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:58,370 p=30221 u=zuul n=ansible | TASK [Ensure we know about the private host keys _raw_params=ssh-keyscan {{ cifmw_edpm_deploy_extra_vars.DATAPLANE_COMPUTE_IP }} >> ~/.ssh/known_hosts ] *** 2025-10-06 21:11:58,371 p=30221 u=zuul n=ansible | Monday 06 October 2025 21:11:58 +0000 (0:00:00.537) 0:00:09.003 ******** 2025-10-06 21:11:58,733 p=30221 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:58,743 p=30221 u=zuul n=ansible | TASK [Save compute info dest={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml, content={{ file_content | to_nice_yaml }}, mode=0644] *** 2025-10-06 21:11:58,743 p=30221 u=zuul n=ansible | Monday 06 October 2025 21:11:58 +0000 (0:00:00.372) 0:00:09.376 ******** 2025-10-06 21:11:59,133 p=30221 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:59,162 p=30221 u=zuul n=ansible | PLAY RECAP ********************************************************************* 2025-10-06 21:11:59,162 p=30221 u=zuul n=ansible | compute-0 : ok=3 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 2025-10-06 21:11:59,162 p=30221 u=zuul n=ansible | compute-1 : ok=3 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 2025-10-06 21:11:59,162 p=30221 u=zuul n=ansible | localhost : ok=12 changed=5 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 2025-10-06 21:11:59,162 p=30221 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.418) 0:00:09.794 ******** 2025-10-06 21:11:59,162 p=30221 u=zuul n=ansible | =============================================================================== 2025-10-06 21:11:59,162 p=30221 u=zuul n=ansible | Copy repositories from controller to computes --------------------------- 5.03s 2025-10-06 21:11:59,162 p=30221 u=zuul n=ansible | Gathering Facts --------------------------------------------------------- 1.41s 2025-10-06 21:11:59,162 p=30221 u=zuul n=ansible | Check for gating repo on controller ------------------------------------- 0.66s 2025-10-06 21:11:59,162 p=30221 u=zuul n=ansible | Create EDPM CR Kustomization -------------------------------------------- 0.54s 2025-10-06 21:11:59,162 p=30221 u=zuul n=ansible | Create OpenStackControlPlane CR Kustomization --------------------------- 0.45s 2025-10-06 21:11:59,162 p=30221 u=zuul n=ansible | Ensure the kustomizations dirs exists ----------------------------------- 0.44s 2025-10-06 21:11:59,162 p=30221 u=zuul n=ansible | Save compute info ------------------------------------------------------- 0.42s 2025-10-06 21:11:59,162 p=30221 u=zuul n=ansible | Ensure we know about the private host keys ------------------------------ 0.37s 2025-10-06 21:11:59,162 p=30221 u=zuul n=ansible | Set specific fact for compute accesses ---------------------------------- 0.13s 2025-10-06 21:11:59,162 p=30221 u=zuul n=ansible | Load parameters --------------------------------------------------------- 0.10s 2025-10-06 21:11:59,162 p=30221 u=zuul n=ansible | Ensure CRC hostname is set ---------------------------------------------- 0.07s 2025-10-06 21:11:59,162 p=30221 u=zuul n=ansible | Ensure we have needed bits for compute when needed ---------------------- 0.05s 2025-10-06 
21:11:59,162 p=30221 u=zuul n=ansible | Check we have some compute in inventory --------------------------------- 0.05s 2025-10-06 21:11:59,162 p=30221 u=zuul n=ansible | Ensure that the isolated net was configured for crc --------------------- 0.04s 2025-10-06 21:11:59,162 p=30221 u=zuul n=ansible | Set facts for further usage within the framework ------------------------ 0.04s home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_003_run_hook_without_retry_80.log0000644000175000017500000000273215071030244031616 0ustar zuulzuul[WARNING]: Found variable using reserved name: namespace PLAY [Kustomize ControlPlane for horizon service] ****************************** TASK [Ensure the kustomizations dir exists path={{ cifmw_basedir }}/artifacts/manifests/kustomizations/controlplane, state=directory, mode=0755] *** Monday 06 October 2025 21:12:03 +0000 (0:00:00.045) 0:00:00.045 ******** ok: [localhost] TASK [Create kustomize yaml to enable Horizon dest={{ cifmw_basedir }}/artifacts/manifests/kustomizations/controlplane/80-horizon-kustomization.yaml, content=apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization namespace: {{ namespace }} patches: - target: kind: OpenStackControlPlane patch: |- - op: add path: /spec/horizon/enabled value: true - op: add path: /spec/horizon/template/memcachedInstance value: memcached, mode=0644] *** Monday 06 October 2025 21:12:04 +0000 (0:00:00.334) 0:00:00.379 ******** changed: [localhost] PLAY RECAP ********************************************************************* localhost : ok=2 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 Monday 06 October 2025 21:12:04 +0000 (0:00:00.606) 0:00:00.986 ******** =============================================================================== Create kustomize yaml to enable Horizon --------------------------------- 0.61s Ensure the kustomizations dir exists ------------------------------------ 0.33s home/zuul/zuul-output/logs/ci-framework-data/logs/pre_deploy_80_kustomize_openstack_cr.log0000644000175000017500000000424015071030244031344 0ustar zuulzuul2025-10-06 21:12:03,862 p=30703 u=zuul n=ansible | [WARNING]: Found variable using reserved name: namespace 2025-10-06 21:12:03,862 p=30703 u=zuul n=ansible | PLAY [Kustomize ControlPlane for horizon service] ****************************** 2025-10-06 21:12:03,893 p=30703 u=zuul n=ansible | TASK [Ensure the kustomizations dir exists path={{ cifmw_basedir }}/artifacts/manifests/kustomizations/controlplane, state=directory, mode=0755] *** 2025-10-06 21:12:03,893 p=30703 u=zuul n=ansible | Monday 06 October 2025 21:12:03 +0000 (0:00:00.045) 0:00:00.045 ******** 2025-10-06 21:12:04,214 p=30703 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:04,227 p=30703 u=zuul n=ansible | TASK [Create kustomize yaml to enable Horizon dest={{ cifmw_basedir }}/artifacts/manifests/kustomizations/controlplane/80-horizon-kustomization.yaml, content=apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization namespace: {{ namespace }} patches: - target: kind: OpenStackControlPlane patch: |- - op: add path: /spec/horizon/enabled value: true - op: add path: /spec/horizon/template/memcachedInstance value: memcached, mode=0644] *** 2025-10-06 21:12:04,228 p=30703 u=zuul n=ansible | Monday 06 October 2025 21:12:04 +0000 (0:00:00.334) 0:00:00.379 ******** 2025-10-06 21:12:04,802 p=30703 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:12:04,834 p=30703 u=zuul n=ansible | PLAY RECAP ********************************************************************* 2025-10-06 
21:12:04,834 p=30703 u=zuul n=ansible | localhost : ok=2 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 2025-10-06 21:12:04,834 p=30703 u=zuul n=ansible | Monday 06 October 2025 21:12:04 +0000 (0:00:00.606) 0:00:00.986 ******** 2025-10-06 21:12:04,834 p=30703 u=zuul n=ansible | =============================================================================== 2025-10-06 21:12:04,834 p=30703 u=zuul n=ansible | Create kustomize yaml to enable Horizon --------------------------------- 0.61s 2025-10-06 21:12:04,834 p=30703 u=zuul n=ansible | Ensure the kustomizations dir exists ------------------------------------ 0.33s home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_004_run_hook_without_retry_create.log0000644000175000017500000000526615071030247032643 0ustar zuulzuul[WARNING]: Found variable using reserved name: namespace PLAY [Deploy cluster-observability-operator] *********************************** TASK [Create the COO subscription _raw_params=oc create -f - < changed: true cmd: | oc create -f - < changed: true cmd: | oc create -f - <, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"endpointslice.kubernetes.io/skip-mirror":"true"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:"kube-apiserver", Operation:"Update", APIVersion:"v1", Time:time.Date(2025, time.August, 13, 20, 8, 43, 0, time.Local), FieldsType:"FieldsV1", FieldsV1:(*v1.FieldsV1)(0xc002cb4f90), Subresource:""}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)) because All is well clusteroperator/kube-apiserver is progressing: NodeInstallerProgressing: 1 node is at revision 12; 0 nodes have achieved new revision 13 clusteroperator/machine-config is degraded because Failed to resync 4.16.0 because: error required MachineConfigPool master is paused and cannot sync until it is unpaused clusteroperator/network is progressing: DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) DaemonSet "/openshift-multus/multus" is not available (awaiting 1 nodes) clusteroperator/cloud-credential is missing clusteroperator/cluster-autoscaler is missing clusteroperator/insights is missing clusteroperator/monitoring is missing clusteroperator/storage is missing [must-gather ] OUT 2025-10-06T21:12:42.543938682Z namespace/openshift-must-gather-9wr6n created [must-gather ] OUT 2025-10-06T21:12:42.549448092Z clusterrolebinding.rbac.authorization.k8s.io/must-gather-2679m created [must-gather ] OUT 2025-10-06T21:12:43.589841409Z namespace/openshift-must-gather-9wr6n deleted Reprinting Cluster State: When opening a support case, bugzilla, or issue please include the following summary data along with any other requested information: ClusterID: a84dabf3-edcf-4828-b6a1-f9d3a6f02304 ClientVersion: 4.19.13 ClusterVersion: Stable at "4.16.0" ClusterOperators: clusteroperator/authentication is not available (WellKnownAvailable: The well-known endpoint is not yet available: failed to get API server IPs: unable to find kube api server endpointLister port: &v1.Endpoints{TypeMeta:v1.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:v1.ObjectMeta{Name:"kubernetes", GenerateName:"", Namespace:"default", SelfLink:"", UID:"9374e9d0-f290-4faa-94c7-262a199a1d45", ResourceVersion:"43093", Generation:0, CreationTimestamp:time.Date(2024, time.June, 26, 12, 38, 3, 0, time.Local), DeletionTimestamp:, DeletionGracePeriodSeconds:(*int64)(nil), 
Labels:map[string]string{"endpointslice.kubernetes.io/skip-mirror":"true"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:"kube-apiserver", Operation:"Update", APIVersion:"v1", Time:time.Date(2025, time.August, 13, 20, 8, 43, 0, time.Local), FieldsType:"FieldsV1", FieldsV1:(*v1.FieldsV1)(0xc002cb4f90), Subresource:""}}}, Subsets:[]v1.EndpointSubset(nil)} (check kube-apiserver that it deploys correctly)) because All is well clusteroperator/kube-apiserver is progressing: NodeInstallerProgressing: 1 node is at revision 12; 0 nodes have achieved new revision 13 clusteroperator/machine-config is degraded because Failed to resync 4.16.0 because: error required MachineConfigPool master is paused and cannot sync until it is unpaused clusteroperator/network is progressing: DaemonSet "/openshift-ovn-kubernetes/ovnkube-node" is not available (awaiting 1 nodes) DaemonSet "/openshift-multus/multus" is not available (awaiting 1 nodes) clusteroperator/cloud-credential is missing clusteroperator/cluster-autoscaler is missing clusteroperator/insights is missing clusteroperator/monitoring is missing clusteroperator/storage is missing Error from server (Forbidden): pods "must-gather-" is forbidden: error looking up service account openshift-must-gather-9wr6n/default: serviceaccount "default" not found home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_000_prepare_root_ssh.log0000644000175000017500000045530215071030332030031 0ustar zuulzuulPseudo-terminal will not be allocated because stdin is not a terminal. Red Hat Enterprise Linux CoreOS 416.94.202406172220-0 Part of OpenShift 4.16, RHCOS is a Kubernetes-native operating system managed by the Machine Config Operator (`clusteroperator/machine-config`). 
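The RHCOS login banner in the prepare-root-ssh log notes that the node is managed by the Machine Config Operator, and that configuration is normally changed through `machineconfig` objects rather than by editing files over SSH, which is what the script below does with `sed` against /etc/ssh/sshd_config. Purely as an illustration of that recommendation, a minimal MachineConfig achieving the same PermitRootLogin setting might look like the sketch below; the object name, the drop-in path, and the Ignition spec version are assumptions and are not taken from this job (the script's own output even shows /etc/ssh/sshd_config.d/ is absent on this image, so the drop-in path only works if sshd includes that directory):

apiVersion: machineconfiguration.openshift.io/v1
kind: MachineConfig
metadata:
  # hypothetical name; nothing in this job creates this object
  name: 99-master-permit-root-login
  labels:
    machineconfiguration.openshift.io/role: master
spec:
  config:
    ignition:
      version: 3.2.0                                   # assumed Ignition spec version
    storage:
      files:
        - path: /etc/ssh/sshd_config.d/40-root-login.conf   # hypothetical drop-in path
          mode: 0600
          overwrite: true
          contents:
            # URL-encoded file body: "PermitRootLogin prohibit-password\n"
            source: data:,PermitRootLogin%20prohibit-password%0A

Such an object would be applied with `oc apply -f` and rolled out by the MCO with a node reboot; the cluster state reprinted above shows the master MachineConfigPool is paused and cannot sync, which may be why this job edits sshd directly and restarts the service instead.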
WARNING: Direct SSH access to machines is not recommended; instead, make configuration changes via `machineconfig` objects: https://docs.openshift.com/container-platform/4.16/architecture/architecture-rhcos.html --- + test -d /etc/ssh/sshd_config.d/ + sudo sed -ri 's/PermitRootLogin no/PermitRootLogin prohibit-password/' '/etc/ssh/sshd_config.d/*' sed: can't read /etc/ssh/sshd_config.d/*: No such file or directory + true + sudo sed -i 's/PermitRootLogin no/PermitRootLogin prohibit-password/' /etc/ssh/sshd_config + sudo systemctl restart sshd + sudo cp -r .ssh /root/ + sudo chown -R root: /root/.ssh + mkdir -p /tmp/crc-logs-artifacts + sudo cp -av /ostree/deploy/rhcos/var/log/pods /tmp/crc-logs-artifacts/ '/ostree/deploy/rhcos/var/log/pods' -> '/tmp/crc-logs-artifacts/pods' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_installer-13-crc_e4ccb32c-914e-4f5c-9d1d-50cee1da7ce8' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-13-crc_e4ccb32c-914e-4f5c-9d1d-50cee1da7ce8' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_installer-13-crc_e4ccb32c-914e-4f5c-9d1d-50cee1da7ce8/installer' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-13-crc_e4ccb32c-914e-4f5c-9d1d-50cee1da7ce8/installer' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_installer-13-crc_e4ccb32c-914e-4f5c-9d1d-50cee1da7ce8/installer/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-13-crc_e4ccb32c-914e-4f5c-9d1d-50cee1da7ce8/installer/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/kube-rbac-proxy/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/kube-rbac-proxy/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon/2.log' -> 
'/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon/4.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon/4.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon/5.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon/5.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/setup' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/setup' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/setup/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/setup/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/setup/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/setup/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/setup/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/setup/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/kube-rbac-proxy-crio' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/kube-rbac-proxy-crio' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/kube-rbac-proxy-crio/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/kube-rbac-proxy-crio/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/kube-rbac-proxy-crio/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/kube-rbac-proxy-crio/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/kube-rbac-proxy-crio/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/kube-rbac-proxy-crio/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf' -> 
'/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf/kube-controller-manager-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf/kube-controller-manager-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf/kube-controller-manager-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf/kube-controller-manager-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf/kube-controller-manager-operator/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf/kube-controller-manager-operator/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf/kube-controller-manager-operator/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf/kube-controller-manager-operator/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7' -> '/tmp/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7' '/ostree/deploy/rhcos/var/log/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/kube-rbac-proxy/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/kube-rbac-proxy/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/dns' -> '/tmp/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/dns' '/ostree/deploy/rhcos/var/log/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/dns/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/dns/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/dns/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/dns/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511' -> 
'/tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/3.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/3.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/4.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/4.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/5.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/5.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/kube-rbac-proxy/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/kube-rbac-proxy/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcdctl' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcdctl' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcdctl/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcdctl/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcdctl/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcdctl/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcdctl/0.log' -> 
'/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcdctl/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-readyz' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-readyz' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-readyz/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-readyz/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-readyz/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-readyz/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-readyz/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-readyz/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-ensure-env-vars' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-ensure-env-vars' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-ensure-env-vars/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-ensure-env-vars/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-ensure-env-vars/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-ensure-env-vars/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-ensure-env-vars/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-ensure-env-vars/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-metrics' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-metrics' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-metrics/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-metrics/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-metrics/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-metrics/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-metrics/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-metrics/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/setup' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/setup' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/setup/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/setup/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/setup/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/setup/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/setup/1.log' -> 
'/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/setup/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-resources-copy' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-resources-copy' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-resources-copy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-resources-copy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-resources-copy/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-resources-copy/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-resources-copy/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-resources-copy/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_7dae59545f22b3fb679a7fbf878a6379' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_7dae59545f22b3fb679a7fbf878a6379' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_7dae59545f22b3fb679a7fbf878a6379/startup-monitor' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_7dae59545f22b3fb679a7fbf878a6379/startup-monitor' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_installer-7-crc_b57cce81-8ea0-4c4d-aae1-ee024d201c15' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_installer-7-crc_b57cce81-8ea0-4c4d-aae1-ee024d201c15' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_installer-7-crc_b57cce81-8ea0-4c4d-aae1-ee024d201c15/installer' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_installer-7-crc_b57cce81-8ea0-4c4d-aae1-ee024d201c15/installer' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_installer-7-crc_b57cce81-8ea0-4c4d-aae1-ee024d201c15/installer/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_installer-7-crc_b57cce81-8ea0-4c4d-aae1-ee024d201c15/installer/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_installer-10-retry-1-crc_dc02677d-deed-4cc9-bb8c-0dd300f83655' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-10-retry-1-crc_dc02677d-deed-4cc9-bb8c-0dd300f83655' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_installer-10-retry-1-crc_dc02677d-deed-4cc9-bb8c-0dd300f83655/installer' -> 
'/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-10-retry-1-crc_dc02677d-deed-4cc9-bb8c-0dd300f83655/installer' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_installer-10-retry-1-crc_dc02677d-deed-4cc9-bb8c-0dd300f83655/installer/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-10-retry-1-crc_dc02677d-deed-4cc9-bb8c-0dd300f83655/installer/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb' -> '/tmp/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb' '/ostree/deploy/rhcos/var/log/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/kube-rbac-proxy/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/kube-rbac-proxy/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/dns-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/dns-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/dns-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/dns-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/dns-operator/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/dns-operator/1.log' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/liveness-probe' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/liveness-probe' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/liveness-probe/0.log' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/liveness-probe/0.log' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/liveness-probe/1.log' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/liveness-probe/1.log' 
'/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/csi-provisioner' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/csi-provisioner' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/csi-provisioner/0.log' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/csi-provisioner/0.log' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/csi-provisioner/1.log' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/csi-provisioner/1.log' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/node-driver-registrar' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/node-driver-registrar' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/node-driver-registrar/0.log' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/node-driver-registrar/0.log' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/node-driver-registrar/1.log' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/node-driver-registrar/1.log' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/hostpath-provisioner' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/hostpath-provisioner' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/hostpath-provisioner/1.log' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/hostpath-provisioner/1.log' '/ostree/deploy/rhcos/var/log/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/hostpath-provisioner/0.log' -> '/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/hostpath-provisioner/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/extract-utilities' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/extract-utilities' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/extract-utilities/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/extract-utilities/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/extract-content' -> 
'/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/extract-content' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/extract-content/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/extract-content/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/registry-server' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/registry-server' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/registry-server/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/registry-server/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/kube-rbac-proxy/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/kube-rbac-proxy/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/network-metrics-daemon' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/network-metrics-daemon' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/network-metrics-daemon/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/network-metrics-daemon/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/network-metrics-daemon/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/network-metrics-daemon/1.log' '/ostree/deploy/rhcos/var/log/pods/cert-manager_cert-manager-cainjector-5c5695d979-h44lv_a24f2023-bbe9-44a0-b17c-b662dacc6f34' -> '/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-cainjector-5c5695d979-h44lv_a24f2023-bbe9-44a0-b17c-b662dacc6f34' '/ostree/deploy/rhcos/var/log/pods/cert-manager_cert-manager-cainjector-5c5695d979-h44lv_a24f2023-bbe9-44a0-b17c-b662dacc6f34/cert-manager-cainjector' -> '/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-cainjector-5c5695d979-h44lv_a24f2023-bbe9-44a0-b17c-b662dacc6f34/cert-manager-cainjector' 
'/ostree/deploy/rhcos/var/log/pods/cert-manager_cert-manager-cainjector-5c5695d979-h44lv_a24f2023-bbe9-44a0-b17c-b662dacc6f34/cert-manager-cainjector/0.log' -> '/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-cainjector-5c5695d979-h44lv_a24f2023-bbe9-44a0-b17c-b662dacc6f34/cert-manager-cainjector/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8/openshift-apiserver-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8/openshift-apiserver-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8/openshift-apiserver-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8/openshift-apiserver-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8/openshift-apiserver-operator/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8/openshift-apiserver-operator/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8/openshift-apiserver-operator/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8/openshift-apiserver-operator/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4' -> '/tmp/crc-logs-artifacts/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4' '/ostree/deploy/rhcos/var/log/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4/console-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4/console-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4/console-operator/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4/console-operator/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4/console-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4/console-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4/console-operator/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4/console-operator/1.log' 
'/ostree/deploy/rhcos/var/log/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550' -> '/tmp/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550' '/ostree/deploy/rhcos/var/log/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550/openshift-controller-manager-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550/openshift-controller-manager-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550/openshift-controller-manager-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550/openshift-controller-manager-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550/openshift-controller-manager-operator/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550/openshift-controller-manager-operator/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550/openshift-controller-manager-operator/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550/openshift-controller-manager-operator/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/kube-rbac-proxy/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/kube-rbac-proxy/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/ovnkube-cluster-manager' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/ovnkube-cluster-manager' 
'/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/ovnkube-cluster-manager/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/ovnkube-cluster-manager/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/ovnkube-cluster-manager/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/ovnkube-cluster-manager/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/egress-router-binary-copy' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/egress-router-binary-copy' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/egress-router-binary-copy/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/egress-router-binary-copy/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/egress-router-binary-copy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/egress-router-binary-copy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/cni-plugins' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/cni-plugins' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/cni-plugins/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/cni-plugins/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/cni-plugins/0.log' -> 
'/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/cni-plugins/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/kube-multus-additional-cni-plugins' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/kube-multus-additional-cni-plugins' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/kube-multus-additional-cni-plugins/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/kube-multus-additional-cni-plugins/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/kube-multus-additional-cni-plugins/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/kube-multus-additional-cni-plugins/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/bond-cni-plugin' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/bond-cni-plugin' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/bond-cni-plugin/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/bond-cni-plugin/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/bond-cni-plugin/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/bond-cni-plugin/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/routeoverride-cni' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/routeoverride-cni' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/routeoverride-cni/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/routeoverride-cni/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/routeoverride-cni/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/routeoverride-cni/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni-bincopy' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni-bincopy' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni-bincopy/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni-bincopy/1.log' 
'/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni-bincopy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni-bincopy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19/kube-apiserver-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19/kube-apiserver-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19/kube-apiserver-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19/kube-apiserver-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19/kube-apiserver-operator/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19/kube-apiserver-operator/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19/kube-apiserver-operator/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19/kube-apiserver-operator/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_installer-8-crc_aca1f9ff-a685-4a78-b461-3931b757f754' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_installer-8-crc_aca1f9ff-a685-4a78-b461-3931b757f754' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_installer-8-crc_aca1f9ff-a685-4a78-b461-3931b757f754/installer' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_installer-8-crc_aca1f9ff-a685-4a78-b461-3931b757f754/installer' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_installer-8-crc_aca1f9ff-a685-4a78-b461-3931b757f754/installer/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_installer-8-crc_aca1f9ff-a685-4a78-b461-3931b757f754/installer/0.log' '/ostree/deploy/rhcos/var/log/pods/cert-manager_cert-manager-67c98b89c8-4rplv_c282850f-1ba4-47b7-ae36-8e423c6cc9c2' -> '/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-67c98b89c8-4rplv_c282850f-1ba4-47b7-ae36-8e423c6cc9c2' '/ostree/deploy/rhcos/var/log/pods/cert-manager_cert-manager-67c98b89c8-4rplv_c282850f-1ba4-47b7-ae36-8e423c6cc9c2/cert-manager-controller' -> '/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-67c98b89c8-4rplv_c282850f-1ba4-47b7-ae36-8e423c6cc9c2/cert-manager-controller' '/ostree/deploy/rhcos/var/log/pods/cert-manager_cert-manager-67c98b89c8-4rplv_c282850f-1ba4-47b7-ae36-8e423c6cc9c2/cert-manager-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-67c98b89c8-4rplv_c282850f-1ba4-47b7-ae36-8e423c6cc9c2/cert-manager-controller/0.log' 
'/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_collect-profiles-29251950-x8jjd_ad171c4b-8408-4370-8e86-502999788ddb' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29251950-x8jjd_ad171c4b-8408-4370-8e86-502999788ddb' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_collect-profiles-29251950-x8jjd_ad171c4b-8408-4370-8e86-502999788ddb/collect-profiles' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29251950-x8jjd_ad171c4b-8408-4370-8e86-502999788ddb/collect-profiles' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_collect-profiles-29251950-x8jjd_ad171c4b-8408-4370-8e86-502999788ddb/collect-profiles/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29251950-x8jjd_ad171c4b-8408-4370-8e86-502999788ddb/collect-profiles/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/multus-admission-controller' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/multus-admission-controller' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/multus-admission-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/multus-admission-controller/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/multus-admission-controller/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/multus-admission-controller/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/kube-rbac-proxy/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/kube-rbac-proxy/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4' 
'/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/machine-config-controller' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/machine-config-controller' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/machine-config-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/machine-config-controller/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/machine-config-controller/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/machine-config-controller/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/kube-rbac-proxy/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/kube-rbac-proxy/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/machine-approver-controller' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/machine-approver-controller' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/machine-approver-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/machine-approver-controller/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/machine-approver-controller/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/machine-approver-controller/1.log' 
'/ostree/deploy/rhcos/var/log/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/machine-approver-controller/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/machine-approver-controller/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/kube-rbac-proxy/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/kube-rbac-proxy/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-version_cluster-version-operator-6d5d9649f6-x6d46_9fb762d1-812f-43f1-9eac-68034c1ecec7' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-version_cluster-version-operator-6d5d9649f6-x6d46_9fb762d1-812f-43f1-9eac-68034c1ecec7' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-version_cluster-version-operator-6d5d9649f6-x6d46_9fb762d1-812f-43f1-9eac-68034c1ecec7/cluster-version-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-version_cluster-version-operator-6d5d9649f6-x6d46_9fb762d1-812f-43f1-9eac-68034c1ecec7/cluster-version-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-version_cluster-version-operator-6d5d9649f6-x6d46_9fb762d1-812f-43f1-9eac-68034c1ecec7/cluster-version-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-version_cluster-version-operator-6d5d9649f6-x6d46_9fb762d1-812f-43f1-9eac-68034c1ecec7/cluster-version-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-version_cluster-version-operator-6d5d9649f6-x6d46_9fb762d1-812f-43f1-9eac-68034c1ecec7/cluster-version-operator/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-version_cluster-version-operator-6d5d9649f6-x6d46_9fb762d1-812f-43f1-9eac-68034c1ecec7/cluster-version-operator/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_packageserver-8464bcc55b-sjnqz_bd556935-a077-45df-ba3f-d42c39326ccd' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_packageserver-8464bcc55b-sjnqz_bd556935-a077-45df-ba3f-d42c39326ccd' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_packageserver-8464bcc55b-sjnqz_bd556935-a077-45df-ba3f-d42c39326ccd/packageserver' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_packageserver-8464bcc55b-sjnqz_bd556935-a077-45df-ba3f-d42c39326ccd/packageserver' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_packageserver-8464bcc55b-sjnqz_bd556935-a077-45df-ba3f-d42c39326ccd/packageserver/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_packageserver-8464bcc55b-sjnqz_bd556935-a077-45df-ba3f-d42c39326ccd/packageserver/1.log' 
'/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_packageserver-8464bcc55b-sjnqz_bd556935-a077-45df-ba3f-d42c39326ccd/packageserver/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_packageserver-8464bcc55b-sjnqz_bd556935-a077-45df-ba3f-d42c39326ccd/packageserver/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7/kube-storage-version-migrator-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7/kube-storage-version-migrator-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7/kube-storage-version-migrator-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7/kube-storage-version-migrator-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7/kube-storage-version-migrator-operator/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7/kube-storage-version-migrator-operator/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7/kube-storage-version-migrator-operator/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7/kube-storage-version-migrator-operator/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d/etcd-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d/etcd-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d/etcd-operator/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d/etcd-operator/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d/etcd-operator/0.log' -> 
'/tmp/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d/etcd-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d/etcd-operator/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d/etcd-operator/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e' '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e/cluster-image-registry-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e/cluster-image-registry-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e/cluster-image-registry-operator/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e/cluster-image-registry-operator/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e/cluster-image-registry-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e/cluster-image-registry-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e/cluster-image-registry-operator/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e/cluster-image-registry-operator/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/3.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/3.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/5.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/5.log' 
'/ostree/deploy/rhcos/var/log/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/4.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/4.log' '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_node-ca-l92hr_f8175ef1-0983-4bfe-a64e-fc6f5c5f7d2e' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_node-ca-l92hr_f8175ef1-0983-4bfe-a64e-fc6f5c5f7d2e' '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_node-ca-l92hr_f8175ef1-0983-4bfe-a64e-fc6f5c5f7d2e/node-ca' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_node-ca-l92hr_f8175ef1-0983-4bfe-a64e-fc6f5c5f7d2e/node-ca' '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_node-ca-l92hr_f8175ef1-0983-4bfe-a64e-fc6f5c5f7d2e/node-ca/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_node-ca-l92hr_f8175ef1-0983-4bfe-a64e-fc6f5c5f7d2e/node-ca/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_node-ca-l92hr_f8175ef1-0983-4bfe-a64e-fc6f5c5f7d2e/node-ca/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_node-ca-l92hr_f8175ef1-0983-4bfe-a64e-fc6f5c5f7d2e/node-ca/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-network-diagnostics_network-check-target-v54bt_34a48baf-1bee-4921-8bb2-9b7320e76f79' -> '/tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-v54bt_34a48baf-1bee-4921-8bb2-9b7320e76f79' '/ostree/deploy/rhcos/var/log/pods/openshift-network-diagnostics_network-check-target-v54bt_34a48baf-1bee-4921-8bb2-9b7320e76f79/network-check-target-container' -> '/tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-v54bt_34a48baf-1bee-4921-8bb2-9b7320e76f79/network-check-target-container' '/ostree/deploy/rhcos/var/log/pods/openshift-network-diagnostics_network-check-target-v54bt_34a48baf-1bee-4921-8bb2-9b7320e76f79/network-check-target-container/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-v54bt_34a48baf-1bee-4921-8bb2-9b7320e76f79/network-check-target-container/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-network-diagnostics_network-check-target-v54bt_34a48baf-1bee-4921-8bb2-9b7320e76f79/network-check-target-container/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-v54bt_34a48baf-1bee-4921-8bb2-9b7320e76f79/network-check-target-container/1.log' '/ostree/deploy/rhcos/var/log/pods/cert-manager_cert-manager-webhook-7f9f8648b9-lnppn_a088e670-59a9-490f-b5df-321b48308e10' -> '/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-webhook-7f9f8648b9-lnppn_a088e670-59a9-490f-b5df-321b48308e10' '/ostree/deploy/rhcos/var/log/pods/cert-manager_cert-manager-webhook-7f9f8648b9-lnppn_a088e670-59a9-490f-b5df-321b48308e10/cert-manager-webhook' -> '/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-webhook-7f9f8648b9-lnppn_a088e670-59a9-490f-b5df-321b48308e10/cert-manager-webhook' '/ostree/deploy/rhcos/var/log/pods/cert-manager_cert-manager-webhook-7f9f8648b9-lnppn_a088e670-59a9-490f-b5df-321b48308e10/cert-manager-webhook/0.log' -> '/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-webhook-7f9f8648b9-lnppn_a088e670-59a9-490f-b5df-321b48308e10/cert-manager-webhook/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347' 
'/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/9.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/9.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/8.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/8.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/4.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/4.log' '/ostree/deploy/rhcos/var/log/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/5.log' -> '/tmp/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/5.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-server-v65wr_bf1a8b70-3856-486f-9912-a2de1d57c3fb' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-v65wr_bf1a8b70-3856-486f-9912-a2de1d57c3fb' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-server-v65wr_bf1a8b70-3856-486f-9912-a2de1d57c3fb/machine-config-server' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-v65wr_bf1a8b70-3856-486f-9912-a2de1d57c3fb/machine-config-server' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-server-v65wr_bf1a8b70-3856-486f-9912-a2de1d57c3fb/machine-config-server/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-v65wr_bf1a8b70-3856-486f-9912-a2de1d57c3fb/machine-config-server/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-server-v65wr_bf1a8b70-3856-486f-9912-a2de1d57c3fb/machine-config-server/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-v65wr_bf1a8b70-3856-486f-9912-a2de1d57c3fb/machine-config-server/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_revision-pruner-10-crc_2f155735-a9be-4621-a5f2-5ab4b6957acd' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-10-crc_2f155735-a9be-4621-a5f2-5ab4b6957acd' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_revision-pruner-10-crc_2f155735-a9be-4621-a5f2-5ab4b6957acd/pruner' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-10-crc_2f155735-a9be-4621-a5f2-5ab4b6957acd/pruner' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_revision-pruner-10-crc_2f155735-a9be-4621-a5f2-5ab4b6957acd/pruner/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-10-crc_2f155735-a9be-4621-a5f2-5ab4b6957acd/pruner/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883' -> '/tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883' 
'/ostree/deploy/rhcos/var/log/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-api' -> '/tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-api' '/ostree/deploy/rhcos/var/log/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-api/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-api/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-api/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-api/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-config-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-config-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-config-operator/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-config-operator/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-config-operator/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-config-operator/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-config-operator/3.log' -> '/tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-config-operator/3.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/northd' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/northd' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/northd/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/northd/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/nbdb' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/nbdb' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/nbdb/0.log' -> 
'/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/nbdb/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kubecfg-setup' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kubecfg-setup' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kubecfg-setup/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kubecfg-setup/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/sbdb' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/sbdb' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/sbdb/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/sbdb/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovn-controller' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovn-controller' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovn-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovn-controller/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovnkube-controller' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovnkube-controller' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovnkube-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovnkube-controller/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovn-acl-logging' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovn-acl-logging' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovn-acl-logging/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovn-acl-logging/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kube-rbac-proxy-node' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kube-rbac-proxy-node' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kube-rbac-proxy-node/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kube-rbac-proxy-node/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kube-rbac-proxy-ovn-metrics' -> 
'/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kube-rbac-proxy-ovn-metrics' '/ostree/deploy/rhcos/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kube-rbac-proxy-ovn-metrics/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kube-rbac-proxy-ovn-metrics/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e' -> '/tmp/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e' '/ostree/deploy/rhcos/var/log/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e/authentication-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e/authentication-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e/authentication-operator/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e/authentication-operator/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e/authentication-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e/authentication-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e/authentication-operator/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e/authentication-operator/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_catalog-operator-857456c46-7f5wf_8a5ae51d-d173-4531-8975-f164c975ce1f' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_catalog-operator-857456c46-7f5wf_8a5ae51d-d173-4531-8975-f164c975ce1f' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_catalog-operator-857456c46-7f5wf_8a5ae51d-d173-4531-8975-f164c975ce1f/catalog-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_catalog-operator-857456c46-7f5wf_8a5ae51d-d173-4531-8975-f164c975ce1f/catalog-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_catalog-operator-857456c46-7f5wf_8a5ae51d-d173-4531-8975-f164c975ce1f/catalog-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_catalog-operator-857456c46-7f5wf_8a5ae51d-d173-4531-8975-f164c975ce1f/catalog-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_catalog-operator-857456c46-7f5wf_8a5ae51d-d173-4531-8975-f164c975ce1f/catalog-operator/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_catalog-operator-857456c46-7f5wf_8a5ae51d-d173-4531-8975-f164c975ce1f/catalog-operator/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938' -> 
'/tmp/crc-logs-artifacts/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/registry-server' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/registry-server' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/registry-server/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/registry-server/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/extract-utilities' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/extract-utilities' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/extract-utilities/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/extract-utilities/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/extract-content' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/extract-content' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/extract-content/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/extract-content/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-canary_ingress-canary-2vhcn_0b5d722a-1123-4935-9740-52a08d018bc9' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress-canary_ingress-canary-2vhcn_0b5d722a-1123-4935-9740-52a08d018bc9' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-canary_ingress-canary-2vhcn_0b5d722a-1123-4935-9740-52a08d018bc9/serve-healthcheck-canary' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress-canary_ingress-canary-2vhcn_0b5d722a-1123-4935-9740-52a08d018bc9/serve-healthcheck-canary' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-canary_ingress-canary-2vhcn_0b5d722a-1123-4935-9740-52a08d018bc9/serve-healthcheck-canary/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress-canary_ingress-canary-2vhcn_0b5d722a-1123-4935-9740-52a08d018bc9/serve-healthcheck-canary/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-ingress-canary_ingress-canary-2vhcn_0b5d722a-1123-4935-9740-52a08d018bc9/serve-healthcheck-canary/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-ingress-canary_ingress-canary-2vhcn_0b5d722a-1123-4935-9740-52a08d018bc9/serve-healthcheck-canary/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager' 
'/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager/3.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager/3.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller/8.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller/8.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller/9.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller/9.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller/7.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller/7.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-cert-syncer' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-cert-syncer' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-cert-syncer/1.log' -> 
'/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-cert-syncer/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-cert-syncer/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-cert-syncer/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-cert-syncer/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-cert-syncer/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-recovery-controller' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-recovery-controller' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-recovery-controller/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-recovery-controller/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-recovery-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-recovery-controller/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-recovery-controller/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-recovery-controller/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1' -> '/tmp/crc-logs-artifacts/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1' '/ostree/deploy/rhcos/var/log/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1/console' -> '/tmp/crc-logs-artifacts/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1/console' '/ostree/deploy/rhcos/var/log/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1/console/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1/console/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1/console/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1/console/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1/console/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1/console/1.log' 
'/ostree/deploy/rhcos/var/log/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846' -> '/tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846' '/ostree/deploy/rhcos/var/log/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/approver' -> '/tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/approver' '/ostree/deploy/rhcos/var/log/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/approver/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/approver/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/approver/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/approver/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/approver/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/approver/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/webhook' -> '/tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/webhook' '/ostree/deploy/rhcos/var/log/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/webhook/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/webhook/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/webhook/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/webhook/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17' -> '/tmp/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17' '/ostree/deploy/rhcos/var/log/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17/service-ca-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17/service-ca-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17/service-ca-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17/service-ca-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17/service-ca-operator/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17/service-ca-operator/1.log' 
'/ostree/deploy/rhcos/var/log/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17/service-ca-operator/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17/service-ca-operator/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-controller-manager_controller-manager-778975cc4f-x5vcf_1a3e81c3-c292-4130-9436-f94062c91efd' -> '/tmp/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-778975cc4f-x5vcf_1a3e81c3-c292-4130-9436-f94062c91efd' '/ostree/deploy/rhcos/var/log/pods/openshift-controller-manager_controller-manager-778975cc4f-x5vcf_1a3e81c3-c292-4130-9436-f94062c91efd/controller-manager' -> '/tmp/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-778975cc4f-x5vcf_1a3e81c3-c292-4130-9436-f94062c91efd/controller-manager' '/ostree/deploy/rhcos/var/log/pods/openshift-controller-manager_controller-manager-778975cc4f-x5vcf_1a3e81c3-c292-4130-9436-f94062c91efd/controller-manager/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-778975cc4f-x5vcf_1a3e81c3-c292-4130-9436-f94062c91efd/controller-manager/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-controller-manager_controller-manager-778975cc4f-x5vcf_1a3e81c3-c292-4130-9436-f94062c91efd/controller-manager/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-778975cc4f-x5vcf_1a3e81c3-c292-4130-9436-f94062c91efd/controller-manager/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_olm-operator-6d8474f75f-x54mh_c085412c-b875-46c9-ae3e-e6b0d8067091' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-6d8474f75f-x54mh_c085412c-b875-46c9-ae3e-e6b0d8067091' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_olm-operator-6d8474f75f-x54mh_c085412c-b875-46c9-ae3e-e6b0d8067091/olm-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-6d8474f75f-x54mh_c085412c-b875-46c9-ae3e-e6b0d8067091/olm-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_olm-operator-6d8474f75f-x54mh_c085412c-b875-46c9-ae3e-e6b0d8067091/olm-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-6d8474f75f-x54mh_c085412c-b875-46c9-ae3e-e6b0d8067091/olm-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_olm-operator-6d8474f75f-x54mh_c085412c-b875-46c9-ae3e-e6b0d8067091/olm-operator/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-6d8474f75f-x54mh_c085412c-b875-46c9-ae3e-e6b0d8067091/olm-operator/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/machine-config-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/machine-config-operator' 
'/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/machine-config-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/machine-config-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/machine-config-operator/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/machine-config-operator/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/kube-rbac-proxy/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/kube-rbac-proxy/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/35b9f7c15e62ef6a8cb44a046562c85521277927a7458c49a1efe24eb45f23a3.log' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/35b9f7c15e62ef6a8cb44a046562c85521277927a7458c49a1efe24eb45f23a3.log' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/2c45b735c45341a1d77370cd8823760353056c6e1eff59259f19fde659c543fb.log' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/2c45b735c45341a1d77370cd8823760353056c6e1eff59259f19fde659c543fb.log' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator/0.log' -> 
'/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator-watch' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator-watch' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator-watch/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator-watch/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator-watch/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator-watch/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/package-server-manager' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/package-server-manager' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/package-server-manager/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/package-server-manager/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/package-server-manager/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/package-server-manager/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/kube-rbac-proxy' -> 
'/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/kube-rbac-proxy/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/kube-rbac-proxy/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/kube-rbac-proxy/0.log' cp: cannot stat '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_image-registry-75779c45fd-v2j2v_f9a7bc46-2f44-4aff-9cb5-97c97a4a8319': No such file or directory '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f/control-plane-machine-set-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f/control-plane-machine-set-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f/control-plane-machine-set-operator/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f/control-plane-machine-set-operator/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f/control-plane-machine-set-operator/3.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f/control-plane-machine-set-operator/3.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f/control-plane-machine-set-operator/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f/control-plane-machine-set-operator/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-console-operator_console-conversion-webhook-595f9969b-l6z49_59748b9b-c309-4712-aa85-bb38d71c4915' -> '/tmp/crc-logs-artifacts/pods/openshift-console-operator_console-conversion-webhook-595f9969b-l6z49_59748b9b-c309-4712-aa85-bb38d71c4915' '/ostree/deploy/rhcos/var/log/pods/openshift-console-operator_console-conversion-webhook-595f9969b-l6z49_59748b9b-c309-4712-aa85-bb38d71c4915/conversion-webhook-server' -> '/tmp/crc-logs-artifacts/pods/openshift-console-operator_console-conversion-webhook-595f9969b-l6z49_59748b9b-c309-4712-aa85-bb38d71c4915/conversion-webhook-server' 
'/ostree/deploy/rhcos/var/log/pods/openshift-console-operator_console-conversion-webhook-595f9969b-l6z49_59748b9b-c309-4712-aa85-bb38d71c4915/conversion-webhook-server/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-console-operator_console-conversion-webhook-595f9969b-l6z49_59748b9b-c309-4712-aa85-bb38d71c4915/conversion-webhook-server/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-console-operator_console-conversion-webhook-595f9969b-l6z49_59748b9b-c309-4712-aa85-bb38d71c4915/conversion-webhook-server/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-console-operator_console-conversion-webhook-595f9969b-l6z49_59748b9b-c309-4712-aa85-bb38d71c4915/conversion-webhook-server/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/machine-api-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/machine-api-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/machine-api-operator/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/machine-api-operator/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/machine-api-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/machine-api-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/machine-api-operator/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/machine-api-operator/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/kube-rbac-proxy' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/kube-rbac-proxy' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/kube-rbac-proxy/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/kube-rbac-proxy/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/kube-rbac-proxy/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/kube-rbac-proxy/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a' -> '/tmp/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a' '/ostree/deploy/rhcos/var/log/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/fix-audit-permissions' -> 
'/tmp/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/fix-audit-permissions' '/ostree/deploy/rhcos/var/log/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/fix-audit-permissions/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/fix-audit-permissions/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/fix-audit-permissions/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/fix-audit-permissions/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/oauth-apiserver' -> '/tmp/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/oauth-apiserver' '/ostree/deploy/rhcos/var/log/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/oauth-apiserver/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/oauth-apiserver/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/oauth-apiserver/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/oauth-apiserver/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c' -> '/tmp/crc-logs-artifacts/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c' '/ostree/deploy/rhcos/var/log/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c/download-server' -> '/tmp/crc-logs-artifacts/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c/download-server' '/ostree/deploy/rhcos/var/log/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c/download-server/5.log' -> '/tmp/crc-logs-artifacts/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c/download-server/5.log' '/ostree/deploy/rhcos/var/log/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c/download-server/6.log' -> '/tmp/crc-logs-artifacts/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c/download-server/6.log' '/ostree/deploy/rhcos/var/log/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c/download-server/7.log' -> '/tmp/crc-logs-artifacts/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c/download-server/7.log' '/ostree/deploy/rhcos/var/log/pods/openshift-route-controller-manager_route-controller-manager-776b8b7477-sfpvs_21d29937-debd-4407-b2b1-d1053cb0f342' -> '/tmp/crc-logs-artifacts/pods/openshift-route-controller-manager_route-controller-manager-776b8b7477-sfpvs_21d29937-debd-4407-b2b1-d1053cb0f342' '/ostree/deploy/rhcos/var/log/pods/openshift-route-controller-manager_route-controller-manager-776b8b7477-sfpvs_21d29937-debd-4407-b2b1-d1053cb0f342/route-controller-manager' -> 
'/tmp/crc-logs-artifacts/pods/openshift-route-controller-manager_route-controller-manager-776b8b7477-sfpvs_21d29937-debd-4407-b2b1-d1053cb0f342/route-controller-manager' '/ostree/deploy/rhcos/var/log/pods/openshift-route-controller-manager_route-controller-manager-776b8b7477-sfpvs_21d29937-debd-4407-b2b1-d1053cb0f342/route-controller-manager/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-route-controller-manager_route-controller-manager-776b8b7477-sfpvs_21d29937-debd-4407-b2b1-d1053cb0f342/route-controller-manager/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-route-controller-manager_route-controller-manager-776b8b7477-sfpvs_21d29937-debd-4407-b2b1-d1053cb0f342/route-controller-manager/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-route-controller-manager_route-controller-manager-776b8b7477-sfpvs_21d29937-debd-4407-b2b1-d1053cb0f342/route-controller-manager/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2' -> '/tmp/crc-logs-artifacts/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2' '/ostree/deploy/rhcos/var/log/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2/network-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2/network-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2/network-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2/network-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2/network-operator/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2/network-operator/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2/network-operator/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2/network-operator/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905/kube-scheduler-operator-container' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905/kube-scheduler-operator-container' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905/kube-scheduler-operator-container/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905/kube-scheduler-operator-container/2.log' 
'/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905/kube-scheduler-operator-container/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905/kube-scheduler-operator-container/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905/kube-scheduler-operator-container/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905/kube-scheduler-operator-container/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-dns_node-resolver-dn27q_6a23c0ee-5648-448c-b772-83dced2891ce' -> '/tmp/crc-logs-artifacts/pods/openshift-dns_node-resolver-dn27q_6a23c0ee-5648-448c-b772-83dced2891ce' '/ostree/deploy/rhcos/var/log/pods/openshift-dns_node-resolver-dn27q_6a23c0ee-5648-448c-b772-83dced2891ce/dns-node-resolver' -> '/tmp/crc-logs-artifacts/pods/openshift-dns_node-resolver-dn27q_6a23c0ee-5648-448c-b772-83dced2891ce/dns-node-resolver' '/ostree/deploy/rhcos/var/log/pods/openshift-dns_node-resolver-dn27q_6a23c0ee-5648-448c-b772-83dced2891ce/dns-node-resolver/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-dns_node-resolver-dn27q_6a23c0ee-5648-448c-b772-83dced2891ce/dns-node-resolver/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-dns_node-resolver-dn27q_6a23c0ee-5648-448c-b772-83dced2891ce/dns-node-resolver/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-dns_node-resolver-dn27q_6a23c0ee-5648-448c-b772-83dced2891ce/dns-node-resolver/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver-check-endpoints' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver-check-endpoints' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver-check-endpoints/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver-check-endpoints/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver-check-endpoints/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver-check-endpoints/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/fix-audit-permissions' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/fix-audit-permissions' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/fix-audit-permissions/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/fix-audit-permissions/0.log' 
'/ostree/deploy/rhcos/var/log/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/fix-audit-permissions/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/fix-audit-permissions/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-network-diagnostics_network-check-source-5c5478f8c-vqvt7_d0f40333-c860-4c04-8058-a0bf572dcf12' -> '/tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-source-5c5478f8c-vqvt7_d0f40333-c860-4c04-8058-a0bf572dcf12' '/ostree/deploy/rhcos/var/log/pods/openshift-network-diagnostics_network-check-source-5c5478f8c-vqvt7_d0f40333-c860-4c04-8058-a0bf572dcf12/check-endpoints' -> '/tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-source-5c5478f8c-vqvt7_d0f40333-c860-4c04-8058-a0bf572dcf12/check-endpoints' '/ostree/deploy/rhcos/var/log/pods/openshift-network-diagnostics_network-check-source-5c5478f8c-vqvt7_d0f40333-c860-4c04-8058-a0bf572dcf12/check-endpoints/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-source-5c5478f8c-vqvt7_d0f40333-c860-4c04-8058-a0bf572dcf12/check-endpoints/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-network-diagnostics_network-check-source-5c5478f8c-vqvt7_d0f40333-c860-4c04-8058-a0bf572dcf12/check-endpoints/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-source-5c5478f8c-vqvt7_d0f40333-c860-4c04-8058-a0bf572dcf12/check-endpoints/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-storage-version-migrator_migrator-f7c6d88df-q2fnv_cf1a8966-f594-490a-9fbb-eec5bafd13d3' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-f7c6d88df-q2fnv_cf1a8966-f594-490a-9fbb-eec5bafd13d3' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-storage-version-migrator_migrator-f7c6d88df-q2fnv_cf1a8966-f594-490a-9fbb-eec5bafd13d3/migrator' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-f7c6d88df-q2fnv_cf1a8966-f594-490a-9fbb-eec5bafd13d3/migrator' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-storage-version-migrator_migrator-f7c6d88df-q2fnv_cf1a8966-f594-490a-9fbb-eec5bafd13d3/migrator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-f7c6d88df-q2fnv_cf1a8966-f594-490a-9fbb-eec5bafd13d3/migrator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-storage-version-migrator_migrator-f7c6d88df-q2fnv_cf1a8966-f594-490a-9fbb-eec5bafd13d3/migrator/1.log' -> 
'/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-f7c6d88df-q2fnv_cf1a8966-f594-490a-9fbb-eec5bafd13d3/migrator/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-network-operator_iptables-alerter-wwpnd_2b6d14a5-ca00-40c7-af7a-051a98a24eed' -> '/tmp/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-wwpnd_2b6d14a5-ca00-40c7-af7a-051a98a24eed' '/ostree/deploy/rhcos/var/log/pods/openshift-network-operator_iptables-alerter-wwpnd_2b6d14a5-ca00-40c7-af7a-051a98a24eed/iptables-alerter' -> '/tmp/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-wwpnd_2b6d14a5-ca00-40c7-af7a-051a98a24eed/iptables-alerter' '/ostree/deploy/rhcos/var/log/pods/openshift-network-operator_iptables-alerter-wwpnd_2b6d14a5-ca00-40c7-af7a-051a98a24eed/iptables-alerter/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-wwpnd_2b6d14a5-ca00-40c7-af7a-051a98a24eed/iptables-alerter/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-network-operator_iptables-alerter-wwpnd_2b6d14a5-ca00-40c7-af7a-051a98a24eed/iptables-alerter/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-wwpnd_2b6d14a5-ca00-40c7-af7a-051a98a24eed/iptables-alerter/1.log' cp: cannot stat '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_ae85115fdc231b4002b57317b41a6400': No such file or directory '/ostree/deploy/rhcos/var/log/pods/openshift-authentication_oauth-openshift-74fc7c67cc-xqf8b_01feb2e0-a0f4-4573-8335-34e364e0ef40' -> '/tmp/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-74fc7c67cc-xqf8b_01feb2e0-a0f4-4573-8335-34e364e0ef40' '/ostree/deploy/rhcos/var/log/pods/openshift-authentication_oauth-openshift-74fc7c67cc-xqf8b_01feb2e0-a0f4-4573-8335-34e364e0ef40/oauth-openshift' -> '/tmp/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-74fc7c67cc-xqf8b_01feb2e0-a0f4-4573-8335-34e364e0ef40/oauth-openshift' '/ostree/deploy/rhcos/var/log/pods/openshift-authentication_oauth-openshift-74fc7c67cc-xqf8b_01feb2e0-a0f4-4573-8335-34e364e0ef40/oauth-openshift/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-74fc7c67cc-xqf8b_01feb2e0-a0f4-4573-8335-34e364e0ef40/oauth-openshift/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-authentication_oauth-openshift-74fc7c67cc-xqf8b_01feb2e0-a0f4-4573-8335-34e364e0ef40/oauth-openshift/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-74fc7c67cc-xqf8b_01feb2e0-a0f4-4573-8335-34e364e0ef40/oauth-openshift/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_image-registry-75b7bb6564-f79cx_58adb7b7-cf6f-4b5a-a15c-00368dc5d229' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_image-registry-75b7bb6564-f79cx_58adb7b7-cf6f-4b5a-a15c-00368dc5d229' '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_image-registry-75b7bb6564-f79cx_58adb7b7-cf6f-4b5a-a15c-00368dc5d229/registry' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_image-registry-75b7bb6564-f79cx_58adb7b7-cf6f-4b5a-a15c-00368dc5d229/registry' '/ostree/deploy/rhcos/var/log/pods/openshift-image-registry_image-registry-75b7bb6564-f79cx_58adb7b7-cf6f-4b5a-a15c-00368dc5d229/registry/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-image-registry_image-registry-75b7bb6564-f79cx_58adb7b7-cf6f-4b5a-a15c-00368dc5d229/registry/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301' -> 
'/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-recovery-controller' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-recovery-controller' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-recovery-controller/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-recovery-controller/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-recovery-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-recovery-controller/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-recovery-controller/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-recovery-controller/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/wait-for-host-port' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/wait-for-host-port' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/wait-for-host-port/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/wait-for-host-port/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/wait-for-host-port/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/wait-for-host-port/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/wait-for-host-port/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/wait-for-host-port/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler/2.log' 
'/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-cert-syncer' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-cert-syncer' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-cert-syncer/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-cert-syncer/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-cert-syncer/2.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-cert-syncer/2.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-cert-syncer/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-cert-syncer/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_installer-11-crc_a45bfab9-f78b-4d72-b5b7-903e60401124' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-11-crc_a45bfab9-f78b-4d72-b5b7-903e60401124' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_installer-11-crc_a45bfab9-f78b-4d72-b5b7-903e60401124/installer' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-11-crc_a45bfab9-f78b-4d72-b5b7-903e60401124/installer' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_installer-11-crc_a45bfab9-f78b-4d72-b5b7-903e60401124/installer/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-11-crc_a45bfab9-f78b-4d72-b5b7-903e60401124/installer/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_installer-9-crc_2ad657a4-8b02-4373-8d0d-b0e25345dc90' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-9-crc_2ad657a4-8b02-4373-8d0d-b0e25345dc90' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_installer-9-crc_2ad657a4-8b02-4373-8d0d-b0e25345dc90/installer' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-9-crc_2ad657a4-8b02-4373-8d0d-b0e25345dc90/installer' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_installer-9-crc_2ad657a4-8b02-4373-8d0d-b0e25345dc90/installer/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-9-crc_2ad657a4-8b02-4373-8d0d-b0e25345dc90/installer/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_collect-profiles-29329740-2jhbz_f2d0e16a-3cb6-4824-91ad-bb8d11be9bed' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29329740-2jhbz_f2d0e16a-3cb6-4824-91ad-bb8d11be9bed' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_collect-profiles-29329740-2jhbz_f2d0e16a-3cb6-4824-91ad-bb8d11be9bed/collect-profiles' -> 
'/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29329740-2jhbz_f2d0e16a-3cb6-4824-91ad-bb8d11be9bed/collect-profiles' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_collect-profiles-29329740-2jhbz_f2d0e16a-3cb6-4824-91ad-bb8d11be9bed/collect-profiles/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29329740-2jhbz_f2d0e16a-3cb6-4824-91ad-bb8d11be9bed/collect-profiles/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_installer-10-crc_79050916-d488-4806-b556-1b0078b31e53' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-10-crc_79050916-d488-4806-b556-1b0078b31e53' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_installer-10-crc_79050916-d488-4806-b556-1b0078b31e53/installer' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-10-crc_79050916-d488-4806-b556-1b0078b31e53/installer' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_installer-10-crc_79050916-d488-4806-b556-1b0078b31e53/installer/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-10-crc_79050916-d488-4806-b556-1b0078b31e53/installer/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_revision-pruner-9-crc_a0453d24-e872-43af-9e7a-86227c26d200' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-9-crc_a0453d24-e872-43af-9e7a-86227c26d200' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_revision-pruner-9-crc_a0453d24-e872-43af-9e7a-86227c26d200/pruner' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-9-crc_a0453d24-e872-43af-9e7a-86227c26d200/pruner' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_revision-pruner-9-crc_a0453d24-e872-43af-9e7a-86227c26d200/pruner/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-9-crc_a0453d24-e872-43af-9e7a-86227c26d200/pruner/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-cert-syncer' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-cert-syncer' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-cert-syncer/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-cert-syncer/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-cert-regeneration-controller' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-cert-regeneration-controller' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-cert-regeneration-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-cert-regeneration-controller/0.log' 
'/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-insecure-readyz' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-insecure-readyz' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-insecure-readyz/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-insecure-readyz/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-check-endpoints' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-check-endpoints' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-check-endpoints/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-check-endpoints/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/setup' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/setup' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/setup/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/setup/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_marketplace-operator-8b455464d-bf2z9_58097cde-9416-4150-be4a-25b53f7fb3fc' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_marketplace-operator-8b455464d-bf2z9_58097cde-9416-4150-be4a-25b53f7fb3fc' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_marketplace-operator-8b455464d-bf2z9_58097cde-9416-4150-be4a-25b53f7fb3fc/marketplace-operator' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_marketplace-operator-8b455464d-bf2z9_58097cde-9416-4150-be4a-25b53f7fb3fc/marketplace-operator' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_marketplace-operator-8b455464d-bf2z9_58097cde-9416-4150-be4a-25b53f7fb3fc/marketplace-operator/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_marketplace-operator-8b455464d-bf2z9_58097cde-9416-4150-be4a-25b53f7fb3fc/marketplace-operator/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_revision-pruner-11-crc_1784282a-268d-4e44-a766-43281414e2dc' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-11-crc_1784282a-268d-4e44-a766-43281414e2dc' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_revision-pruner-11-crc_1784282a-268d-4e44-a766-43281414e2dc/pruner' -> 
'/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-11-crc_1784282a-268d-4e44-a766-43281414e2dc/pruner' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_revision-pruner-11-crc_1784282a-268d-4e44-a766-43281414e2dc/pruner/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-11-crc_1784282a-268d-4e44-a766-43281414e2dc/pruner/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_collect-profiles-29251935-d7x6j_51936587-a4af-470d-ad92-8ab9062cbc72' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29251935-d7x6j_51936587-a4af-470d-ad92-8ab9062cbc72' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_collect-profiles-29251935-d7x6j_51936587-a4af-470d-ad92-8ab9062cbc72/collect-profiles' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29251935-d7x6j_51936587-a4af-470d-ad92-8ab9062cbc72/collect-profiles' '/ostree/deploy/rhcos/var/log/pods/openshift-operator-lifecycle-manager_collect-profiles-29251935-d7x6j_51936587-a4af-470d-ad92-8ab9062cbc72/collect-profiles/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29251935-d7x6j_51936587-a4af-470d-ad92-8ab9062cbc72/collect-profiles/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_revision-pruner-8-crc_72854c1e-5ae2-4ed6-9e50-ff3bccde2635' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-8-crc_72854c1e-5ae2-4ed6-9e50-ff3bccde2635' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_revision-pruner-8-crc_72854c1e-5ae2-4ed6-9e50-ff3bccde2635/pruner' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-8-crc_72854c1e-5ae2-4ed6-9e50-ff3bccde2635/pruner' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-controller-manager_revision-pruner-8-crc_72854c1e-5ae2-4ed6-9e50-ff3bccde2635/pruner/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-8-crc_72854c1e-5ae2-4ed6-9e50-ff3bccde2635/pruner/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-service-ca_service-ca-666f99b6f-kk8kg_e4a7de23-6134-4044-902a-0900dc04a501' -> '/tmp/crc-logs-artifacts/pods/openshift-service-ca_service-ca-666f99b6f-kk8kg_e4a7de23-6134-4044-902a-0900dc04a501' '/ostree/deploy/rhcos/var/log/pods/openshift-service-ca_service-ca-666f99b6f-kk8kg_e4a7de23-6134-4044-902a-0900dc04a501/service-ca-controller' -> '/tmp/crc-logs-artifacts/pods/openshift-service-ca_service-ca-666f99b6f-kk8kg_e4a7de23-6134-4044-902a-0900dc04a501/service-ca-controller' '/ostree/deploy/rhcos/var/log/pods/openshift-service-ca_service-ca-666f99b6f-kk8kg_e4a7de23-6134-4044-902a-0900dc04a501/service-ca-controller/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-service-ca_service-ca-666f99b6f-kk8kg_e4a7de23-6134-4044-902a-0900dc04a501/service-ca-controller/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-service-ca_service-ca-666f99b6f-kk8kg_e4a7de23-6134-4044-902a-0900dc04a501/service-ca-controller/1.log' -> '/tmp/crc-logs-artifacts/pods/openshift-service-ca_service-ca-666f99b6f-kk8kg_e4a7de23-6134-4044-902a-0900dc04a501/service-ca-controller/1.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469' 
'/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/extract-utilities' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/extract-utilities' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/extract-utilities/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/extract-utilities/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/extract-content' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/extract-content' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/extract-content/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/extract-content/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/registry-server' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/registry-server' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/registry-server/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/registry-server/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_installer-12-crc_3557248c-8f70-4165-aa66-8df983e7e01a' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-12-crc_3557248c-8f70-4165-aa66-8df983e7e01a' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_installer-12-crc_3557248c-8f70-4165-aa66-8df983e7e01a/installer' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-12-crc_3557248c-8f70-4165-aa66-8df983e7e01a/installer' '/ostree/deploy/rhcos/var/log/pods/openshift-kube-apiserver_installer-12-crc_3557248c-8f70-4165-aa66-8df983e7e01a/installer/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-12-crc_3557248c-8f70-4165-aa66-8df983e7e01a/installer/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/extract-utilities' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/extract-utilities' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/extract-utilities/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/extract-utilities/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/extract-content' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/extract-content' 
'/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/extract-content/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/extract-content/0.log' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/registry-server' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/registry-server' '/ostree/deploy/rhcos/var/log/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/registry-server/0.log' -> '/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/registry-server/0.log' home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_000_copy_logs_from_crc.log0000644000175000017500000050027015071030333030317 0ustar zuulzuulExecuting: program /usr/bin/ssh host api.crc.testing, user core, command sftp OpenSSH_9.9p1, OpenSSL 3.5.1 1 Jul 2025 debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug1: re-parsing configuration debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: Connecting to api.crc.testing [38.102.83.110] port 22. debug1: Connection established. debug1: identity file /home/zuul/.ssh/id_cifw type 2 debug1: identity file /home/zuul/.ssh/id_cifw-cert type -1 debug1: Local version string SSH-2.0-OpenSSH_9.9 debug1: Remote protocol version 2.0, remote software version OpenSSH_8.7 debug1: compat_banner: match: OpenSSH_8.7 pat OpenSSH* compat 0x04000000 debug1: Authenticating to api.crc.testing:22 as 'core' debug1: load_hostkeys: fopen /home/zuul/.ssh/known_hosts2: No such file or directory debug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory debug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or directory debug1: SSH2_MSG_KEXINIT sent debug1: SSH2_MSG_KEXINIT received debug1: kex: algorithm: curve25519-sha256 debug1: kex: host key algorithm: ssh-ed25519 debug1: kex: server->client cipher: aes256-gcm@openssh.com MAC: compression: none debug1: kex: client->server cipher: aes256-gcm@openssh.com MAC: compression: none debug1: kex: curve25519-sha256 need=32 dh_need=32 debug1: kex: curve25519-sha256 need=32 dh_need=32 debug1: expecting SSH2_MSG_KEX_ECDH_REPLY debug1: SSH2_MSG_KEX_ECDH_REPLY received debug1: Server host key: ssh-ed25519 SHA256:/ZfZ15bRL0d31T2CAq03Iw4h8DAqA2+9vySbGcnzmJo debug1: load_hostkeys: fopen /home/zuul/.ssh/known_hosts2: No such file or directory debug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory debug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or directory debug1: Host 'api.crc.testing' is known and matches the ED25519 host key. 
debug1: Found key in /home/zuul/.ssh/known_hosts:42 debug1: ssh_packet_send2_wrapped: resetting send seqnr 3 debug1: rekey out after 4294967296 blocks debug1: SSH2_MSG_NEWKEYS sent debug1: expecting SSH2_MSG_NEWKEYS debug1: ssh_packet_read_poll2: resetting read seqnr 3 debug1: SSH2_MSG_NEWKEYS received debug1: rekey in after 4294967296 blocks debug1: SSH2_MSG_EXT_INFO received debug1: kex_ext_info_client_parse: server-sig-algs= debug1: SSH2_MSG_SERVICE_ACCEPT received debug1: Authentications that can continue: publickey,gssapi-keyex,gssapi-with-mic debug1: Next authentication method: gssapi-with-mic debug1: No credentials were supplied, or the credentials were unavailable or inaccessible No Kerberos credentials available (default cache: KCM:) debug1: No credentials were supplied, or the credentials were unavailable or inaccessible No Kerberos credentials available (default cache: KCM:) debug1: Next authentication method: publickey debug1: Will attempt key: /home/zuul/.ssh/id_cifw ECDSA SHA256:G7Vn4QzA6Kjc401Jxpc9tDdir/4zpvt8qyRhwwPUE6U explicit debug1: Offering public key: /home/zuul/.ssh/id_cifw ECDSA SHA256:G7Vn4QzA6Kjc401Jxpc9tDdir/4zpvt8qyRhwwPUE6U explicit debug1: Server accepts key: /home/zuul/.ssh/id_cifw ECDSA SHA256:G7Vn4QzA6Kjc401Jxpc9tDdir/4zpvt8qyRhwwPUE6U explicit Authenticated to api.crc.testing ([38.102.83.110]:22) using "publickey". debug1: pkcs11_del_provider: called, provider_id = (null) debug1: channel 0: new session [client-session] (inactive timeout: 0) debug1: Requesting no-more-sessions@openssh.com debug1: Entering interactive session. debug1: pledge: filesystem debug1: client_input_global_request: rtype hostkeys-00@openssh.com want_reply 0 debug1: client_input_hostkeys: searching /home/zuul/.ssh/known_hosts for api.crc.testing / (none) debug1: client_input_hostkeys: searching /home/zuul/.ssh/known_hosts2 for api.crc.testing / (none) debug1: client_input_hostkeys: hostkeys file /home/zuul/.ssh/known_hosts2 does not exist debug1: client_input_hostkeys: no new or deprecated keys from server debug1: Remote: /var/home/core/.ssh/authorized_keys:28: key options: agent-forwarding port-forwarding pty user-rc x11-forwarding debug1: Remote: /var/home/core/.ssh/authorized_keys:28: key options: agent-forwarding port-forwarding pty user-rc x11-forwarding debug1: Sending subsystem: sftp debug1: pledge: fork scp: debug1: Fetching /tmp/crc-logs-artifacts/ to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/registry-server/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/registry-server/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/registry-server/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/extract-content/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/extract-content/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/extract-content/0.log failed scp: remote open 
"/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/extract-utilities/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/extract-utilities/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/extract-utilities/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-12-crc_3557248c-8f70-4165-aa66-8df983e7e01a/installer/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-12-crc_3557248c-8f70-4165-aa66-8df983e7e01a/installer/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-12-crc_3557248c-8f70-4165-aa66-8df983e7e01a/installer/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/registry-server/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/registry-server/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/registry-server/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/extract-content/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/extract-content/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/extract-content/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/extract-utilities/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/extract-utilities/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/extract-utilities/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-service-ca_service-ca-666f99b6f-kk8kg_e4a7de23-6134-4044-902a-0900dc04a501/service-ca-controller/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-service-ca_service-ca-666f99b6f-kk8kg_e4a7de23-6134-4044-902a-0900dc04a501/service-ca-controller/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-service-ca_service-ca-666f99b6f-kk8kg_e4a7de23-6134-4044-902a-0900dc04a501/service-ca-controller/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-service-ca_service-ca-666f99b6f-kk8kg_e4a7de23-6134-4044-902a-0900dc04a501/service-ca-controller/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-service-ca_service-ca-666f99b6f-kk8kg_e4a7de23-6134-4044-902a-0900dc04a501/service-ca-controller/0.log to 
/home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-service-ca_service-ca-666f99b6f-kk8kg_e4a7de23-6134-4044-902a-0900dc04a501/service-ca-controller/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-8-crc_72854c1e-5ae2-4ed6-9e50-ff3bccde2635/pruner/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-8-crc_72854c1e-5ae2-4ed6-9e50-ff3bccde2635/pruner/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-8-crc_72854c1e-5ae2-4ed6-9e50-ff3bccde2635/pruner/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29251935-d7x6j_51936587-a4af-470d-ad92-8ab9062cbc72/collect-profiles/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29251935-d7x6j_51936587-a4af-470d-ad92-8ab9062cbc72/collect-profiles/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29251935-d7x6j_51936587-a4af-470d-ad92-8ab9062cbc72/collect-profiles/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-11-crc_1784282a-268d-4e44-a766-43281414e2dc/pruner/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-11-crc_1784282a-268d-4e44-a766-43281414e2dc/pruner/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-11-crc_1784282a-268d-4e44-a766-43281414e2dc/pruner/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-marketplace_marketplace-operator-8b455464d-bf2z9_58097cde-9416-4150-be4a-25b53f7fb3fc/marketplace-operator/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-marketplace_marketplace-operator-8b455464d-bf2z9_58097cde-9416-4150-be4a-25b53f7fb3fc/marketplace-operator/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_marketplace-operator-8b455464d-bf2z9_58097cde-9416-4150-be4a-25b53f7fb3fc/marketplace-operator/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/setup/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/setup/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/setup/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-check-endpoints/0.log": Permission denied scp: Download of file 
/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-check-endpoints/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-check-endpoints/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-insecure-readyz/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-insecure-readyz/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-insecure-readyz/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-cert-regeneration-controller/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-cert-regeneration-controller/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-cert-regeneration-controller/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-cert-syncer/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-cert-syncer/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-cert-syncer/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-9-crc_a0453d24-e872-43af-9e7a-86227c26d200/pruner/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-9-crc_a0453d24-e872-43af-9e7a-86227c26d200/pruner/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-9-crc_a0453d24-e872-43af-9e7a-86227c26d200/pruner/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-10-crc_79050916-d488-4806-b556-1b0078b31e53/installer/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-10-crc_79050916-d488-4806-b556-1b0078b31e53/installer/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-10-crc_79050916-d488-4806-b556-1b0078b31e53/installer/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29329740-2jhbz_f2d0e16a-3cb6-4824-91ad-bb8d11be9bed/collect-profiles/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29329740-2jhbz_f2d0e16a-3cb6-4824-91ad-bb8d11be9bed/collect-profiles/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29329740-2jhbz_f2d0e16a-3cb6-4824-91ad-bb8d11be9bed/collect-profiles/0.log failed scp: remote 
open "/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-9-crc_2ad657a4-8b02-4373-8d0d-b0e25345dc90/installer/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-9-crc_2ad657a4-8b02-4373-8d0d-b0e25345dc90/installer/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-9-crc_2ad657a4-8b02-4373-8d0d-b0e25345dc90/installer/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-11-crc_a45bfab9-f78b-4d72-b5b7-903e60401124/installer/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-11-crc_a45bfab9-f78b-4d72-b5b7-903e60401124/installer/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-11-crc_a45bfab9-f78b-4d72-b5b7-903e60401124/installer/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-cert-syncer/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-cert-syncer/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-cert-syncer/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-cert-syncer/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-cert-syncer/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-cert-syncer/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-cert-syncer/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-cert-syncer/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-cert-syncer/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler/2.log to 
/home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/wait-for-host-port/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/wait-for-host-port/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/wait-for-host-port/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/wait-for-host-port/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/wait-for-host-port/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/wait-for-host-port/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/wait-for-host-port/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/wait-for-host-port/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/wait-for-host-port/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-recovery-controller/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-recovery-controller/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-recovery-controller/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-recovery-controller/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-recovery-controller/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-recovery-controller/0.log failed scp: remote open 
"/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-recovery-controller/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-recovery-controller/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-recovery-controller/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-image-registry_image-registry-75b7bb6564-f79cx_58adb7b7-cf6f-4b5a-a15c-00368dc5d229/registry/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-image-registry_image-registry-75b7bb6564-f79cx_58adb7b7-cf6f-4b5a-a15c-00368dc5d229/registry/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_image-registry-75b7bb6564-f79cx_58adb7b7-cf6f-4b5a-a15c-00368dc5d229/registry/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-74fc7c67cc-xqf8b_01feb2e0-a0f4-4573-8335-34e364e0ef40/oauth-openshift/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-74fc7c67cc-xqf8b_01feb2e0-a0f4-4573-8335-34e364e0ef40/oauth-openshift/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-74fc7c67cc-xqf8b_01feb2e0-a0f4-4573-8335-34e364e0ef40/oauth-openshift/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-74fc7c67cc-xqf8b_01feb2e0-a0f4-4573-8335-34e364e0ef40/oauth-openshift/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-74fc7c67cc-xqf8b_01feb2e0-a0f4-4573-8335-34e364e0ef40/oauth-openshift/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-74fc7c67cc-xqf8b_01feb2e0-a0f4-4573-8335-34e364e0ef40/oauth-openshift/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-wwpnd_2b6d14a5-ca00-40c7-af7a-051a98a24eed/iptables-alerter/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-wwpnd_2b6d14a5-ca00-40c7-af7a-051a98a24eed/iptables-alerter/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-wwpnd_2b6d14a5-ca00-40c7-af7a-051a98a24eed/iptables-alerter/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-wwpnd_2b6d14a5-ca00-40c7-af7a-051a98a24eed/iptables-alerter/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-wwpnd_2b6d14a5-ca00-40c7-af7a-051a98a24eed/iptables-alerter/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-wwpnd_2b6d14a5-ca00-40c7-af7a-051a98a24eed/iptables-alerter/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-f7c6d88df-q2fnv_cf1a8966-f594-490a-9fbb-eec5bafd13d3/migrator/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-f7c6d88df-q2fnv_cf1a8966-f594-490a-9fbb-eec5bafd13d3/migrator/1.log to 
/home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-f7c6d88df-q2fnv_cf1a8966-f594-490a-9fbb-eec5bafd13d3/migrator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-f7c6d88df-q2fnv_cf1a8966-f594-490a-9fbb-eec5bafd13d3/migrator/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-f7c6d88df-q2fnv_cf1a8966-f594-490a-9fbb-eec5bafd13d3/migrator/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-f7c6d88df-q2fnv_cf1a8966-f594-490a-9fbb-eec5bafd13d3/migrator/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-source-5c5478f8c-vqvt7_d0f40333-c860-4c04-8058-a0bf572dcf12/check-endpoints/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-source-5c5478f8c-vqvt7_d0f40333-c860-4c04-8058-a0bf572dcf12/check-endpoints/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-source-5c5478f8c-vqvt7_d0f40333-c860-4c04-8058-a0bf572dcf12/check-endpoints/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-source-5c5478f8c-vqvt7_d0f40333-c860-4c04-8058-a0bf572dcf12/check-endpoints/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-source-5c5478f8c-vqvt7_d0f40333-c860-4c04-8058-a0bf572dcf12/check-endpoints/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-source-5c5478f8c-vqvt7_d0f40333-c860-4c04-8058-a0bf572dcf12/check-endpoints/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/fix-audit-permissions/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/fix-audit-permissions/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/fix-audit-permissions/1.log failed scp: remote open 
"/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/fix-audit-permissions/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/fix-audit-permissions/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/fix-audit-permissions/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver-check-endpoints/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver-check-endpoints/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver-check-endpoints/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver-check-endpoints/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver-check-endpoints/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver-check-endpoints/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-dns_node-resolver-dn27q_6a23c0ee-5648-448c-b772-83dced2891ce/dns-node-resolver/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-dns_node-resolver-dn27q_6a23c0ee-5648-448c-b772-83dced2891ce/dns-node-resolver/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_node-resolver-dn27q_6a23c0ee-5648-448c-b772-83dced2891ce/dns-node-resolver/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-dns_node-resolver-dn27q_6a23c0ee-5648-448c-b772-83dced2891ce/dns-node-resolver/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-dns_node-resolver-dn27q_6a23c0ee-5648-448c-b772-83dced2891ce/dns-node-resolver/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_node-resolver-dn27q_6a23c0ee-5648-448c-b772-83dced2891ce/dns-node-resolver/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905/kube-scheduler-operator-container/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905/kube-scheduler-operator-container/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905/kube-scheduler-operator-container/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905/kube-scheduler-operator-container/0.log": Permission denied scp: Download of file 
/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905/kube-scheduler-operator-container/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905/kube-scheduler-operator-container/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905/kube-scheduler-operator-container/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905/kube-scheduler-operator-container/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905/kube-scheduler-operator-container/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2/network-operator/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2/network-operator/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2/network-operator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2/network-operator/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2/network-operator/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2/network-operator/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2/network-operator/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2/network-operator/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2/network-operator/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-route-controller-manager_route-controller-manager-776b8b7477-sfpvs_21d29937-debd-4407-b2b1-d1053cb0f342/route-controller-manager/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-route-controller-manager_route-controller-manager-776b8b7477-sfpvs_21d29937-debd-4407-b2b1-d1053cb0f342/route-controller-manager/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-route-controller-manager_route-controller-manager-776b8b7477-sfpvs_21d29937-debd-4407-b2b1-d1053cb0f342/route-controller-manager/0.log failed scp: remote open 
"/tmp/crc-logs-artifacts/pods/openshift-route-controller-manager_route-controller-manager-776b8b7477-sfpvs_21d29937-debd-4407-b2b1-d1053cb0f342/route-controller-manager/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-route-controller-manager_route-controller-manager-776b8b7477-sfpvs_21d29937-debd-4407-b2b1-d1053cb0f342/route-controller-manager/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-route-controller-manager_route-controller-manager-776b8b7477-sfpvs_21d29937-debd-4407-b2b1-d1053cb0f342/route-controller-manager/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c/download-server/7.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c/download-server/7.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c/download-server/7.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c/download-server/6.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c/download-server/6.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c/download-server/6.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c/download-server/5.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c/download-server/5.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c/download-server/5.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/oauth-apiserver/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/oauth-apiserver/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/oauth-apiserver/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/oauth-apiserver/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/oauth-apiserver/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/oauth-apiserver/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/fix-audit-permissions/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/fix-audit-permissions/1.log to 
/home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/fix-audit-permissions/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/fix-audit-permissions/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/fix-audit-permissions/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/fix-audit-permissions/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/kube-rbac-proxy/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/kube-rbac-proxy/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/kube-rbac-proxy/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/kube-rbac-proxy/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/kube-rbac-proxy/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/kube-rbac-proxy/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/machine-api-operator/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/machine-api-operator/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/machine-api-operator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/machine-api-operator/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/machine-api-operator/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/machine-api-operator/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/machine-api-operator/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/machine-api-operator/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/machine-api-operator/2.log failed scp: remote open 
"/tmp/crc-logs-artifacts/pods/openshift-console-operator_console-conversion-webhook-595f9969b-l6z49_59748b9b-c309-4712-aa85-bb38d71c4915/conversion-webhook-server/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-console-operator_console-conversion-webhook-595f9969b-l6z49_59748b9b-c309-4712-aa85-bb38d71c4915/conversion-webhook-server/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console-operator_console-conversion-webhook-595f9969b-l6z49_59748b9b-c309-4712-aa85-bb38d71c4915/conversion-webhook-server/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-console-operator_console-conversion-webhook-595f9969b-l6z49_59748b9b-c309-4712-aa85-bb38d71c4915/conversion-webhook-server/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-console-operator_console-conversion-webhook-595f9969b-l6z49_59748b9b-c309-4712-aa85-bb38d71c4915/conversion-webhook-server/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console-operator_console-conversion-webhook-595f9969b-l6z49_59748b9b-c309-4712-aa85-bb38d71c4915/conversion-webhook-server/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f/control-plane-machine-set-operator/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f/control-plane-machine-set-operator/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f/control-plane-machine-set-operator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f/control-plane-machine-set-operator/3.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f/control-plane-machine-set-operator/3.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f/control-plane-machine-set-operator/3.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f/control-plane-machine-set-operator/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f/control-plane-machine-set-operator/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f/control-plane-machine-set-operator/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/kube-rbac-proxy/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/kube-rbac-proxy/0.log to 
/home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/kube-rbac-proxy/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/kube-rbac-proxy/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/kube-rbac-proxy/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/kube-rbac-proxy/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/package-server-manager/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/package-server-manager/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/package-server-manager/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/package-server-manager/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/package-server-manager/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/package-server-manager/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator-watch/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator-watch/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator-watch/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator-watch/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator-watch/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator-watch/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator/1.log": Permission denied scp: Download of file 
/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/2c45b735c45341a1d77370cd8823760353056c6e1eff59259f19fde659c543fb.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/2c45b735c45341a1d77370cd8823760353056c6e1eff59259f19fde659c543fb.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/2c45b735c45341a1d77370cd8823760353056c6e1eff59259f19fde659c543fb.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/35b9f7c15e62ef6a8cb44a046562c85521277927a7458c49a1efe24eb45f23a3.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/35b9f7c15e62ef6a8cb44a046562c85521277927a7458c49a1efe24eb45f23a3.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/35b9f7c15e62ef6a8cb44a046562c85521277927a7458c49a1efe24eb45f23a3.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/kube-rbac-proxy/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/kube-rbac-proxy/1.log to 
/home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/kube-rbac-proxy/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/kube-rbac-proxy/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/kube-rbac-proxy/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/kube-rbac-proxy/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/machine-config-operator/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/machine-config-operator/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/machine-config-operator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/machine-config-operator/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/machine-config-operator/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/machine-config-operator/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-6d8474f75f-x54mh_c085412c-b875-46c9-ae3e-e6b0d8067091/olm-operator/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-6d8474f75f-x54mh_c085412c-b875-46c9-ae3e-e6b0d8067091/olm-operator/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-6d8474f75f-x54mh_c085412c-b875-46c9-ae3e-e6b0d8067091/olm-operator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-6d8474f75f-x54mh_c085412c-b875-46c9-ae3e-e6b0d8067091/olm-operator/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-6d8474f75f-x54mh_c085412c-b875-46c9-ae3e-e6b0d8067091/olm-operator/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-6d8474f75f-x54mh_c085412c-b875-46c9-ae3e-e6b0d8067091/olm-operator/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-778975cc4f-x5vcf_1a3e81c3-c292-4130-9436-f94062c91efd/controller-manager/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-778975cc4f-x5vcf_1a3e81c3-c292-4130-9436-f94062c91efd/controller-manager/0.log to 
/home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-778975cc4f-x5vcf_1a3e81c3-c292-4130-9436-f94062c91efd/controller-manager/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-778975cc4f-x5vcf_1a3e81c3-c292-4130-9436-f94062c91efd/controller-manager/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-778975cc4f-x5vcf_1a3e81c3-c292-4130-9436-f94062c91efd/controller-manager/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-778975cc4f-x5vcf_1a3e81c3-c292-4130-9436-f94062c91efd/controller-manager/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17/service-ca-operator/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17/service-ca-operator/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17/service-ca-operator/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17/service-ca-operator/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17/service-ca-operator/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17/service-ca-operator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17/service-ca-operator/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17/service-ca-operator/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17/service-ca-operator/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/webhook/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/webhook/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/webhook/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/webhook/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/webhook/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/webhook/0.log failed 
scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/approver/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/approver/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/approver/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/approver/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/approver/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/approver/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/approver/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/approver/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/approver/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1/console/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1/console/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1/console/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1/console/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1/console/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1/console/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1/console/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1/console/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1/console/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-recovery-controller/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-recovery-controller/2.log to 
/home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-recovery-controller/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-recovery-controller/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-recovery-controller/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-recovery-controller/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-recovery-controller/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-recovery-controller/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-recovery-controller/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-cert-syncer/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-cert-syncer/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-cert-syncer/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-cert-syncer/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-cert-syncer/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-cert-syncer/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-cert-syncer/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-cert-syncer/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-cert-syncer/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller/7.log": Permission denied scp: Download of file 
/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller/7.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller/7.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller/9.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller/9.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller/9.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller/8.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller/8.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller/8.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager/3.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager/3.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager/3.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager/1.log": Permission denied scp: Download of file 
/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ingress-canary_ingress-canary-2vhcn_0b5d722a-1123-4935-9740-52a08d018bc9/serve-healthcheck-canary/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ingress-canary_ingress-canary-2vhcn_0b5d722a-1123-4935-9740-52a08d018bc9/serve-healthcheck-canary/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-canary_ingress-canary-2vhcn_0b5d722a-1123-4935-9740-52a08d018bc9/serve-healthcheck-canary/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ingress-canary_ingress-canary-2vhcn_0b5d722a-1123-4935-9740-52a08d018bc9/serve-healthcheck-canary/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ingress-canary_ingress-canary-2vhcn_0b5d722a-1123-4935-9740-52a08d018bc9/serve-healthcheck-canary/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-canary_ingress-canary-2vhcn_0b5d722a-1123-4935-9740-52a08d018bc9/serve-healthcheck-canary/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/extract-content/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/extract-content/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/extract-content/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/extract-utilities/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/extract-utilities/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/extract-utilities/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/registry-server/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/registry-server/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/registry-server/0.log failed scp: remote open 
"/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_catalog-operator-857456c46-7f5wf_8a5ae51d-d173-4531-8975-f164c975ce1f/catalog-operator/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_catalog-operator-857456c46-7f5wf_8a5ae51d-d173-4531-8975-f164c975ce1f/catalog-operator/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_catalog-operator-857456c46-7f5wf_8a5ae51d-d173-4531-8975-f164c975ce1f/catalog-operator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_catalog-operator-857456c46-7f5wf_8a5ae51d-d173-4531-8975-f164c975ce1f/catalog-operator/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_catalog-operator-857456c46-7f5wf_8a5ae51d-d173-4531-8975-f164c975ce1f/catalog-operator/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_catalog-operator-857456c46-7f5wf_8a5ae51d-d173-4531-8975-f164c975ce1f/catalog-operator/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e/authentication-operator/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e/authentication-operator/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e/authentication-operator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e/authentication-operator/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e/authentication-operator/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e/authentication-operator/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e/authentication-operator/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e/authentication-operator/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e/authentication-operator/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kube-rbac-proxy-ovn-metrics/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kube-rbac-proxy-ovn-metrics/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kube-rbac-proxy-ovn-metrics/0.log failed scp: remote open 
"/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kube-rbac-proxy-node/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kube-rbac-proxy-node/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kube-rbac-proxy-node/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovn-acl-logging/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovn-acl-logging/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovn-acl-logging/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovnkube-controller/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovnkube-controller/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovnkube-controller/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovn-controller/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovn-controller/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovn-controller/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/sbdb/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/sbdb/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/sbdb/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kubecfg-setup/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kubecfg-setup/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kubecfg-setup/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/nbdb/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/nbdb/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/nbdb/0.log failed scp: remote open 
"/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/northd/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/northd/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/northd/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-config-operator/3.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-config-operator/3.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-config-operator/3.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-config-operator/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-config-operator/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-config-operator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-config-operator/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-config-operator/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-config-operator/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-api/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-api/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-api/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-api/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-api/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-api/1.log failed scp: remote open 
"/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-10-crc_2f155735-a9be-4621-a5f2-5ab4b6957acd/pruner/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-10-crc_2f155735-a9be-4621-a5f2-5ab4b6957acd/pruner/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-10-crc_2f155735-a9be-4621-a5f2-5ab4b6957acd/pruner/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-v65wr_bf1a8b70-3856-486f-9912-a2de1d57c3fb/machine-config-server/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-v65wr_bf1a8b70-3856-486f-9912-a2de1d57c3fb/machine-config-server/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-v65wr_bf1a8b70-3856-486f-9912-a2de1d57c3fb/machine-config-server/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-v65wr_bf1a8b70-3856-486f-9912-a2de1d57c3fb/machine-config-server/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-v65wr_bf1a8b70-3856-486f-9912-a2de1d57c3fb/machine-config-server/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-v65wr_bf1a8b70-3856-486f-9912-a2de1d57c3fb/machine-config-server/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/5.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/5.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/5.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/4.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/4.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/4.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/8.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/8.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/8.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/9.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/9.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus/9.log failed scp: remote open 
"/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-webhook-7f9f8648b9-lnppn_a088e670-59a9-490f-b5df-321b48308e10/cert-manager-webhook/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-webhook-7f9f8648b9-lnppn_a088e670-59a9-490f-b5df-321b48308e10/cert-manager-webhook/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-webhook-7f9f8648b9-lnppn_a088e670-59a9-490f-b5df-321b48308e10/cert-manager-webhook/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-v54bt_34a48baf-1bee-4921-8bb2-9b7320e76f79/network-check-target-container/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-v54bt_34a48baf-1bee-4921-8bb2-9b7320e76f79/network-check-target-container/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-v54bt_34a48baf-1bee-4921-8bb2-9b7320e76f79/network-check-target-container/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-v54bt_34a48baf-1bee-4921-8bb2-9b7320e76f79/network-check-target-container/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-v54bt_34a48baf-1bee-4921-8bb2-9b7320e76f79/network-check-target-container/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-v54bt_34a48baf-1bee-4921-8bb2-9b7320e76f79/network-check-target-container/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-image-registry_node-ca-l92hr_f8175ef1-0983-4bfe-a64e-fc6f5c5f7d2e/node-ca/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-image-registry_node-ca-l92hr_f8175ef1-0983-4bfe-a64e-fc6f5c5f7d2e/node-ca/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_node-ca-l92hr_f8175ef1-0983-4bfe-a64e-fc6f5c5f7d2e/node-ca/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-image-registry_node-ca-l92hr_f8175ef1-0983-4bfe-a64e-fc6f5c5f7d2e/node-ca/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-image-registry_node-ca-l92hr_f8175ef1-0983-4bfe-a64e-fc6f5c5f7d2e/node-ca/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_node-ca-l92hr_f8175ef1-0983-4bfe-a64e-fc6f5c5f7d2e/node-ca/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/4.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/4.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/4.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/5.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/5.log to 
/home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/5.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/3.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/3.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router/3.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e/cluster-image-registry-operator/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e/cluster-image-registry-operator/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e/cluster-image-registry-operator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e/cluster-image-registry-operator/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e/cluster-image-registry-operator/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e/cluster-image-registry-operator/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e/cluster-image-registry-operator/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e/cluster-image-registry-operator/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e/cluster-image-registry-operator/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d/etcd-operator/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d/etcd-operator/1.log to 
/home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d/etcd-operator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d/etcd-operator/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d/etcd-operator/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d/etcd-operator/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d/etcd-operator/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d/etcd-operator/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d/etcd-operator/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7/kube-storage-version-migrator-operator/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7/kube-storage-version-migrator-operator/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7/kube-storage-version-migrator-operator/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7/kube-storage-version-migrator-operator/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7/kube-storage-version-migrator-operator/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7/kube-storage-version-migrator-operator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7/kube-storage-version-migrator-operator/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7/kube-storage-version-migrator-operator/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7/kube-storage-version-migrator-operator/0.log failed scp: remote open 
"/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_packageserver-8464bcc55b-sjnqz_bd556935-a077-45df-ba3f-d42c39326ccd/packageserver/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_packageserver-8464bcc55b-sjnqz_bd556935-a077-45df-ba3f-d42c39326ccd/packageserver/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_packageserver-8464bcc55b-sjnqz_bd556935-a077-45df-ba3f-d42c39326ccd/packageserver/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_packageserver-8464bcc55b-sjnqz_bd556935-a077-45df-ba3f-d42c39326ccd/packageserver/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_packageserver-8464bcc55b-sjnqz_bd556935-a077-45df-ba3f-d42c39326ccd/packageserver/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_packageserver-8464bcc55b-sjnqz_bd556935-a077-45df-ba3f-d42c39326ccd/packageserver/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-cluster-version_cluster-version-operator-6d5d9649f6-x6d46_9fb762d1-812f-43f1-9eac-68034c1ecec7/cluster-version-operator/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-cluster-version_cluster-version-operator-6d5d9649f6-x6d46_9fb762d1-812f-43f1-9eac-68034c1ecec7/cluster-version-operator/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-version_cluster-version-operator-6d5d9649f6-x6d46_9fb762d1-812f-43f1-9eac-68034c1ecec7/cluster-version-operator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-cluster-version_cluster-version-operator-6d5d9649f6-x6d46_9fb762d1-812f-43f1-9eac-68034c1ecec7/cluster-version-operator/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-cluster-version_cluster-version-operator-6d5d9649f6-x6d46_9fb762d1-812f-43f1-9eac-68034c1ecec7/cluster-version-operator/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-version_cluster-version-operator-6d5d9649f6-x6d46_9fb762d1-812f-43f1-9eac-68034c1ecec7/cluster-version-operator/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/kube-rbac-proxy/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/kube-rbac-proxy/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/kube-rbac-proxy/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/kube-rbac-proxy/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/kube-rbac-proxy/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/kube-rbac-proxy/0.log failed scp: remote open 
"/tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/machine-approver-controller/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/machine-approver-controller/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/machine-approver-controller/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/machine-approver-controller/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/machine-approver-controller/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/machine-approver-controller/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/machine-approver-controller/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/machine-approver-controller/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/machine-approver-controller/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/kube-rbac-proxy/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/kube-rbac-proxy/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/kube-rbac-proxy/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/kube-rbac-proxy/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/kube-rbac-proxy/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/kube-rbac-proxy/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/machine-config-controller/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/machine-config-controller/1.log to 
/home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/machine-config-controller/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/machine-config-controller/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/machine-config-controller/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/machine-config-controller/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/kube-rbac-proxy/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/kube-rbac-proxy/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/kube-rbac-proxy/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/kube-rbac-proxy/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/kube-rbac-proxy/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/kube-rbac-proxy/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/multus-admission-controller/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/multus-admission-controller/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/multus-admission-controller/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/multus-admission-controller/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/multus-admission-controller/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/multus-admission-controller/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29251950-x8jjd_ad171c4b-8408-4370-8e86-502999788ddb/collect-profiles/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29251950-x8jjd_ad171c4b-8408-4370-8e86-502999788ddb/collect-profiles/0.log to 
/home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29251950-x8jjd_ad171c4b-8408-4370-8e86-502999788ddb/collect-profiles/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-67c98b89c8-4rplv_c282850f-1ba4-47b7-ae36-8e423c6cc9c2/cert-manager-controller/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-67c98b89c8-4rplv_c282850f-1ba4-47b7-ae36-8e423c6cc9c2/cert-manager-controller/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-67c98b89c8-4rplv_c282850f-1ba4-47b7-ae36-8e423c6cc9c2/cert-manager-controller/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_installer-8-crc_aca1f9ff-a685-4a78-b461-3931b757f754/installer/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_installer-8-crc_aca1f9ff-a685-4a78-b461-3931b757f754/installer/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_installer-8-crc_aca1f9ff-a685-4a78-b461-3931b757f754/installer/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19/kube-apiserver-operator/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19/kube-apiserver-operator/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19/kube-apiserver-operator/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19/kube-apiserver-operator/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19/kube-apiserver-operator/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19/kube-apiserver-operator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19/kube-apiserver-operator/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19/kube-apiserver-operator/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19/kube-apiserver-operator/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni-bincopy/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni-bincopy/0.log to 
/home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni-bincopy/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni-bincopy/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni-bincopy/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni-bincopy/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/routeoverride-cni/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/routeoverride-cni/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/routeoverride-cni/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/routeoverride-cni/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/routeoverride-cni/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/routeoverride-cni/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/bond-cni-plugin/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/bond-cni-plugin/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/bond-cni-plugin/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/bond-cni-plugin/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/bond-cni-plugin/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/bond-cni-plugin/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/kube-multus-additional-cni-plugins/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/kube-multus-additional-cni-plugins/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/kube-multus-additional-cni-plugins/1.log failed scp: remote open 
"/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/kube-multus-additional-cni-plugins/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/kube-multus-additional-cni-plugins/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/kube-multus-additional-cni-plugins/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/cni-plugins/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/cni-plugins/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/cni-plugins/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/cni-plugins/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/cni-plugins/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/cni-plugins/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/egress-router-binary-copy/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/egress-router-binary-copy/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/egress-router-binary-copy/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/egress-router-binary-copy/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/egress-router-binary-copy/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/egress-router-binary-copy/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni/1.log": Permission denied scp: Download of file 
/tmp/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/ovnkube-cluster-manager/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/ovnkube-cluster-manager/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/ovnkube-cluster-manager/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/ovnkube-cluster-manager/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/ovnkube-cluster-manager/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/ovnkube-cluster-manager/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/kube-rbac-proxy/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/kube-rbac-proxy/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/kube-rbac-proxy/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/kube-rbac-proxy/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/kube-rbac-proxy/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/kube-rbac-proxy/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550/openshift-controller-manager-operator/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550/openshift-controller-manager-operator/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550/openshift-controller-manager-operator/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550/openshift-controller-manager-operator/1.log": Permission 
denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550/openshift-controller-manager-operator/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550/openshift-controller-manager-operator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550/openshift-controller-manager-operator/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550/openshift-controller-manager-operator/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550/openshift-controller-manager-operator/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4/console-operator/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4/console-operator/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4/console-operator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4/console-operator/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4/console-operator/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4/console-operator/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4/console-operator/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4/console-operator/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4/console-operator/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8/openshift-apiserver-operator/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8/openshift-apiserver-operator/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8/openshift-apiserver-operator/2.log failed scp: remote open 
"/tmp/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8/openshift-apiserver-operator/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8/openshift-apiserver-operator/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8/openshift-apiserver-operator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8/openshift-apiserver-operator/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8/openshift-apiserver-operator/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8/openshift-apiserver-operator/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-cainjector-5c5695d979-h44lv_a24f2023-bbe9-44a0-b17c-b662dacc6f34/cert-manager-cainjector/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/cert-manager_cert-manager-cainjector-5c5695d979-h44lv_a24f2023-bbe9-44a0-b17c-b662dacc6f34/cert-manager-cainjector/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-cainjector-5c5695d979-h44lv_a24f2023-bbe9-44a0-b17c-b662dacc6f34/cert-manager-cainjector/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/network-metrics-daemon/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/network-metrics-daemon/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/network-metrics-daemon/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/network-metrics-daemon/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/network-metrics-daemon/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/network-metrics-daemon/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/kube-rbac-proxy/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/kube-rbac-proxy/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/kube-rbac-proxy/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/kube-rbac-proxy/0.log": Permission denied scp: Download of file 
/tmp/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/kube-rbac-proxy/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/kube-rbac-proxy/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/registry-server/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/registry-server/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/registry-server/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/extract-content/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/extract-content/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/extract-content/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/extract-utilities/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/extract-utilities/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/extract-utilities/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/hostpath-provisioner/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/hostpath-provisioner/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/hostpath-provisioner/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/hostpath-provisioner/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/hostpath-provisioner/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/hostpath-provisioner/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/node-driver-registrar/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/node-driver-registrar/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/node-driver-registrar/1.log failed scp: remote open 
"/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/node-driver-registrar/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/node-driver-registrar/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/node-driver-registrar/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/csi-provisioner/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/csi-provisioner/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/csi-provisioner/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/csi-provisioner/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/csi-provisioner/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/csi-provisioner/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/liveness-probe/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/liveness-probe/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/liveness-probe/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/liveness-probe/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/liveness-probe/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/liveness-probe/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/dns-operator/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/dns-operator/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/dns-operator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/dns-operator/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/dns-operator/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/dns-operator/0.log failed 
scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/kube-rbac-proxy/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/kube-rbac-proxy/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/kube-rbac-proxy/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/kube-rbac-proxy/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/kube-rbac-proxy/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/kube-rbac-proxy/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-10-retry-1-crc_dc02677d-deed-4cc9-bb8c-0dd300f83655/installer/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-10-retry-1-crc_dc02677d-deed-4cc9-bb8c-0dd300f83655/installer/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-10-retry-1-crc_dc02677d-deed-4cc9-bb8c-0dd300f83655/installer/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_installer-7-crc_b57cce81-8ea0-4c4d-aae1-ee024d201c15/installer/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-scheduler_installer-7-crc_b57cce81-8ea0-4c4d-aae1-ee024d201c15/installer/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_installer-7-crc_b57cce81-8ea0-4c4d-aae1-ee024d201c15/installer/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-resources-copy/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-resources-copy/2.log to 
/home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-resources-copy/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-resources-copy/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-resources-copy/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-resources-copy/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-resources-copy/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-resources-copy/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-resources-copy/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/setup/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/setup/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/setup/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/setup/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/setup/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/setup/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/setup/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/setup/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/setup/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-metrics/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-metrics/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-metrics/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-metrics/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-metrics/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-metrics/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-metrics/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-metrics/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-metrics/0.log failed scp: remote open 
"/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-ensure-env-vars/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-ensure-env-vars/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-ensure-env-vars/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-ensure-env-vars/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-ensure-env-vars/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-ensure-env-vars/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-ensure-env-vars/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-ensure-env-vars/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-ensure-env-vars/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-readyz/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-readyz/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-readyz/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-readyz/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-readyz/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-readyz/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-readyz/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-readyz/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-readyz/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcdctl/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcdctl/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcdctl/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcdctl/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcdctl/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcdctl/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcdctl/1.log": Permission denied scp: Download of file 
/tmp/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcdctl/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcdctl/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/kube-rbac-proxy/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/kube-rbac-proxy/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/kube-rbac-proxy/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/kube-rbac-proxy/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/kube-rbac-proxy/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/kube-rbac-proxy/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/5.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/5.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/5.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/4.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/4.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/4.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/3.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/3.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/3.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator/2.log failed 
scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/dns/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/dns/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/dns/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/dns/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/dns/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/dns/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/kube-rbac-proxy/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/kube-rbac-proxy/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/kube-rbac-proxy/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/kube-rbac-proxy/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/kube-rbac-proxy/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/kube-rbac-proxy/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf/kube-controller-manager-operator/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf/kube-controller-manager-operator/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf/kube-controller-manager-operator/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf/kube-controller-manager-operator/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf/kube-controller-manager-operator/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf/kube-controller-manager-operator/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf/kube-controller-manager-operator/0.log": Permission denied scp: Download of file 
/tmp/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf/kube-controller-manager-operator/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf/kube-controller-manager-operator/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/kube-rbac-proxy-crio/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/kube-rbac-proxy-crio/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/kube-rbac-proxy-crio/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/kube-rbac-proxy-crio/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/kube-rbac-proxy-crio/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/kube-rbac-proxy-crio/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/kube-rbac-proxy-crio/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/kube-rbac-proxy-crio/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/kube-rbac-proxy-crio/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/setup/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/setup/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/setup/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/setup/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/setup/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/setup/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/setup/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/setup/1.log to 
/home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/setup/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon/5.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon/5.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon/5.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon/4.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon/4.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon/4.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon/2.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon/2.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon/2.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/kube-rbac-proxy/1.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/kube-rbac-proxy/1.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/kube-rbac-proxy/1.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/kube-rbac-proxy/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/kube-rbac-proxy/0.log to 
/home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/kube-rbac-proxy/0.log failed scp: remote open "/tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-13-crc_e4ccb32c-914e-4f5c-9d1d-50cee1da7ce8/installer/0.log": Permission denied scp: Download of file /tmp/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-13-crc_e4ccb32c-914e-4f5c-9d1d-50cee1da7ce8/installer/0.log to /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-13-crc_e4ccb32c-914e-4f5c-9d1d-50cee1da7ce8/installer/0.log failed debug1: client_input_channel_req: channel 0 rtype exit-status reply 0 debug1: channel 0: free: client-session, nchannels 1 Transferred: sent 122464, received 183920 bytes, in 0.9 seconds Bytes per second: sent 137856.6, received 207037.1 debug1: Exit status 0 home/zuul/zuul-output/logs/ci-framework-data/logs/ansible.log0000644000175000017500000050126315071030247023455 0ustar zuulzuul2025-10-06 21:09:22,224 p=27534 u=zuul n=ansible | Starting galaxy collection install process 2025-10-06 21:09:22,225 p=27534 u=zuul n=ansible | Process install dependency map 2025-10-06 21:09:36,208 p=27534 u=zuul n=ansible | Starting collection install process 2025-10-06 21:09:36,208 p=27534 u=zuul n=ansible | Installing 'cifmw.general:1.0.0+35b8986b' to '/home/zuul/.ansible/collections/ansible_collections/cifmw/general' 2025-10-06 21:09:36,658 p=27534 u=zuul n=ansible | Created collection for cifmw.general:1.0.0+35b8986b at /home/zuul/.ansible/collections/ansible_collections/cifmw/general 2025-10-06 21:09:36,659 p=27534 u=zuul n=ansible | cifmw.general:1.0.0+35b8986b was installed successfully 2025-10-06 21:09:36,659 p=27534 u=zuul n=ansible | Installing 'containers.podman:1.16.2' to '/home/zuul/.ansible/collections/ansible_collections/containers/podman' 2025-10-06 21:09:36,731 p=27534 u=zuul n=ansible | Created collection for containers.podman:1.16.2 at /home/zuul/.ansible/collections/ansible_collections/containers/podman 2025-10-06 21:09:36,731 p=27534 u=zuul n=ansible | containers.podman:1.16.2 was installed successfully 2025-10-06 21:09:36,731 p=27534 u=zuul n=ansible | Installing 'community.general:10.0.1' to '/home/zuul/.ansible/collections/ansible_collections/community/general' 2025-10-06 21:09:37,480 p=27534 u=zuul n=ansible | Created collection for community.general:10.0.1 at /home/zuul/.ansible/collections/ansible_collections/community/general 2025-10-06 21:09:37,481 p=27534 u=zuul n=ansible | community.general:10.0.1 was installed successfully 2025-10-06 21:09:37,481 p=27534 u=zuul n=ansible | Installing 'ansible.posix:1.6.2' to '/home/zuul/.ansible/collections/ansible_collections/ansible/posix' 2025-10-06 21:09:37,530 p=27534 u=zuul n=ansible | Created collection for ansible.posix:1.6.2 at /home/zuul/.ansible/collections/ansible_collections/ansible/posix 2025-10-06 21:09:37,530 p=27534 u=zuul n=ansible | ansible.posix:1.6.2 was installed successfully 2025-10-06 21:09:37,530 p=27534 u=zuul n=ansible | Installing 'ansible.utils:5.1.2' to '/home/zuul/.ansible/collections/ansible_collections/ansible/utils' 2025-10-06 21:09:37,633 p=27534 u=zuul n=ansible | Created collection for ansible.utils:5.1.2 at /home/zuul/.ansible/collections/ansible_collections/ansible/utils 2025-10-06 21:09:37,634 p=27534 u=zuul n=ansible | ansible.utils:5.1.2 was installed successfully 2025-10-06 21:09:37,634 p=27534 u=zuul n=ansible | Installing 'community.libvirt:1.3.0' 
to '/home/zuul/.ansible/collections/ansible_collections/community/libvirt' 2025-10-06 21:09:37,658 p=27534 u=zuul n=ansible | Created collection for community.libvirt:1.3.0 at /home/zuul/.ansible/collections/ansible_collections/community/libvirt 2025-10-06 21:09:37,658 p=27534 u=zuul n=ansible | community.libvirt:1.3.0 was installed successfully 2025-10-06 21:09:37,658 p=27534 u=zuul n=ansible | Installing 'community.crypto:2.22.3' to '/home/zuul/.ansible/collections/ansible_collections/community/crypto' 2025-10-06 21:09:37,809 p=27534 u=zuul n=ansible | Created collection for community.crypto:2.22.3 at /home/zuul/.ansible/collections/ansible_collections/community/crypto 2025-10-06 21:09:37,810 p=27534 u=zuul n=ansible | community.crypto:2.22.3 was installed successfully 2025-10-06 21:09:37,810 p=27534 u=zuul n=ansible | Installing 'kubernetes.core:5.0.0' to '/home/zuul/.ansible/collections/ansible_collections/kubernetes/core' 2025-10-06 21:09:37,931 p=27534 u=zuul n=ansible | Created collection for kubernetes.core:5.0.0 at /home/zuul/.ansible/collections/ansible_collections/kubernetes/core 2025-10-06 21:09:37,931 p=27534 u=zuul n=ansible | kubernetes.core:5.0.0 was installed successfully 2025-10-06 21:09:37,931 p=27534 u=zuul n=ansible | Installing 'ansible.netcommon:7.1.0' to '/home/zuul/.ansible/collections/ansible_collections/ansible/netcommon' 2025-10-06 21:09:37,996 p=27534 u=zuul n=ansible | Created collection for ansible.netcommon:7.1.0 at /home/zuul/.ansible/collections/ansible_collections/ansible/netcommon 2025-10-06 21:09:37,996 p=27534 u=zuul n=ansible | ansible.netcommon:7.1.0 was installed successfully 2025-10-06 21:09:37,996 p=27534 u=zuul n=ansible | Installing 'openstack.config_template:2.1.1' to '/home/zuul/.ansible/collections/ansible_collections/openstack/config_template' 2025-10-06 21:09:38,013 p=27534 u=zuul n=ansible | Created collection for openstack.config_template:2.1.1 at /home/zuul/.ansible/collections/ansible_collections/openstack/config_template 2025-10-06 21:09:38,013 p=27534 u=zuul n=ansible | openstack.config_template:2.1.1 was installed successfully 2025-10-06 21:09:38,013 p=27534 u=zuul n=ansible | Installing 'junipernetworks.junos:9.1.0' to '/home/zuul/.ansible/collections/ansible_collections/junipernetworks/junos' 2025-10-06 21:09:38,233 p=27534 u=zuul n=ansible | Created collection for junipernetworks.junos:9.1.0 at /home/zuul/.ansible/collections/ansible_collections/junipernetworks/junos 2025-10-06 21:09:38,233 p=27534 u=zuul n=ansible | junipernetworks.junos:9.1.0 was installed successfully 2025-10-06 21:09:38,233 p=27534 u=zuul n=ansible | Installing 'cisco.ios:9.0.3' to '/home/zuul/.ansible/collections/ansible_collections/cisco/ios' 2025-10-06 21:09:38,479 p=27534 u=zuul n=ansible | Created collection for cisco.ios:9.0.3 at /home/zuul/.ansible/collections/ansible_collections/cisco/ios 2025-10-06 21:09:38,480 p=27534 u=zuul n=ansible | cisco.ios:9.0.3 was installed successfully 2025-10-06 21:09:38,480 p=27534 u=zuul n=ansible | Installing 'mellanox.onyx:1.0.0' to '/home/zuul/.ansible/collections/ansible_collections/mellanox/onyx' 2025-10-06 21:09:38,511 p=27534 u=zuul n=ansible | Created collection for mellanox.onyx:1.0.0 at /home/zuul/.ansible/collections/ansible_collections/mellanox/onyx 2025-10-06 21:09:38,512 p=27534 u=zuul n=ansible | mellanox.onyx:1.0.0 was installed successfully 2025-10-06 21:09:38,512 p=27534 u=zuul n=ansible | Installing 'community.okd:4.0.0' to '/home/zuul/.ansible/collections/ansible_collections/community/okd' 2025-10-06 
21:09:38,540 p=27534 u=zuul n=ansible | Created collection for community.okd:4.0.0 at /home/zuul/.ansible/collections/ansible_collections/community/okd 2025-10-06 21:09:38,540 p=27534 u=zuul n=ansible | community.okd:4.0.0 was installed successfully 2025-10-06 21:09:38,540 p=27534 u=zuul n=ansible | Installing '@NAMESPACE@.@NAME@:3.1.4' to '/home/zuul/.ansible/collections/ansible_collections/@NAMESPACE@/@NAME@' 2025-10-06 21:09:38,633 p=27534 u=zuul n=ansible | Created collection for @NAMESPACE@.@NAME@:3.1.4 at /home/zuul/.ansible/collections/ansible_collections/@NAMESPACE@/@NAME@ 2025-10-06 21:09:38,633 p=27534 u=zuul n=ansible | @NAMESPACE@.@NAME@:3.1.4 was installed successfully 2025-10-06 21:09:48,364 p=28169 u=zuul n=ansible | PLAY [Remove status flag] ****************************************************** 2025-10-06 21:09:48,383 p=28169 u=zuul n=ansible | TASK [Gathering Facts ] ******************************************************** 2025-10-06 21:09:48,383 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:48 +0000 (0:00:00.036) 0:00:00.036 ******** 2025-10-06 21:09:49,294 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:09:49,309 p=28169 u=zuul n=ansible | TASK [Delete success flag if exists path={{ ansible_user_dir }}/cifmw-success, state=absent] *** 2025-10-06 21:09:49,310 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:49 +0000 (0:00:00.926) 0:00:00.962 ******** 2025-10-06 21:09:49,602 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:09:49,615 p=28169 u=zuul n=ansible | TASK [Inherit from parent scenarios if needed _raw_params=ci/playbooks/tasks/inherit_parent_scenario.yml] *** 2025-10-06 21:09:49,615 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:49 +0000 (0:00:00.305) 0:00:01.267 ******** 2025-10-06 21:09:49,678 p=28169 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/ci/playbooks/tasks/inherit_parent_scenario.yml for localhost 2025-10-06 21:09:49,729 p=28169 u=zuul n=ansible | TASK [Inherit from parent parameter file if instructed file={{ item }}] ******** 2025-10-06 21:09:49,729 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:49 +0000 (0:00:00.114) 0:00:01.382 ******** 2025-10-06 21:09:49,766 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:09:49,773 p=28169 u=zuul n=ansible | TASK [cifmw_setup : Set custom cifmw PATH reusable fact cifmw_path={{ ansible_user_dir }}/.crc/bin:{{ ansible_user_dir }}/.crc/bin/oc:{{ ansible_user_dir }}/bin:{{ ansible_env.PATH }}, cacheable=True] *** 2025-10-06 21:09:49,773 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:49 +0000 (0:00:00.043) 0:00:01.425 ******** 2025-10-06 21:09:49,810 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:09:49,816 p=28169 u=zuul n=ansible | TASK [cifmw_setup : Get customized parameters ci_framework_params={{ hostvars[inventory_hostname] | dict2items | selectattr("key", "match", "^(cifmw|pre|post)_(?!install_yamls|openshift_token|openshift_login|openshift_kubeconfig).*") | list | items2dict }}] *** 2025-10-06 21:09:49,816 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:49 +0000 (0:00:00.042) 0:00:01.468 ******** 2025-10-06 21:09:49,893 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:09:49,899 p=28169 u=zuul n=ansible | TASK [install_ca : Ensure target directory exists path={{ cifmw_install_ca_trust_dir }}, state=directory, mode=0755] *** 2025-10-06 21:09:49,899 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:49 +0000 (0:00:00.083) 0:00:01.552 ******** 2025-10-06 21:09:50,090 
p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:09:50,096 p=28169 u=zuul n=ansible | TASK [install_ca : Install internal CA from url url={{ cifmw_install_ca_url }}, dest={{ cifmw_install_ca_trust_dir }}, validate_certs={{ cifmw_install_ca_url_validate_certs | default(omit) }}, mode=0644] *** 2025-10-06 21:09:50,096 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:50 +0000 (0:00:00.197) 0:00:01.749 ******** 2025-10-06 21:09:50,115 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:09:50,123 p=28169 u=zuul n=ansible | TASK [install_ca : Install custom CA bundle from inline dest={{ cifmw_install_ca_trust_dir }}/cifmw_inline_ca_bundle.crt, content={{ cifmw_install_ca_bundle_inline }}, mode=0644] *** 2025-10-06 21:09:50,123 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:50 +0000 (0:00:00.026) 0:00:01.776 ******** 2025-10-06 21:09:50,144 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:09:50,150 p=28169 u=zuul n=ansible | TASK [install_ca : Install custom CA bundle from file dest={{ cifmw_install_ca_trust_dir }}/{{ cifmw_install_ca_bundle_src | basename }}, src={{ cifmw_install_ca_bundle_src }}, mode=0644] *** 2025-10-06 21:09:50,151 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:50 +0000 (0:00:00.027) 0:00:01.803 ******** 2025-10-06 21:09:50,169 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:09:50,176 p=28169 u=zuul n=ansible | TASK [install_ca : Update ca bundle _raw_params=update-ca-trust] *************** 2025-10-06 21:09:50,176 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:50 +0000 (0:00:00.025) 0:00:01.828 ******** 2025-10-06 21:09:51,592 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:09:51,603 p=28169 u=zuul n=ansible | TASK [repo_setup : Ensure directories are present path={{ cifmw_repo_setup_basedir }}/{{ item }}, state=directory, mode=0755] *** 2025-10-06 21:09:51,603 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:51 +0000 (0:00:01.427) 0:00:03.256 ******** 2025-10-06 21:09:51,810 p=28169 u=zuul n=ansible | changed: [localhost] => (item=tmp) 2025-10-06 21:09:51,973 p=28169 u=zuul n=ansible | changed: [localhost] => (item=artifacts/repositories) 2025-10-06 21:09:52,186 p=28169 u=zuul n=ansible | changed: [localhost] => (item=venv/repo_setup) 2025-10-06 21:09:52,202 p=28169 u=zuul n=ansible | TASK [repo_setup : Make sure git-core package is installed name=git-core, state=present] *** 2025-10-06 21:09:52,202 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:52 +0000 (0:00:00.598) 0:00:03.854 ******** 2025-10-06 21:09:53,193 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:09:53,200 p=28169 u=zuul n=ansible | TASK [repo_setup : Get repo-setup repository accept_hostkey=True, dest={{ cifmw_repo_setup_basedir }}/tmp/repo-setup, repo={{ cifmw_repo_setup_src }}] *** 2025-10-06 21:09:53,200 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:53 +0000 (0:00:00.997) 0:00:04.852 ******** 2025-10-06 21:09:54,372 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:09:54,380 p=28169 u=zuul n=ansible | TASK [repo_setup : Initialize python venv and install requirements virtualenv={{ cifmw_repo_setup_venv }}, requirements={{ cifmw_repo_setup_basedir }}/tmp/repo-setup/requirements.txt, virtualenv_command=python3 -m venv --system-site-packages --upgrade-deps] *** 2025-10-06 21:09:54,380 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:09:54 +0000 (0:00:01.179) 0:00:06.032 ******** 2025-10-06 21:10:02,777 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 
21:10:02,784 p=28169 u=zuul n=ansible | TASK [repo_setup : Install repo-setup package chdir={{ cifmw_repo_setup_basedir }}/tmp/repo-setup, creates={{ cifmw_repo_setup_venv }}/bin/repo-setup, _raw_params={{ cifmw_repo_setup_venv }}/bin/python setup.py install] *** 2025-10-06 21:10:02,784 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:02 +0000 (0:00:08.404) 0:00:14.436 ******** 2025-10-06 21:10:03,629 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:03,636 p=28169 u=zuul n=ansible | TASK [repo_setup : Set cifmw_repo_setup_dlrn_hash_tag from content provider cifmw_repo_setup_dlrn_hash_tag={{ content_provider_dlrn_md5_hash }}] *** 2025-10-06 21:10:03,636 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:03 +0000 (0:00:00.852) 0:00:15.289 ******** 2025-10-06 21:10:03,674 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:03,681 p=28169 u=zuul n=ansible | TASK [repo_setup : Run repo-setup _raw_params={{ cifmw_repo_setup_venv }}/bin/repo-setup {{ cifmw_repo_setup_promotion }} {{ cifmw_repo_setup_additional_repos }} -d {{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }} -b {{ cifmw_repo_setup_branch }} --rdo-mirror {{ cifmw_repo_setup_rdo_mirror }} {% if cifmw_repo_setup_dlrn_hash_tag | length > 0 %} --dlrn-hash-tag {{ cifmw_repo_setup_dlrn_hash_tag }} {% endif %} -o {{ cifmw_repo_setup_output }}] *** 2025-10-06 21:10:03,681 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:03 +0000 (0:00:00.044) 0:00:15.333 ******** 2025-10-06 21:10:04,351 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:04,357 p=28169 u=zuul n=ansible | TASK [repo_setup : Get component repo url={{ cifmw_repo_setup_dlrn_uri }}/{{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }}-{{ cifmw_repo_setup_branch }}/component/{{ cifmw_repo_setup_component_name }}/{{ cifmw_repo_setup_component_promotion_tag }}/delorean.repo, dest={{ cifmw_repo_setup_output }}/{{ cifmw_repo_setup_component_name }}_{{ cifmw_repo_setup_component_promotion_tag }}_delorean.repo, mode=0644] *** 2025-10-06 21:10:04,357 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:04 +0000 (0:00:00.676) 0:00:16.010 ******** 2025-10-06 21:10:04,386 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:04,393 p=28169 u=zuul n=ansible | TASK [repo_setup : Rename component repo path={{ cifmw_repo_setup_output }}/{{ cifmw_repo_setup_component_name }}_{{ cifmw_repo_setup_component_promotion_tag }}_delorean.repo, regexp=delorean-component-{{ cifmw_repo_setup_component_name }}, replace={{ cifmw_repo_setup_component_name }}-{{ cifmw_repo_setup_component_promotion_tag }}] *** 2025-10-06 21:10:04,393 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:04 +0000 (0:00:00.035) 0:00:16.046 ******** 2025-10-06 21:10:04,426 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:04,435 p=28169 u=zuul n=ansible | TASK [repo_setup : Disable component repo in current-podified dlrn repo path={{ cifmw_repo_setup_output }}/delorean.repo, section=delorean-component-{{ cifmw_repo_setup_component_name }}, option=enabled, value=0, mode=0644] *** 2025-10-06 21:10:04,435 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:04 +0000 (0:00:00.041) 0:00:16.087 ******** 2025-10-06 21:10:04,468 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:04,476 p=28169 u=zuul n=ansible | TASK [repo_setup : Run repo-setup-get-hash _raw_params={{ cifmw_repo_setup_venv }}/bin/repo-setup-get-hash --dlrn-url {{ cifmw_repo_setup_dlrn_uri[:-1] }} --os-version {{ 
cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }} --release {{ cifmw_repo_setup_branch }} {% if cifmw_repo_setup_component_name | length > 0 -%} --component {{ cifmw_repo_setup_component_name }} --tag {{ cifmw_repo_setup_component_promotion_tag }} {% else -%} --tag {{cifmw_repo_setup_promotion }} {% endif -%} {% if (cifmw_repo_setup_dlrn_hash_tag | length > 0) and (cifmw_repo_setup_component_name | length <= 0) -%} --dlrn-hash-tag {{ cifmw_repo_setup_dlrn_hash_tag }} {% endif -%} --json] *** 2025-10-06 21:10:04,476 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:04 +0000 (0:00:00.041) 0:00:16.129 ******** 2025-10-06 21:10:04,918 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:04,931 p=28169 u=zuul n=ansible | TASK [repo_setup : Dump full hash in delorean.repo.md5 file content={{ _repo_setup_json['full_hash'] }} , dest={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5, mode=0644] *** 2025-10-06 21:10:04,931 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:04 +0000 (0:00:00.454) 0:00:16.584 ******** 2025-10-06 21:10:05,576 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:05,583 p=28169 u=zuul n=ansible | TASK [repo_setup : Dump current-podified hash url={{ cifmw_repo_setup_dlrn_uri }}/{{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }}-{{ cifmw_repo_setup_branch }}/current-podified/delorean.repo.md5, dest={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5, mode=0644] *** 2025-10-06 21:10:05,583 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.651) 0:00:17.235 ******** 2025-10-06 21:10:05,616 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:05,623 p=28169 u=zuul n=ansible | TASK [repo_setup : Slurp current podified hash src={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5] *** 2025-10-06 21:10:05,623 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.040) 0:00:17.275 ******** 2025-10-06 21:10:05,639 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:05,645 p=28169 u=zuul n=ansible | TASK [repo_setup : Update the value of full_hash _repo_setup_json={{ _repo_setup_json | combine({'full_hash': _hash}, recursive=true) }}] *** 2025-10-06 21:10:05,645 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.022) 0:00:17.298 ******** 2025-10-06 21:10:05,662 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:05,668 p=28169 u=zuul n=ansible | TASK [repo_setup : Export hashes facts for further use cifmw_repo_setup_full_hash={{ _repo_setup_json['full_hash'] }}, cifmw_repo_setup_commit_hash={{ _repo_setup_json['commit_hash'] }}, cifmw_repo_setup_distro_hash={{ _repo_setup_json['distro_hash'] }}, cifmw_repo_setup_extended_hash={{ _repo_setup_json['extended_hash'] }}, cifmw_repo_setup_dlrn_api_url={{ _repo_setup_json['dlrn_api_url'] }}, cifmw_repo_setup_dlrn_url={{ _repo_setup_json['dlrn_url'] }}, cifmw_repo_setup_release={{ _repo_setup_json['release'] }}, cacheable=True] *** 2025-10-06 21:10:05,668 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.022) 0:00:17.321 ******** 2025-10-06 21:10:05,695 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:05,701 p=28169 u=zuul n=ansible | TASK [repo_setup : Create download directory path={{ cifmw_repo_setup_rhos_release_path }}, state=directory, mode=0755] *** 2025-10-06 21:10:05,701 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.032) 
0:00:17.354 ******** 2025-10-06 21:10:05,715 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:05,722 p=28169 u=zuul n=ansible | TASK [repo_setup : Print the URL to request msg={{ cifmw_repo_setup_rhos_release_rpm }}] *** 2025-10-06 21:10:05,722 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.020) 0:00:17.375 ******** 2025-10-06 21:10:05,736 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:05,743 p=28169 u=zuul n=ansible | TASK [Download the RPM name=krb_request] *************************************** 2025-10-06 21:10:05,743 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.020) 0:00:17.395 ******** 2025-10-06 21:10:05,757 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:05,764 p=28169 u=zuul n=ansible | TASK [repo_setup : Install RHOS Release tool name={{ cifmw_repo_setup_rhos_release_rpm if cifmw_repo_setup_rhos_release_rpm is not url else cifmw_krb_request_out.path }}, state=present, disable_gpg_check={{ cifmw_repo_setup_rhos_release_gpg_check | bool }}] *** 2025-10-06 21:10:05,764 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.021) 0:00:17.416 ******** 2025-10-06 21:10:05,778 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:05,784 p=28169 u=zuul n=ansible | TASK [repo_setup : Get rhos-release tool version _raw_params=rhos-release --version] *** 2025-10-06 21:10:05,785 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.020) 0:00:17.437 ******** 2025-10-06 21:10:05,798 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:05,806 p=28169 u=zuul n=ansible | TASK [repo_setup : Print rhos-release tool version msg={{ rr_version.stdout }}] *** 2025-10-06 21:10:05,806 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.021) 0:00:17.458 ******** 2025-10-06 21:10:05,819 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:05,827 p=28169 u=zuul n=ansible | TASK [repo_setup : Generate repos using rhos-release {{ cifmw_repo_setup_rhos_release_args }} _raw_params=rhos-release {{ cifmw_repo_setup_rhos_release_args }} \ -t {{ cifmw_repo_setup_output }}] *** 2025-10-06 21:10:05,827 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.021) 0:00:17.480 ******** 2025-10-06 21:10:05,841 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:05,847 p=28169 u=zuul n=ansible | TASK [repo_setup : Check for /etc/ci/mirror_info.sh path=/etc/ci/mirror_info.sh] *** 2025-10-06 21:10:05,847 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:05 +0000 (0:00:00.019) 0:00:17.500 ******** 2025-10-06 21:10:06,042 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:06,048 p=28169 u=zuul n=ansible | TASK [repo_setup : Use RDO proxy mirrors chdir={{ cifmw_repo_setup_output }}, _raw_params=set -o pipefail source /etc/ci/mirror_info.sh sed -i -e "s|https://trunk.rdoproject.org|$NODEPOOL_RDO_PROXY|g" *.repo ] *** 2025-10-06 21:10:06,048 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:06 +0000 (0:00:00.201) 0:00:17.701 ******** 2025-10-06 21:10:06,242 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:06,248 p=28169 u=zuul n=ansible | TASK [repo_setup : Use RDO CentOS mirrors (remove CentOS 10 conditional when Nodepool mirrors exist) chdir={{ cifmw_repo_setup_output }}, _raw_params=set -o pipefail source /etc/ci/mirror_info.sh sed -i -e "s|http://mirror.stream.centos.org|$NODEPOOL_CENTOS_MIRROR|g" *.repo ] *** 2025-10-06 21:10:06,248 p=28169 u=zuul 
n=ansible | Monday 06 October 2025 21:10:06 +0000 (0:00:00.199) 0:00:17.901 ********
2025-10-06 21:10:06,452 p=28169 u=zuul n=ansible | changed: [localhost]
2025-10-06 21:10:06,458 p=28169 u=zuul n=ansible | TASK [repo_setup : Check for gating.repo file on content provider url=http://{{ content_provider_registry_ip }}:8766/gating.repo] ***
2025-10-06 21:10:06,458 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:06 +0000 (0:00:00.210) 0:00:18.111 ********
2025-10-06 21:10:06,957 p=28169 u=zuul n=ansible | fatal: [localhost]: FAILED! => changed: false
    elapsed: 0
    msg: 'Status code was -1 and not [200]: Request failed: '
    redirected: false
    status: -1
    url: http://38.102.83.53:8766/gating.repo
2025-10-06 21:10:06,957 p=28169 u=zuul n=ansible | ...ignoring
2025-10-06 21:10:06,964 p=28169 u=zuul n=ansible | TASK [repo_setup : Populate gating repo from content provider ip content=[gating-repo] baseurl=http://{{ content_provider_registry_ip }}:8766/ enabled=1 gpgcheck=0 priority=1 , dest={{ cifmw_repo_setup_output }}/gating.repo, mode=0644] ***
2025-10-06 21:10:06,964 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:06 +0000 (0:00:00.505) 0:00:18.616 ********
2025-10-06 21:10:06,990 p=28169 u=zuul n=ansible | skipping: [localhost]
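The gating.repo probe above failed because the content provider at 38.102.83.53 did not answer, the failure was ignored, and the populate task was therefore skipped. Had it run, it would have written roughly the file sketched below; the destination path assumes cifmw_repo_setup_output resolves to the artifacts/repositories directory created earlier in this play.

    # Illustrative sketch of the gating repo the skipped task would have generated
    cat > /home/zuul/ci-framework-data/artifacts/repositories/gating.repo <<'EOF'
    [gating-repo]
    baseurl=http://38.102.83.53:8766/
    enabled=1
    gpgcheck=0
    priority=1
    EOF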
2025-10-06 21:10:07,000 p=28169 u=zuul n=ansible | TASK [repo_setup : Check for DLRN repo at the destination path={{ cifmw_repo_setup_output }}/delorean.repo] ***
2025-10-06 21:10:07,000 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:07 +0000 (0:00:00.036) 0:00:18.653 ********
2025-10-06 21:10:07,028 p=28169 u=zuul n=ansible | skipping: [localhost]
2025-10-06 21:10:07,035 p=28169 u=zuul n=ansible | TASK [repo_setup : Lower the priority of DLRN repos to allow installation from gating repo path={{ cifmw_repo_setup_output }}/delorean.repo, regexp=priority=1, replace=priority=20] ***
2025-10-06 21:10:07,035 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:07 +0000 (0:00:00.035) 0:00:18.688 ********
2025-10-06 21:10:07,061 p=28169 u=zuul n=ansible | skipping: [localhost]
2025-10-06 21:10:07,068 p=28169 u=zuul n=ansible | TASK [repo_setup : Check for DLRN component repo path={{ cifmw_repo_setup_output }}/{{ _comp_repo }}] ***
2025-10-06 21:10:07,068 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:07 +0000 (0:00:00.032) 0:00:18.720 ********
2025-10-06 21:10:07,094 p=28169 u=zuul n=ansible | skipping: [localhost]
2025-10-06 21:10:07,103 p=28169 u=zuul n=ansible | TASK [repo_setup : Lower the priority of componennt repos to allow installation from gating repo path={{ cifmw_repo_setup_output }}//{{ _comp_repo }}, regexp=priority=1, replace=priority=2] ***
2025-10-06 21:10:07,103 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:07 +0000 (0:00:00.035) 0:00:18.756 ********
2025-10-06 21:10:07,127 p=28169 u=zuul n=ansible | skipping: [localhost]
2025-10-06 21:10:07,135 p=28169 u=zuul n=ansible | TASK [repo_setup : Find existing repos from /etc/yum.repos.d directory paths=/etc/yum.repos.d/, patterns=*.repo, recurse=False] ***
2025-10-06 21:10:07,136 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:07 +0000 (0:00:00.032) 0:00:18.788 ********
2025-10-06 21:10:07,422 p=28169 u=zuul n=ansible | ok: [localhost]
2025-10-06 21:10:07,427 p=28169 u=zuul n=ansible | TASK [repo_setup : Remove existing repos from /etc/yum.repos.d directory path={{ item }}, state=absent] ***
2025-10-06 21:10:07,428 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:07 +0000 (0:00:00.291) 0:00:19.080 ********
2025-10-06 21:10:07,638 p=28169 u=zuul n=ansible | changed: [localhost] => (item=/etc/yum.repos.d/centos-addons.repo)
2025-10-06 21:10:07,831 p=28169 u=zuul n=ansible | changed: [localhost] => (item=/etc/yum.repos.d/centos.repo)
2025-10-06 21:10:07,839 p=28169 u=zuul n=ansible | TASK [repo_setup : Cleanup existing metadata _raw_params=dnf clean metadata] ***
2025-10-06 21:10:07,839 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:07 +0000 (0:00:00.411) 0:00:19.491 ********
2025-10-06 21:10:08,285 p=28169 u=zuul n=ansible | changed: [localhost]
2025-10-06 21:10:08,299 p=28169 u=zuul n=ansible | TASK [repo_setup : Copy generated repos to /etc/yum.repos.d directory mode=0755, remote_src=True, src={{ cifmw_repo_setup_output }}/, dest=/etc/yum.repos.d] ***
2025-10-06 21:10:08,299 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:08 +0000 (0:00:00.460) 0:00:19.951 ********
2025-10-06 21:10:08,590 p=28169 u=zuul n=ansible | changed: [localhost]
2025-10-06 21:10:08,602 p=28169 u=zuul n=ansible | TASK [ci_setup : Gather variables for each operating system _raw_params={{ item }}] ***
2025-10-06 21:10:08,602 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:08 +0000 (0:00:00.303) 0:00:20.255 ********
2025-10-06 21:10:08,645 p=28169 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/ci_setup/vars/redhat.yml)
2025-10-06 21:10:08,653 p=28169 u=zuul n=ansible | TASK [ci_setup : List packages to install var=cifmw_ci_setup_packages] *********
2025-10-06 21:10:08,654 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:08 +0000 (0:00:00.051) 0:00:20.306 ********
2025-10-06 21:10:08,674 p=28169 u=zuul n=ansible | ok: [localhost] => cifmw_ci_setup_packages:
    - bash-completion
    - ca-certificates
    - git-core
    - make
    - tar
    - tmux
    - python3-pip
2025-10-06 21:10:08,680 p=28169 u=zuul n=ansible | TASK [ci_setup : Install needed packages name={{ cifmw_ci_setup_packages }}, state=latest] ***
2025-10-06 21:10:08,680 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:08 +0000 (0:00:00.026) 0:00:20.332 ********
2025-10-06 21:10:34,121 p=28169 u=zuul n=ansible | changed: [localhost]
2025-10-06 21:10:34,129 p=28169 u=zuul n=ansible | TASK [ci_setup : Gather version of openshift client _raw_params=oc version --client -o yaml] ***
2025-10-06 21:10:34,129 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:34 +0000 (0:00:25.449) 0:00:45.782 ********
2025-10-06 21:10:34,332 p=28169 u=zuul n=ansible | ok: [localhost]
2025-10-06 21:10:34,338 p=28169 u=zuul n=ansible | TASK [ci_setup : Ensure openshift client install path is present path={{ cifmw_ci_setup_oc_install_path }}, state=directory, mode=0755] ***
2025-10-06 21:10:34,338 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:34 +0000 (0:00:00.208) 0:00:45.990 ********
2025-10-06 21:10:34,526 p=28169 u=zuul n=ansible | changed: [localhost]
2025-10-06 21:10:34,534 p=28169 u=zuul n=ansible | TASK [ci_setup : Install openshift client src={{ cifmw_ci_setup_openshift_client_download_uri }}/{{ cifmw_ci_setup_openshift_client_version }}/openshift-client-linux.tar.gz, dest={{ cifmw_ci_setup_oc_install_path }}, remote_src=True, mode=0755, creates={{ cifmw_ci_setup_oc_install_path }}/oc] ***
2025-10-06 21:10:34,534 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:34 +0000 (0:00:00.196) 0:00:46.187 ********
2025-10-06 21:10:39,633 p=28169 u=zuul n=ansible | changed: [localhost]
2025-10-06 21:10:39,647 p=28169 u=zuul n=ansible | TASK [ci_setup : Add the OC path to cifmw_path if needed cifmw_path={{ cifmw_ci_setup_oc_install_path }}:{{
ansible_env.PATH }}, cacheable=True] *** 2025-10-06 21:10:39,648 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:39 +0000 (0:00:05.113) 0:00:51.300 ******** 2025-10-06 21:10:39,674 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:39,681 p=28169 u=zuul n=ansible | TASK [ci_setup : Create completion file] *************************************** 2025-10-06 21:10:39,681 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:39 +0000 (0:00:00.033) 0:00:51.333 ******** 2025-10-06 21:10:39,970 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:39,979 p=28169 u=zuul n=ansible | TASK [ci_setup : Source completion from within .bashrc create=True, mode=0644, path={{ ansible_user_dir }}/.bashrc, block=if [ -f ~/.oc_completion ]; then source ~/.oc_completion fi] *** 2025-10-06 21:10:39,979 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:39 +0000 (0:00:00.298) 0:00:51.632 ******** 2025-10-06 21:10:40,258 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:40,264 p=28169 u=zuul n=ansible | TASK [ci_setup : Check rhsm status _raw_params=subscription-manager status] **** 2025-10-06 21:10:40,265 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:40 +0000 (0:00:00.285) 0:00:51.917 ******** 2025-10-06 21:10:40,278 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:40,285 p=28169 u=zuul n=ansible | TASK [ci_setup : Gather the repos to be enabled _repos={{ cifmw_ci_setup_rhel_rhsm_default_repos + (cifmw_ci_setup_rhel_rhsm_extra_repos | default([])) }}] *** 2025-10-06 21:10:40,285 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:40 +0000 (0:00:00.020) 0:00:51.937 ******** 2025-10-06 21:10:40,298 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:40,304 p=28169 u=zuul n=ansible | TASK [ci_setup : Enabling the required repositories. 
name={{ item }}, state={{ rhsm_repo_state | default('enabled') }}] *** 2025-10-06 21:10:40,305 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:40 +0000 (0:00:00.019) 0:00:51.957 ******** 2025-10-06 21:10:40,319 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:40,326 p=28169 u=zuul n=ansible | TASK [ci_setup : Get current /etc/redhat-release _raw_params=cat /etc/redhat-release] *** 2025-10-06 21:10:40,326 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:40 +0000 (0:00:00.021) 0:00:51.979 ******** 2025-10-06 21:10:40,340 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:40,347 p=28169 u=zuul n=ansible | TASK [ci_setup : Print current /etc/redhat-release msg={{ _current_rh_release.stdout }}] *** 2025-10-06 21:10:40,347 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:40 +0000 (0:00:00.020) 0:00:51.999 ******** 2025-10-06 21:10:40,361 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:40,367 p=28169 u=zuul n=ansible | TASK [ci_setup : Ensure the repos are enabled in the system using yum name={{ item.name }}, baseurl={{ item.baseurl }}, description={{ item.description | default(item.name) }}, gpgcheck={{ item.gpgcheck | default(false) }}, enabled=True, state={{ yum_repo_state | default('present') }}] *** 2025-10-06 21:10:40,367 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:40 +0000 (0:00:00.020) 0:00:52.020 ******** 2025-10-06 21:10:40,386 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:40,392 p=28169 u=zuul n=ansible | TASK [ci_setup : Manage directories path={{ item }}, state={{ directory_state }}, mode=0755, owner={{ ansible_user_id }}, group={{ ansible_user_id }}] *** 2025-10-06 21:10:40,393 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:40 +0000 (0:00:00.025) 0:00:52.045 ******** 2025-10-06 21:10:40,620 p=28169 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/manifests/openstack/cr) 2025-10-06 21:10:40,812 p=28169 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/logs) 2025-10-06 21:10:41,028 p=28169 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/tmp) 2025-10-06 21:10:41,254 p=28169 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/volumes) 2025-10-06 21:10:41,466 p=28169 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/parameters) 2025-10-06 21:10:41,478 p=28169 u=zuul n=ansible | TASK [Prepare install_yamls make targets name=install_yamls, apply={'tags': ['bootstrap']}] *** 2025-10-06 21:10:41,478 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:41 +0000 (0:00:01.085) 0:00:53.131 ******** 2025-10-06 21:10:41,622 p=28169 u=zuul n=ansible | TASK [install_yamls : Ensure directories exist path={{ item }}, state=directory, mode=0755] *** 2025-10-06 21:10:41,622 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:41 +0000 (0:00:00.143) 0:00:53.275 ******** 2025-10-06 21:10:41,838 p=28169 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts) 2025-10-06 21:10:42,034 p=28169 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/roles/install_yamls_makes/tasks) 2025-10-06 21:10:42,186 p=28169 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/parameters) 2025-10-06 21:10:42,200 p=28169 u=zuul n=ansible | TASK [Create variables with local repos based on Zuul items name=install_yamls, tasks_from=zuul_set_operators_repo.yml] *** 
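The install_yamls tasks that follow map each operator project checked out by Zuul onto install_yamls make variables; for this job that is the watcher-operator change. Rendered as shell, the resulting override amounts to roughly the two lines below (values taken from the debug output further down; the export form is only an illustration of what ends up in the generated environment file).

    export WATCHER_REPO=/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator   # local checkout of change 287
    export WATCHER_BRANCH=   # empty in this job, see WATCHER_BRANCH: '' in the debug output below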
2025-10-06 21:10:42,200 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:42 +0000 (0:00:00.577) 0:00:53.852 ******** 2025-10-06 21:10:42,243 p=28169 u=zuul n=ansible | TASK [install_yamls : Set fact with local repos based on Zuul items cifmw_install_yamls_operators_repo={{ cifmw_install_yamls_operators_repo | default({}) | combine(_repo_operator_info | items2dict) }}] *** 2025-10-06 21:10:42,243 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:42 +0000 (0:00:00.043) 0:00:53.896 ******** 2025-10-06 21:10:42,295 p=28169 u=zuul n=ansible | ok: [localhost] => (item={'branch': 'main', 'change': '287', 'change_url': 'https://github.com/openstack-k8s-operators/watcher-operator/pull/287', 'commit_id': '14377136e67c9cd67507a059bfde2f19f140387d', 'patchset': '14377136e67c9cd67507a059bfde2f19f140387d', 'project': {'canonical_hostname': 'github.com', 'canonical_name': 'github.com/openstack-k8s-operators/watcher-operator', 'name': 'openstack-k8s-operators/watcher-operator', 'short_name': 'watcher-operator', 'src_dir': 'src/github.com/openstack-k8s-operators/watcher-operator'}, 'topic': None}) 2025-10-06 21:10:42,302 p=28169 u=zuul n=ansible | TASK [install_yamls : Print helpful data for debugging msg=_repo_operator_name: {{ _repo_operator_name }} _repo_operator_info: {{ _repo_operator_info }} cifmw_install_yamls_operators_repo: {{ cifmw_install_yamls_operators_repo }} ] *** 2025-10-06 21:10:42,302 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:42 +0000 (0:00:00.058) 0:00:53.954 ******** 2025-10-06 21:10:42,349 p=28169 u=zuul n=ansible | ok: [localhost] => (item={'branch': 'main', 'change': '287', 'change_url': 'https://github.com/openstack-k8s-operators/watcher-operator/pull/287', 'commit_id': '14377136e67c9cd67507a059bfde2f19f140387d', 'patchset': '14377136e67c9cd67507a059bfde2f19f140387d', 'project': {'canonical_hostname': 'github.com', 'canonical_name': 'github.com/openstack-k8s-operators/watcher-operator', 'name': 'openstack-k8s-operators/watcher-operator', 'short_name': 'watcher-operator', 'src_dir': 'src/github.com/openstack-k8s-operators/watcher-operator'}, 'topic': None}) => msg: | _repo_operator_name: watcher _repo_operator_info: [{'key': 'WATCHER_REPO', 'value': '/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator'}, {'key': 'WATCHER_BRANCH', 'value': ''}] cifmw_install_yamls_operators_repo: {'WATCHER_REPO': '/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator', 'WATCHER_BRANCH': ''} 2025-10-06 21:10:42,361 p=28169 u=zuul n=ansible | TASK [install_yamls : Compute the cifmw_install_yamls_vars final value _install_yamls_override_vars={{ _install_yamls_override_vars | default({}) | combine(item, recursive=True) }}] *** 2025-10-06 21:10:42,362 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:42 +0000 (0:00:00.059) 0:00:54.014 ******** 2025-10-06 21:10:42,469 p=28169 u=zuul n=ansible | ok: [localhost] => (item={'BMO_SETUP': False, 'INSTALL_CERT_MANAGER': False}) 2025-10-06 21:10:42,477 p=28169 u=zuul n=ansible | TASK [install_yamls : Set environment override cifmw_install_yamls_environment fact cifmw_install_yamls_environment={{ _install_yamls_override_vars.keys() | map('upper') | zip(_install_yamls_override_vars.values()) | items2dict(key_name=0, value_name=1) | combine({ 'OUT': cifmw_install_yamls_manifests_dir, 'OUTPUT_DIR': cifmw_install_yamls_edpm_dir, 'CHECKOUT_FROM_OPENSTACK_REF': cifmw_install_yamls_checkout_openstack_ref, 'OPENSTACK_K8S_BRANCH': (zuul is defined and not zuul.branch |regex_search('master|rhos')) | 
ternary(zuul.branch, 'main') }) | combine(install_yamls_operators_repos) }}, cacheable=True] ***
2025-10-06 21:10:42,477 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:42 +0000 (0:00:00.115) 0:00:54.129 ********
2025-10-06 21:10:42,514 p=28169 u=zuul n=ansible | ok: [localhost]
2025-10-06 21:10:42,520 p=28169 u=zuul n=ansible | TASK [install_yamls : Get environment structure base_path={{ cifmw_install_yamls_repo }}] ***
2025-10-06 21:10:42,521 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:42 +0000 (0:00:00.043) 0:00:54.173 ********
2025-10-06 21:10:43,052 p=28169 u=zuul n=ansible | ok: [localhost]
2025-10-06 21:10:43,059 p=28169 u=zuul n=ansible | TASK [install_yamls : Ensure Output directory exists path={{ cifmw_install_yamls_out_dir }}, state=directory, mode=0755] ***
2025-10-06 21:10:43,059 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:43 +0000 (0:00:00.538) 0:00:54.712 ********
2025-10-06 21:10:43,245 p=28169 u=zuul n=ansible | ok: [localhost]
2025-10-06 21:10:43,252 p=28169 u=zuul n=ansible | TASK [install_yamls : Ensure user cifmw_install_yamls_vars contains existing Makefile variables that=_cifmw_install_yamls_unmatched_vars | length == 0, msg=cifmw_install_yamls_vars contains a variable that is not defined in install_yamls Makefile nor cifmw_install_yamls_whitelisted_vars: {{ _cifmw_install_yamls_unmatched_vars | join(', ')}}, quiet=True] ***
2025-10-06 21:10:43,252 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:43 +0000 (0:00:00.192) 0:00:54.904 ********
2025-10-06 21:10:43,283 p=28169 u=zuul n=ansible | ok: [localhost]
2025-10-06 21:10:43,296 p=28169 u=zuul n=ansible | TASK [install_yamls : Generate /home/zuul/ci-framework-data/artifacts/install_yamls.sh dest={{ cifmw_install_yamls_out_dir }}/{{ cifmw_install_yamls_envfile }}, content={% for k,v in cifmw_install_yamls_environment.items() %} export {{ k }}={{ v }} {% endfor %}, mode=0644] ***
2025-10-06 21:10:43,296 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:43 +0000 (0:00:00.044) 0:00:54.948 ********
2025-10-06 21:10:43,723 p=28169 u=zuul n=ansible | changed: [localhost]
2025-10-06 21:10:43,735 p=28169 u=zuul n=ansible | TASK [install_yamls : Set install_yamls default values cifmw_install_yamls_defaults={{ get_makefiles_env_output.makefiles_values | combine(cifmw_install_yamls_environment) }}, cacheable=True] ***
2025-10-06 21:10:43,735 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:43 +0000 (0:00:00.438) 0:00:55.387 ********
2025-10-06 21:10:43,757 p=28169 u=zuul n=ansible | ok: [localhost]
2025-10-06 21:10:43,764 p=28169 u=zuul n=ansible | TASK [install_yamls : Show the env structure var=cifmw_install_yamls_environment] ***
2025-10-06 21:10:43,764 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:43 +0000 (0:00:00.029) 0:00:55.417 ********
2025-10-06 21:10:43,778 p=28169 u=zuul n=ansible | ok: [localhost] => cifmw_install_yamls_environment:
    BMO_SETUP: false
    CHECKOUT_FROM_OPENSTACK_REF: 'true'
    INSTALL_CERT_MANAGER: false
    OPENSTACK_K8S_BRANCH: main
    OUT: /home/zuul/ci-framework-data/artifacts/manifests
    OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm
    WATCHER_BRANCH: ''
    WATCHER_REPO: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator
2025-10-06 21:10:43,785 p=28169 u=zuul n=ansible | TASK [install_yamls : Show the env structure defaults var=cifmw_install_yamls_defaults] ***
2025-10-06 21:10:43,785 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:43 +0000 (0:00:00.021) 0:00:55.438 ********
2025-10-06 21:10:43,809 p=28169 u=zuul n=ansible
| ok: [localhost] => cifmw_install_yamls_defaults: ADOPTED_EXTERNAL_NETWORK: 172.21.1.0/24 ADOPTED_INTERNALAPI_NETWORK: 172.17.1.0/24 ADOPTED_STORAGEMGMT_NETWORK: 172.20.1.0/24 ADOPTED_STORAGE_NETWORK: 172.18.1.0/24 ADOPTED_TENANT_NETWORK: 172.9.1.0/24 ANSIBLEEE: config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_BRANCH: main ANSIBLEEE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-operator-index:latest ANSIBLEEE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/kuttl-test.yaml ANSIBLEEE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/tests/kuttl/tests ANSIBLEEE_KUTTL_NAMESPACE: ansibleee-kuttl-tests ANSIBLEEE_REPO: https://github.com/openstack-k8s-operators/openstack-ansibleee-operator ANSIBLEE_COMMIT_HASH: '' BARBICAN: config/samples/barbican_v1beta1_barbican.yaml BARBICAN_BRANCH: main BARBICAN_COMMIT_HASH: '' BARBICAN_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/config/samples/barbican_v1beta1_barbican.yaml BARBICAN_DEPL_IMG: unused BARBICAN_IMG: quay.io/openstack-k8s-operators/barbican-operator-index:latest BARBICAN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/kuttl-test.yaml BARBICAN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/tests/kuttl/tests BARBICAN_KUTTL_NAMESPACE: barbican-kuttl-tests BARBICAN_REPO: https://github.com/openstack-k8s-operators/barbican-operator.git BARBICAN_SERVICE_ENABLED: 'true' BARBICAN_SIMPLE_CRYPTO_ENCRYPTION_KEY: sEFmdFjDUqRM2VemYslV5yGNWjokioJXsg8Nrlc3drU= BAREMETAL_BRANCH: main BAREMETAL_COMMIT_HASH: '' BAREMETAL_IMG: quay.io/openstack-k8s-operators/openstack-baremetal-operator-index:latest BAREMETAL_OS_CONTAINER_IMG: '' BAREMETAL_OS_IMG: '' BAREMETAL_REPO: https://github.com/openstack-k8s-operators/openstack-baremetal-operator.git BAREMETAL_TIMEOUT: 20m BASH_IMG: quay.io/openstack-k8s-operators/bash:latest BGP_ASN: '64999' BGP_LEAF_1: 100.65.4.1 BGP_LEAF_2: 100.64.4.1 BGP_OVN_ROUTING: 'false' BGP_PEER_ASN: '64999' BGP_SOURCE_IP: 172.30.4.2 BGP_SOURCE_IP6: f00d:f00d:f00d:f00d:f00d:f00d:f00d:42 BMAAS_BRIDGE_IPV4_PREFIX: 172.20.1.2/24 BMAAS_BRIDGE_IPV6_PREFIX: fd00:bbbb::2/64 BMAAS_INSTANCE_DISK_SIZE: '20' BMAAS_INSTANCE_MEMORY: '4096' BMAAS_INSTANCE_NAME_PREFIX: crc-bmaas BMAAS_INSTANCE_NET_MODEL: virtio BMAAS_INSTANCE_OS_VARIANT: centos-stream9 BMAAS_INSTANCE_VCPUS: '2' BMAAS_INSTANCE_VIRT_TYPE: kvm BMAAS_IPV4: 'true' BMAAS_IPV6: 'false' BMAAS_LIBVIRT_USER: sushyemu BMAAS_METALLB_ADDRESS_POOL: 172.20.1.64/26 BMAAS_METALLB_POOL_NAME: baremetal BMAAS_NETWORK_IPV4_PREFIX: 172.20.1.1/24 BMAAS_NETWORK_IPV6_PREFIX: fd00:bbbb::1/64 BMAAS_NETWORK_NAME: crc-bmaas BMAAS_NODE_COUNT: '1' BMAAS_OCP_INSTANCE_NAME: crc BMAAS_REDFISH_PASSWORD: password BMAAS_REDFISH_USERNAME: admin BMAAS_ROUTE_LIBVIRT_NETWORKS: crc-bmaas,crc,default BMAAS_SUSHY_EMULATOR_DRIVER: libvirt BMAAS_SUSHY_EMULATOR_IMAGE: quay.io/metal3-io/sushy-tools:latest BMAAS_SUSHY_EMULATOR_NAMESPACE: sushy-emulator BMAAS_SUSHY_EMULATOR_OS_CLIENT_CONFIG_FILE: /etc/openstack/clouds.yaml BMAAS_SUSHY_EMULATOR_OS_CLOUD: openstack BMH_NAMESPACE: openstack BMO_BRANCH: release-0.9 BMO_COMMIT_HASH: '' BMO_IPA_BRANCH: stable/2024.1 BMO_IRONIC_HOST: 192.168.122.10 BMO_PROVISIONING_INTERFACE: '' BMO_REPO: https://github.com/metal3-io/baremetal-operator 
BMO_SETUP: false BMO_SETUP_ROUTE_REPLACE: 'true' BM_CTLPLANE_INTERFACE: enp1s0 BM_INSTANCE_MEMORY: '8192' BM_INSTANCE_NAME_PREFIX: edpm-compute-baremetal BM_INSTANCE_NAME_SUFFIX: '0' BM_NETWORK_NAME: default BM_NODE_COUNT: '1' BM_ROOT_PASSWORD: '' BM_ROOT_PASSWORD_SECRET: '' CEILOMETER_CENTRAL_DEPL_IMG: unused CEILOMETER_NOTIFICATION_DEPL_IMG: unused CEPH_BRANCH: release-1.15 CEPH_CLIENT: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/toolbox.yaml CEPH_COMMON: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/common.yaml CEPH_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/cluster-test.yaml CEPH_CRDS: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/crds.yaml CEPH_IMG: quay.io/ceph/demo:latest-squid CEPH_OP: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/operator-openshift.yaml CEPH_REPO: https://github.com/rook/rook.git CERTMANAGER_TIMEOUT: 300s CHECKOUT_FROM_OPENSTACK_REF: 'true' CINDER: config/samples/cinder_v1beta1_cinder.yaml CINDERAPI_DEPL_IMG: unused CINDERBKP_DEPL_IMG: unused CINDERSCH_DEPL_IMG: unused CINDERVOL_DEPL_IMG: unused CINDER_BRANCH: main CINDER_COMMIT_HASH: '' CINDER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/config/samples/cinder_v1beta1_cinder.yaml CINDER_IMG: quay.io/openstack-k8s-operators/cinder-operator-index:latest CINDER_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/kuttl-test.yaml CINDER_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/test/kuttl/tests CINDER_KUTTL_NAMESPACE: cinder-kuttl-tests CINDER_REPO: https://github.com/openstack-k8s-operators/cinder-operator.git CLEANUP_DIR_CMD: rm -Rf CRC_BGP_NIC_1_MAC: '52:54:00:11:11:11' CRC_BGP_NIC_2_MAC: '52:54:00:11:11:12' CRC_HTTPS_PROXY: '' CRC_HTTP_PROXY: '' CRC_STORAGE_NAMESPACE: crc-storage CRC_STORAGE_RETRIES: '3' CRC_URL: '''https://developers.redhat.com/content-gateway/rest/mirror/pub/openshift-v4/clients/crc/latest/crc-linux-amd64.tar.xz''' CRC_VERSION: latest DATAPLANE_ANSIBLE_SECRET: dataplane-ansible-ssh-private-key-secret DATAPLANE_ANSIBLE_USER: '' DATAPLANE_COMPUTE_IP: 192.168.122.100 DATAPLANE_CONTAINER_PREFIX: openstack DATAPLANE_CONTAINER_TAG: current-podified DATAPLANE_CUSTOM_SERVICE_RUNNER_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest DATAPLANE_DEFAULT_GW: 192.168.122.1 DATAPLANE_EXTRA_NOVA_CONFIG_FILE: /dev/null DATAPLANE_GROWVOLS_ARGS: /=8GB /tmp=1GB /home=1GB /var=100% DATAPLANE_KUSTOMIZE_SCENARIO: preprovisioned DATAPLANE_NETWORKER_IP: 192.168.122.200 DATAPLANE_NETWORK_INTERFACE_NAME: eth0 DATAPLANE_NOVA_NFS_PATH: '' DATAPLANE_NTP_SERVER: pool.ntp.org DATAPLANE_PLAYBOOK: osp.edpm.download_cache DATAPLANE_REGISTRY_URL: quay.io/podified-antelope-centos9 DATAPLANE_RUNNER_IMG: '' DATAPLANE_SERVER_ROLE: compute DATAPLANE_SSHD_ALLOWED_RANGES: '[''192.168.122.0/24'']' DATAPLANE_TIMEOUT: 30m DATAPLANE_TLS_ENABLED: 'true' DATAPLANE_TOTAL_NETWORKER_NODES: '1' DATAPLANE_TOTAL_NODES: '1' DBSERVICE: galera DESIGNATE: config/samples/designate_v1beta1_designate.yaml DESIGNATE_BRANCH: main DESIGNATE_COMMIT_HASH: '' DESIGNATE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/config/samples/designate_v1beta1_designate.yaml DESIGNATE_IMG: quay.io/openstack-k8s-operators/designate-operator-index:latest DESIGNATE_KUTTL_CONF: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/kuttl-test.yaml DESIGNATE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/tests/kuttl/tests DESIGNATE_KUTTL_NAMESPACE: designate-kuttl-tests DESIGNATE_REPO: https://github.com/openstack-k8s-operators/designate-operator.git DNSDATA: config/samples/network_v1beta1_dnsdata.yaml DNSDATA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsdata.yaml DNSMASQ: config/samples/network_v1beta1_dnsmasq.yaml DNSMASQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsmasq.yaml DNS_DEPL_IMG: unused DNS_DOMAIN: localdomain DOWNLOAD_TOOLS_SELECTION: all EDPM_ATTACH_EXTNET: 'true' EDPM_COMPUTE_ADDITIONAL_HOST_ROUTES: '''[]''' EDPM_COMPUTE_ADDITIONAL_NETWORKS: '''[]''' EDPM_COMPUTE_CELLS: '1' EDPM_COMPUTE_CEPH_ENABLED: 'true' EDPM_COMPUTE_CEPH_NOVA: 'true' EDPM_COMPUTE_DHCP_AGENT_ENABLED: 'true' EDPM_COMPUTE_SRIOV_ENABLED: 'true' EDPM_COMPUTE_SUFFIX: '0' EDPM_CONFIGURE_DEFAULT_ROUTE: 'true' EDPM_CONFIGURE_HUGEPAGES: 'false' EDPM_CONFIGURE_NETWORKING: 'true' EDPM_FIRSTBOOT_EXTRA: /tmp/edpm-firstboot-extra EDPM_NETWORKER_SUFFIX: '0' EDPM_TOTAL_NETWORKERS: '1' EDPM_TOTAL_NODES: '1' GALERA_REPLICAS: '' GENERATE_SSH_KEYS: 'true' GIT_CLONE_OPTS: '' GLANCE: config/samples/glance_v1beta1_glance.yaml GLANCEAPI_DEPL_IMG: unused GLANCE_BRANCH: main GLANCE_COMMIT_HASH: '' GLANCE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/config/samples/glance_v1beta1_glance.yaml GLANCE_IMG: quay.io/openstack-k8s-operators/glance-operator-index:latest GLANCE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/kuttl-test.yaml GLANCE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/test/kuttl/tests GLANCE_KUTTL_NAMESPACE: glance-kuttl-tests GLANCE_REPO: https://github.com/openstack-k8s-operators/glance-operator.git HEAT: config/samples/heat_v1beta1_heat.yaml HEATAPI_DEPL_IMG: unused HEATCFNAPI_DEPL_IMG: unused HEATENGINE_DEPL_IMG: unused HEAT_AUTH_ENCRYPTION_KEY: 767c3ed056cbaa3b9dfedb8c6f825bf0 HEAT_BRANCH: main HEAT_COMMIT_HASH: '' HEAT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/config/samples/heat_v1beta1_heat.yaml HEAT_IMG: quay.io/openstack-k8s-operators/heat-operator-index:latest HEAT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/kuttl-test.yaml HEAT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/tests/kuttl/tests HEAT_KUTTL_NAMESPACE: heat-kuttl-tests HEAT_REPO: https://github.com/openstack-k8s-operators/heat-operator.git HEAT_SERVICE_ENABLED: 'true' HORIZON: config/samples/horizon_v1beta1_horizon.yaml HORIZON_BRANCH: main HORIZON_COMMIT_HASH: '' HORIZON_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/config/samples/horizon_v1beta1_horizon.yaml HORIZON_DEPL_IMG: unused HORIZON_IMG: quay.io/openstack-k8s-operators/horizon-operator-index:latest HORIZON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/kuttl-test.yaml HORIZON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/tests/kuttl/tests HORIZON_KUTTL_NAMESPACE: horizon-kuttl-tests HORIZON_REPO: https://github.com/openstack-k8s-operators/horizon-operator.git INFRA_BRANCH: main INFRA_COMMIT_HASH: '' INFRA_IMG: 
quay.io/openstack-k8s-operators/infra-operator-index:latest INFRA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/kuttl-test.yaml INFRA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/tests/kuttl/tests INFRA_KUTTL_NAMESPACE: infra-kuttl-tests INFRA_REPO: https://github.com/openstack-k8s-operators/infra-operator.git INSTALL_CERT_MANAGER: false INSTALL_NMSTATE: true || false INSTALL_NNCP: true || false INTERNALAPI_HOST_ROUTES: '' IPV6_LAB_IPV4_NETWORK_IPADDRESS: 172.30.0.1/24 IPV6_LAB_IPV6_NETWORK_IPADDRESS: fd00:abcd:abcd:fc00::1/64 IPV6_LAB_LIBVIRT_STORAGE_POOL: default IPV6_LAB_MANAGE_FIREWALLD: 'true' IPV6_LAB_NAT64_HOST_IPV4: 172.30.0.2/24 IPV6_LAB_NAT64_HOST_IPV6: fd00:abcd:abcd:fc00::2/64 IPV6_LAB_NAT64_INSTANCE_NAME: nat64-router IPV6_LAB_NAT64_IPV6_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_NAT64_TAYGA_DYNAMIC_POOL: 192.168.255.0/24 IPV6_LAB_NAT64_TAYGA_IPV4: 192.168.255.1 IPV6_LAB_NAT64_TAYGA_IPV6: fd00:abcd:abcd:fc00::3 IPV6_LAB_NAT64_TAYGA_IPV6_PREFIX: fd00:abcd:abcd:fcff::/96 IPV6_LAB_NAT64_UPDATE_PACKAGES: 'false' IPV6_LAB_NETWORK_NAME: nat64 IPV6_LAB_SNO_CLUSTER_NETWORK: fd00:abcd:0::/48 IPV6_LAB_SNO_HOST_IP: fd00:abcd:abcd:fc00::11 IPV6_LAB_SNO_HOST_PREFIX: '64' IPV6_LAB_SNO_INSTANCE_NAME: sno IPV6_LAB_SNO_MACHINE_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_SNO_OCP_MIRROR_URL: https://mirror.openshift.com/pub/openshift-v4/clients/ocp IPV6_LAB_SNO_OCP_VERSION: latest-4.14 IPV6_LAB_SNO_SERVICE_NETWORK: fd00:abcd:abcd:fc03::/112 IPV6_LAB_SSH_PUB_KEY: /home/zuul/.ssh/id_rsa.pub IPV6_LAB_WORK_DIR: /home/zuul/.ipv6lab IRONIC: config/samples/ironic_v1beta1_ironic.yaml IRONICAPI_DEPL_IMG: unused IRONICCON_DEPL_IMG: unused IRONICINS_DEPL_IMG: unused IRONICNAG_DEPL_IMG: unused IRONICPXE_DEPL_IMG: unused IRONIC_BRANCH: main IRONIC_COMMIT_HASH: '' IRONIC_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/config/samples/ironic_v1beta1_ironic.yaml IRONIC_IMAGE_TAG: release-24.1 IRONIC_IMG: quay.io/openstack-k8s-operators/ironic-operator-index:latest IRONIC_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/kuttl-test.yaml IRONIC_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/tests/kuttl/tests IRONIC_KUTTL_NAMESPACE: ironic-kuttl-tests IRONIC_REPO: https://github.com/openstack-k8s-operators/ironic-operator.git KEYSTONEAPI: config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_DEPL_IMG: unused KEYSTONE_BRANCH: main KEYSTONE_COMMIT_HASH: '' KEYSTONE_FEDERATION_CLIENT_SECRET: COX8bmlKAWn56XCGMrKQJj7dgHNAOl6f KEYSTONE_FEDERATION_CRYPTO_PASSPHRASE: openstack KEYSTONE_IMG: quay.io/openstack-k8s-operators/keystone-operator-index:latest KEYSTONE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/kuttl-test.yaml KEYSTONE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/tests/kuttl/tests KEYSTONE_KUTTL_NAMESPACE: keystone-kuttl-tests KEYSTONE_REPO: https://github.com/openstack-k8s-operators/keystone-operator.git KUBEADMIN_PWD: '12345678' LIBVIRT_SECRET: libvirt-secret LOKI_DEPLOY_MODE: openshift-network LOKI_DEPLOY_NAMESPACE: netobserv LOKI_DEPLOY_SIZE: 1x.demo LOKI_NAMESPACE: openshift-operators-redhat LOKI_OPERATOR_GROUP: openshift-operators-redhat-loki LOKI_SUBSCRIPTION: loki-operator LVMS_CR: '1' MANILA: 
config/samples/manila_v1beta1_manila.yaml MANILAAPI_DEPL_IMG: unused MANILASCH_DEPL_IMG: unused MANILASHARE_DEPL_IMG: unused MANILA_BRANCH: main MANILA_COMMIT_HASH: '' MANILA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/config/samples/manila_v1beta1_manila.yaml MANILA_IMG: quay.io/openstack-k8s-operators/manila-operator-index:latest MANILA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/kuttl-test.yaml MANILA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/test/kuttl/tests MANILA_KUTTL_NAMESPACE: manila-kuttl-tests MANILA_REPO: https://github.com/openstack-k8s-operators/manila-operator.git MANILA_SERVICE_ENABLED: 'true' MARIADB: config/samples/mariadb_v1beta1_galera.yaml MARIADB_BRANCH: main MARIADB_CHAINSAW_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/tests/chainsaw/config.yaml MARIADB_CHAINSAW_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/tests/chainsaw/tests MARIADB_CHAINSAW_NAMESPACE: mariadb-chainsaw-tests MARIADB_COMMIT_HASH: '' MARIADB_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/config/samples/mariadb_v1beta1_galera.yaml MARIADB_DEPL_IMG: unused MARIADB_IMG: quay.io/openstack-k8s-operators/mariadb-operator-index:latest MARIADB_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/kuttl-test.yaml MARIADB_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/tests/kuttl/tests MARIADB_KUTTL_NAMESPACE: mariadb-kuttl-tests MARIADB_REPO: https://github.com/openstack-k8s-operators/mariadb-operator.git MEMCACHED: config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_DEPL_IMG: unused METADATA_SHARED_SECRET: '1234567842' METALLB_IPV6_POOL: fd00:aaaa::80-fd00:aaaa::90 METALLB_POOL: 192.168.122.80-192.168.122.90 MICROSHIFT: '0' NAMESPACE: openstack NETCONFIG: config/samples/network_v1beta1_netconfig.yaml NETCONFIG_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_netconfig.yaml NETCONFIG_DEPL_IMG: unused NETOBSERV_DEPLOY_NAMESPACE: netobserv NETOBSERV_NAMESPACE: openshift-netobserv-operator NETOBSERV_OPERATOR_GROUP: openshift-netobserv-operator-net NETOBSERV_SUBSCRIPTION: netobserv-operator NETWORK_BGP: 'false' NETWORK_DESIGNATE_ADDRESS_PREFIX: 172.28.0 NETWORK_DESIGNATE_EXT_ADDRESS_PREFIX: 172.50.0 NETWORK_INTERNALAPI_ADDRESS_PREFIX: 172.17.0 NETWORK_ISOLATION: 'true' NETWORK_ISOLATION_INSTANCE_NAME: crc NETWORK_ISOLATION_IPV4: 'true' NETWORK_ISOLATION_IPV4_ADDRESS: 172.16.1.1/24 NETWORK_ISOLATION_IPV4_NAT: 'true' NETWORK_ISOLATION_IPV6: 'false' NETWORK_ISOLATION_IPV6_ADDRESS: fd00:aaaa::1/64 NETWORK_ISOLATION_IP_ADDRESS: 192.168.122.10 NETWORK_ISOLATION_MAC: '52:54:00:11:11:10' NETWORK_ISOLATION_NETWORK_NAME: net-iso NETWORK_ISOLATION_NET_NAME: default NETWORK_ISOLATION_USE_DEFAULT_NETWORK: 'true' NETWORK_MTU: '1500' NETWORK_STORAGEMGMT_ADDRESS_PREFIX: 172.20.0 NETWORK_STORAGE_ADDRESS_PREFIX: 172.18.0 NETWORK_STORAGE_MACVLAN: '' NETWORK_TENANT_ADDRESS_PREFIX: 172.19.0 NETWORK_VLAN_START: '20' NETWORK_VLAN_STEP: '1' NEUTRONAPI: config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/config/samples/neutron_v1beta1_neutronapi.yaml 
NEUTRONAPI_DEPL_IMG: unused NEUTRON_BRANCH: main NEUTRON_COMMIT_HASH: '' NEUTRON_IMG: quay.io/openstack-k8s-operators/neutron-operator-index:latest NEUTRON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/kuttl-test.yaml NEUTRON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/test/kuttl/tests NEUTRON_KUTTL_NAMESPACE: neutron-kuttl-tests NEUTRON_REPO: https://github.com/openstack-k8s-operators/neutron-operator.git NFS_HOME: /home/nfs NMSTATE_NAMESPACE: openshift-nmstate NMSTATE_OPERATOR_GROUP: openshift-nmstate-tn6k8 NMSTATE_SUBSCRIPTION: kubernetes-nmstate-operator NNCP_ADDITIONAL_HOST_ROUTES: '' NNCP_BGP_1_INTERFACE: enp7s0 NNCP_BGP_1_IP_ADDRESS: 100.65.4.2 NNCP_BGP_2_INTERFACE: enp8s0 NNCP_BGP_2_IP_ADDRESS: 100.64.4.2 NNCP_BRIDGE: ospbr NNCP_CLEANUP_TIMEOUT: 120s NNCP_CTLPLANE_IPV6_ADDRESS_PREFIX: 'fd00:aaaa::' NNCP_CTLPLANE_IPV6_ADDRESS_SUFFIX: '10' NNCP_CTLPLANE_IP_ADDRESS_PREFIX: 192.168.122 NNCP_CTLPLANE_IP_ADDRESS_SUFFIX: '10' NNCP_DNS_SERVER: 192.168.122.1 NNCP_DNS_SERVER_IPV6: fd00:aaaa::1 NNCP_GATEWAY: 192.168.122.1 NNCP_GATEWAY_IPV6: fd00:aaaa::1 NNCP_INTERFACE: enp6s0 NNCP_NODES: '' NNCP_TIMEOUT: 240s NOVA: config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_BRANCH: main NOVA_COMMIT_HASH: '' NOVA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/nova-operator/config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_IMG: quay.io/openstack-k8s-operators/nova-operator-index:latest NOVA_REPO: https://github.com/openstack-k8s-operators/nova-operator.git NUMBER_OF_INSTANCES: '1' OCP_NETWORK_NAME: crc OCTAVIA: config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_BRANCH: main OCTAVIA_COMMIT_HASH: '' OCTAVIA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_IMG: quay.io/openstack-k8s-operators/octavia-operator-index:latest OCTAVIA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/kuttl-test.yaml OCTAVIA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/tests/kuttl/tests OCTAVIA_KUTTL_NAMESPACE: octavia-kuttl-tests OCTAVIA_REPO: https://github.com/openstack-k8s-operators/octavia-operator.git OKD: 'false' OPENSTACK_BRANCH: main OPENSTACK_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-bundle:latest OPENSTACK_COMMIT_HASH: '' OPENSTACK_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_CRDS_DIR: openstack_crds OPENSTACK_CTLPLANE: config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_IMG: quay.io/openstack-k8s-operators/openstack-operator-index:latest OPENSTACK_K8S_BRANCH: main OPENSTACK_K8S_TAG: latest OPENSTACK_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/kuttl-test.yaml OPENSTACK_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/tests/kuttl/tests OPENSTACK_KUTTL_NAMESPACE: openstack-kuttl-tests OPENSTACK_NEUTRON_CUSTOM_CONF: '' OPENSTACK_REPO: https://github.com/openstack-k8s-operators/openstack-operator.git OPENSTACK_STORAGE_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-storage-bundle:latest OPERATOR_BASE_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator OPERATOR_CHANNEL: '' OPERATOR_NAMESPACE: openstack-operators OPERATOR_SOURCE: '' OPERATOR_SOURCE_NAMESPACE: '' 
OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm OVNCONTROLLER: config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_NMAP: 'true' OVNDBS: config/samples/ovn_v1beta1_ovndbcluster.yaml OVNDBS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovndbcluster.yaml OVNNORTHD: config/samples/ovn_v1beta1_ovnnorthd.yaml OVNNORTHD_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovnnorthd.yaml OVN_BRANCH: main OVN_COMMIT_HASH: '' OVN_IMG: quay.io/openstack-k8s-operators/ovn-operator-index:latest OVN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/kuttl-test.yaml OVN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/tests/kuttl/tests OVN_KUTTL_NAMESPACE: ovn-kuttl-tests OVN_REPO: https://github.com/openstack-k8s-operators/ovn-operator.git PASSWORD: '12345678' PLACEMENTAPI: config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_DEPL_IMG: unused PLACEMENT_BRANCH: main PLACEMENT_COMMIT_HASH: '' PLACEMENT_IMG: quay.io/openstack-k8s-operators/placement-operator-index:latest PLACEMENT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/kuttl-test.yaml PLACEMENT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/tests/kuttl/tests PLACEMENT_KUTTL_NAMESPACE: placement-kuttl-tests PLACEMENT_REPO: https://github.com/openstack-k8s-operators/placement-operator.git PULL_SECRET: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/pull-secret.txt RABBITMQ: docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_BRANCH: patches RABBITMQ_COMMIT_HASH: '' RABBITMQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rabbitmq-operator/docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_DEPL_IMG: unused RABBITMQ_IMG: quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest RABBITMQ_REPO: https://github.com/openstack-k8s-operators/rabbitmq-cluster-operator.git REDHAT_OPERATORS: 'false' REDIS: config/samples/redis_v1beta1_redis.yaml REDIS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator-redis/config/samples/redis_v1beta1_redis.yaml REDIS_DEPL_IMG: unused RH_REGISTRY_PWD: '' RH_REGISTRY_USER: '' SECRET: osp-secret SG_CORE_DEPL_IMG: unused STANDALONE_COMPUTE_DRIVER: libvirt STANDALONE_EXTERNAL_NET_PREFFIX: 172.21.0 STANDALONE_INTERNALAPI_NET_PREFIX: 172.17.0 STANDALONE_STORAGEMGMT_NET_PREFIX: 172.20.0 STANDALONE_STORAGE_NET_PREFIX: 172.18.0 STANDALONE_TENANT_NET_PREFIX: 172.19.0 STORAGEMGMT_HOST_ROUTES: '' STORAGE_CLASS: local-storage STORAGE_HOST_ROUTES: '' SWIFT: config/samples/swift_v1beta1_swift.yaml SWIFT_BRANCH: main SWIFT_COMMIT_HASH: '' SWIFT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/config/samples/swift_v1beta1_swift.yaml SWIFT_IMG: quay.io/openstack-k8s-operators/swift-operator-index:latest SWIFT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/kuttl-test.yaml SWIFT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/tests/kuttl/tests 
SWIFT_KUTTL_NAMESPACE: swift-kuttl-tests SWIFT_REPO: https://github.com/openstack-k8s-operators/swift-operator.git TELEMETRY: config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_BRANCH: main TELEMETRY_COMMIT_HASH: '' TELEMETRY_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_IMG: quay.io/openstack-k8s-operators/telemetry-operator-index:latest TELEMETRY_KUTTL_BASEDIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator TELEMETRY_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/kuttl-test.yaml TELEMETRY_KUTTL_NAMESPACE: telemetry-kuttl-tests TELEMETRY_KUTTL_RELPATH: tests/kuttl/suites TELEMETRY_REPO: https://github.com/openstack-k8s-operators/telemetry-operator.git TENANT_HOST_ROUTES: '' TIMEOUT: 300s TLS_ENABLED: 'false' WATCHER_BRANCH: '' WATCHER_REPO: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator tripleo_deploy: 'export REGISTRY_USER:' 2025-10-06 21:10:43,816 p=28169 u=zuul n=ansible | TASK [install_yamls : Generate make targets install_yamls_path={{ cifmw_install_yamls_repo }}, output_directory={{ cifmw_install_yamls_tasks_out }}] *** 2025-10-06 21:10:43,816 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:43 +0000 (0:00:00.030) 0:00:55.468 ******** 2025-10-06 21:10:44,118 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:44,128 p=28169 u=zuul n=ansible | TASK [install_yamls : Debug generate_make module var=cifmw_generate_makes] ***** 2025-10-06 21:10:44,128 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:44 +0000 (0:00:00.312) 0:00:55.780 ******** 2025-10-06 21:10:44,149 p=28169 u=zuul n=ansible | ok: [localhost] => cifmw_generate_makes: changed: false debug: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/Makefile: - all - help - cleanup - deploy_cleanup - wait - crc_storage - crc_storage_cleanup - crc_storage_release - crc_storage_with_retries - crc_storage_cleanup_with_retries - operator_namespace - namespace - namespace_cleanup - input - input_cleanup - crc_bmo_setup - crc_bmo_cleanup - openstack_prep - openstack - openstack_wait - openstack_init - openstack_cleanup - openstack_repo - openstack_deploy_prep - openstack_deploy - openstack_wait_deploy - openstack_deploy_cleanup - openstack_update_run - update_services - update_system - openstack_patch_version - edpm_deploy_generate_keys - edpm_patch_ansible_runner_image - edpm_deploy_prep - edpm_deploy_cleanup - edpm_deploy - edpm_deploy_baremetal_prep - edpm_deploy_baremetal - edpm_wait_deploy_baremetal - edpm_wait_deploy - edpm_register_dns - edpm_nova_discover_hosts - openstack_crds - openstack_crds_cleanup - edpm_deploy_networker_prep - edpm_deploy_networker_cleanup - edpm_deploy_networker - infra_prep - infra - infra_cleanup - dns_deploy_prep - dns_deploy - dns_deploy_cleanup - netconfig_deploy_prep - netconfig_deploy - netconfig_deploy_cleanup - memcached_deploy_prep - memcached_deploy - memcached_deploy_cleanup - keystone_prep - keystone - keystone_cleanup - keystone_deploy_prep - keystone_deploy - keystone_deploy_cleanup - barbican_prep - barbican - barbican_cleanup - barbican_deploy_prep - barbican_deploy - barbican_deploy_validate - barbican_deploy_cleanup - mariadb - mariadb_cleanup - mariadb_deploy_prep - mariadb_deploy - mariadb_deploy_cleanup - placement_prep - placement - placement_cleanup - placement_deploy_prep - placement_deploy - placement_deploy_cleanup - glance_prep - glance - glance_cleanup - 
glance_deploy_prep - glance_deploy - glance_deploy_cleanup - ovn_prep - ovn - ovn_cleanup - ovn_deploy_prep - ovn_deploy - ovn_deploy_cleanup - neutron_prep - neutron - neutron_cleanup - neutron_deploy_prep - neutron_deploy - neutron_deploy_cleanup - cinder_prep - cinder - cinder_cleanup - cinder_deploy_prep - cinder_deploy - cinder_deploy_cleanup - rabbitmq_prep - rabbitmq - rabbitmq_cleanup - rabbitmq_deploy_prep - rabbitmq_deploy - rabbitmq_deploy_cleanup - ironic_prep - ironic - ironic_cleanup - ironic_deploy_prep - ironic_deploy - ironic_deploy_cleanup - octavia_prep - octavia - octavia_cleanup - octavia_deploy_prep - octavia_deploy - octavia_deploy_cleanup - designate_prep - designate - designate_cleanup - designate_deploy_prep - designate_deploy - designate_deploy_cleanup - nova_prep - nova - nova_cleanup - nova_deploy_prep - nova_deploy - nova_deploy_cleanup - mariadb_kuttl_run - mariadb_kuttl - kuttl_db_prep - kuttl_db_cleanup - kuttl_common_prep - kuttl_common_cleanup - keystone_kuttl_run - keystone_kuttl - barbican_kuttl_run - barbican_kuttl - placement_kuttl_run - placement_kuttl - cinder_kuttl_run - cinder_kuttl - neutron_kuttl_run - neutron_kuttl - octavia_kuttl_run - octavia_kuttl - designate_kuttl - designate_kuttl_run - ovn_kuttl_run - ovn_kuttl - infra_kuttl_run - infra_kuttl - ironic_kuttl_run - ironic_kuttl - ironic_kuttl_crc - heat_kuttl_run - heat_kuttl - heat_kuttl_crc - ansibleee_kuttl_run - ansibleee_kuttl_cleanup - ansibleee_kuttl_prep - ansibleee_kuttl - glance_kuttl_run - glance_kuttl - manila_kuttl_run - manila_kuttl - swift_kuttl_run - swift_kuttl - horizon_kuttl_run - horizon_kuttl - openstack_kuttl_run - openstack_kuttl - mariadb_chainsaw_run - mariadb_chainsaw - horizon_prep - horizon - horizon_cleanup - horizon_deploy_prep - horizon_deploy - horizon_deploy_cleanup - heat_prep - heat - heat_cleanup - heat_deploy_prep - heat_deploy - heat_deploy_cleanup - ansibleee_prep - ansibleee - ansibleee_cleanup - baremetal_prep - baremetal - baremetal_cleanup - ceph_help - ceph - ceph_cleanup - rook_prep - rook - rook_deploy_prep - rook_deploy - rook_crc_disk - rook_cleanup - lvms - nmstate - nncp - nncp_cleanup - netattach - netattach_cleanup - metallb - metallb_config - metallb_config_cleanup - metallb_cleanup - loki - loki_cleanup - loki_deploy - loki_deploy_cleanup - netobserv - netobserv_cleanup - netobserv_deploy - netobserv_deploy_cleanup - manila_prep - manila - manila_cleanup - manila_deploy_prep - manila_deploy - manila_deploy_cleanup - telemetry_prep - telemetry - telemetry_cleanup - telemetry_deploy_prep - telemetry_deploy - telemetry_deploy_cleanup - telemetry_kuttl_run - telemetry_kuttl - swift_prep - swift - swift_cleanup - swift_deploy_prep - swift_deploy - swift_deploy_cleanup - certmanager - certmanager_cleanup - validate_marketplace - redis_deploy_prep - redis_deploy - redis_deploy_cleanup - set_slower_etcd_profile /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/Makefile: - help - download_tools - nfs - nfs_cleanup - crc - crc_cleanup - crc_scrub - crc_attach_default_interface - crc_attach_default_interface_cleanup - ipv6_lab_network - ipv6_lab_network_cleanup - ipv6_lab_nat64_router - ipv6_lab_nat64_router_cleanup - ipv6_lab_sno - ipv6_lab_sno_cleanup - ipv6_lab - ipv6_lab_cleanup - attach_default_interface - attach_default_interface_cleanup - network_isolation_bridge - network_isolation_bridge_cleanup - edpm_baremetal_compute - edpm_compute - edpm_compute_bootc - edpm_ansible_runner - edpm_computes_bgp - edpm_compute_repos 
- edpm_compute_cleanup - edpm_networker - edpm_networker_cleanup - edpm_deploy_instance - tripleo_deploy - standalone_deploy - standalone_sync - standalone - standalone_cleanup - standalone_snapshot - standalone_revert - cifmw_prepare - cifmw_cleanup - bmaas_network - bmaas_network_cleanup - bmaas_route_crc_and_crc_bmaas_networks - bmaas_route_crc_and_crc_bmaas_networks_cleanup - bmaas_crc_attach_network - bmaas_crc_attach_network_cleanup - bmaas_crc_baremetal_bridge - bmaas_crc_baremetal_bridge_cleanup - bmaas_baremetal_net_nad - bmaas_baremetal_net_nad_cleanup - bmaas_metallb - bmaas_metallb_cleanup - bmaas_virtual_bms - bmaas_virtual_bms_cleanup - bmaas_sushy_emulator - bmaas_sushy_emulator_cleanup - bmaas_sushy_emulator_wait - bmaas_generate_nodes_yaml - bmaas - bmaas_cleanup failed: false success: true 2025-10-06 21:10:44,159 p=28169 u=zuul n=ansible | TASK [install_yamls : Create the install_yamls parameters file dest={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/install-yamls-params.yml, content={{ { 'cifmw_install_yamls_environment': cifmw_install_yamls_environment, 'cifmw_install_yamls_defaults': cifmw_install_yamls_defaults } | to_nice_yaml }}, mode=0644] *** 2025-10-06 21:10:44,159 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:44 +0000 (0:00:00.030) 0:00:55.811 ******** 2025-10-06 21:10:44,564 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:44,579 p=28169 u=zuul n=ansible | TASK [install_yamls : Create empty cifmw_install_yamls_environment if needed cifmw_install_yamls_environment={}] *** 2025-10-06 21:10:44,579 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:44 +0000 (0:00:00.420) 0:00:56.232 ******** 2025-10-06 21:10:44,602 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:44,630 p=28169 u=zuul n=ansible | TASK [discover_latest_image : Get latest image url={{ cifmw_discover_latest_image_base_url }}, image_prefix={{ cifmw_discover_latest_image_qcow_prefix }}, images_file={{ cifmw_discover_latest_image_images_file }}] *** 2025-10-06 21:10:44,630 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:44 +0000 (0:00:00.051) 0:00:56.283 ******** 2025-10-06 21:10:45,288 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:45,295 p=28169 u=zuul n=ansible | TASK [discover_latest_image : Export facts accordingly cifmw_discovered_image_name={{ discovered_image['data']['image_name'] }}, cifmw_discovered_image_url={{ discovered_image['data']['image_url'] }}, cifmw_discovered_hash={{ discovered_image['data']['hash'] }}, cifmw_discovered_hash_algorithm={{ discovered_image['data']['hash_algorithm'] }}, cacheable=True] *** 2025-10-06 21:10:45,295 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:45 +0000 (0:00:00.664) 0:00:56.948 ******** 2025-10-06 21:10:45,318 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:45,330 p=28169 u=zuul n=ansible | TASK [cifmw_setup : Create artifacts with custom params mode=0644, dest={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/custom-params.yml, content={{ ci_framework_params | to_nice_yaml }}] *** 2025-10-06 21:10:45,330 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:45 +0000 (0:00:00.034) 0:00:56.982 ******** 2025-10-06 21:10:45,728 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:10:45,740 p=28169 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks 
| default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-10-06 21:10:45,740 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:45 +0000 (0:00:00.410) 0:00:57.393 ******** 2025-10-06 21:10:45,821 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:45,828 p=28169 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2025-10-06 21:10:45,828 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:45 +0000 (0:00:00.087) 0:00:57.481 ******** 2025-10-06 21:10:45,931 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:45,939 p=28169 u=zuul n=ansible | TASK [run_hook : Loop on hooks for pre_infra _raw_params={{ hook.type }}.yml] *** 2025-10-06 21:10:45,939 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:45 +0000 (0:00:00.110) 0:00:57.591 ******** 2025-10-06 21:10:46,054 p=28169 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/run_hook/tasks/playbook.yml for localhost => (item={'name': 'Download needed tools', 'inventory': 'localhost,', 'connection': 'local', 'type': 'playbook', 'source': '/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/download_tools.yaml'}) 2025-10-06 21:10:46,064 p=28169 u=zuul n=ansible | TASK [run_hook : Set playbook path for Download needed tools cifmw_basedir={{ _bdir }}, hook_name={{ _hook_name }}, playbook_path={{ _play | realpath }}, log_path={{ _bdir }}/logs/{{ step }}_{{ _hook_name }}.log, extra_vars=-e operator_namespace={{ _operator_namespace }} -e namespace={{ _namespace}} {%- if hook.extra_vars is defined and hook.extra_vars|length > 0 -%} {% for key,value in hook.extra_vars.items() -%} {%- if key == 'file' %} -e "@{{ value }}" {%- else %} -e "{{ key }}={{ value }}" {%- endif %} {%- endfor %} {%- endif %}] *** 2025-10-06 21:10:46,064 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:46 +0000 (0:00:00.125) 0:00:57.717 ******** 2025-10-06 21:10:46,111 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:46,119 p=28169 u=zuul n=ansible | TASK [run_hook : Get file stat path={{ playbook_path }}] *********************** 2025-10-06 21:10:46,119 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:46 +0000 (0:00:00.054) 0:00:57.772 ******** 2025-10-06 21:10:46,305 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:46,320 p=28169 u=zuul n=ansible | TASK [run_hook : Fail if playbook doesn't exist msg=Playbook {{ playbook_path }} doesn't seem to exist.] 
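Note on the run_hook sequence above: a hook of type 'playbook' is resolved to an absolute playbook path, every *.yml file found under artifacts/parameters is appended as '-e "@<file>"' extra variables, and the run is logged to logs/<step>_<hook_name>.log. A minimal sketch of how such a hook entry is typically declared in the job variables (the top-level variable name pre_infra is only inferred from the 'Loop on hooks for pre_infra' task name and may differ; the entry values mirror the 'Download needed tools' item shown above; the extra_vars key is optional and illustrative):

  pre_infra:
    - name: Download needed tools
      type: playbook
      inventory: localhost,
      connection: local
      source: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/download_tools.yaml
      extra_vars:
        example_key: example_value   # placeholder; rendered as -e "example_key=example_value" (or -e "@file" when the key is 'file')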
*** 2025-10-06 21:10:46,320 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:46 +0000 (0:00:00.200) 0:00:57.973 ******** 2025-10-06 21:10:46,339 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:10:46,353 p=28169 u=zuul n=ansible | TASK [run_hook : Get parameters files paths={{ (cifmw_basedir, 'artifacts/parameters') | path_join }}, file_type=file, patterns=*.yml] *** 2025-10-06 21:10:46,353 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:46 +0000 (0:00:00.033) 0:00:58.006 ******** 2025-10-06 21:10:46,557 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:46,568 p=28169 u=zuul n=ansible | TASK [run_hook : Add parameters artifacts as extra variables extra_vars={{ extra_vars }} {% for file in cifmw_run_hook_parameters_files.files %} -e "@{{ file.path }}" {%- endfor %}] *** 2025-10-06 21:10:46,568 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:46 +0000 (0:00:00.214) 0:00:58.220 ******** 2025-10-06 21:10:46,586 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:46,596 p=28169 u=zuul n=ansible | TASK [run_hook : Ensure log directory exists path={{ log_path | dirname }}, state=directory, mode=0755] *** 2025-10-06 21:10:46,597 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:46 +0000 (0:00:00.028) 0:00:58.249 ******** 2025-10-06 21:10:46,775 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:46,782 p=28169 u=zuul n=ansible | TASK [run_hook : Ensure artifacts directory exists path={{ cifmw_basedir }}/artifacts, state=directory, mode=0755] *** 2025-10-06 21:10:46,782 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:46 +0000 (0:00:00.185) 0:00:58.434 ******** 2025-10-06 21:10:46,950 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:10:46,960 p=28169 u=zuul n=ansible | TASK [run_hook : Run hook without retry - Download needed tools] *************** 2025-10-06 21:10:46,960 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:10:46 +0000 (0:00:00.178) 0:00:58.613 ******** 2025-10-06 21:10:47,025 p=28169 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_000_run_hook_without_retry.log 2025-10-06 21:11:17,763 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:17,772 p=28169 u=zuul n=ansible | TASK [run_hook : Run hook with retry - Download needed tools] ****************** 2025-10-06 21:11:17,772 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:17 +0000 (0:00:30.811) 0:01:29.424 ******** 2025-10-06 21:11:17,799 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:17,810 p=28169 u=zuul n=ansible | TASK [run_hook : Check if we have a file path={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml] *** 2025-10-06 21:11:17,810 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:17 +0000 (0:00:00.038) 0:01:29.462 ******** 2025-10-06 21:11:17,973 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:17,990 p=28169 u=zuul n=ansible | TASK [run_hook : Load generated content in main playbook file={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml] *** 2025-10-06 21:11:17,991 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:17 +0000 (0:00:00.180) 0:01:29.643 ******** 2025-10-06 21:11:18,007 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:18,068 p=28169 u=zuul n=ansible | PLAY [Prepare host virtualization] ********************************************* 2025-10-06 21:11:18,094 p=28169 u=zuul n=ansible | TASK [cifmw_setup : Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] *** 2025-10-06 
21:11:18,094 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.103) 0:01:29.746 ******** 2025-10-06 21:11:18,155 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:18,162 p=28169 u=zuul n=ansible | TASK [Ensure libvirt is present/configured name=libvirt_manager] *************** 2025-10-06 21:11:18,162 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.068) 0:01:29.814 ******** 2025-10-06 21:11:18,186 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:18,193 p=28169 u=zuul n=ansible | TASK [Perpare OpenShift provisioner node name=openshift_provisioner_node] ****** 2025-10-06 21:11:18,193 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.030) 0:01:29.845 ******** 2025-10-06 21:11:18,215 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:18,246 p=28169 u=zuul n=ansible | PLAY [Run cifmw_setup infra, build package, container and operators, deploy EDPM] *** 2025-10-06 21:11:18,280 p=28169 u=zuul n=ansible | TASK [cifmw_setup : Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] *** 2025-10-06 21:11:18,280 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.087) 0:01:29.933 ******** 2025-10-06 21:11:18,336 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:18,344 p=28169 u=zuul n=ansible | TASK [networking_mapper : Check for Networking Environment Definition file existence path={{ cifmw_networking_mapper_networking_env_def_path }}] *** 2025-10-06 21:11:18,344 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.063) 0:01:29.996 ******** 2025-10-06 21:11:18,524 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:18,531 p=28169 u=zuul n=ansible | TASK [networking_mapper : Check for Networking Definition file existance that=['_net_env_def_stat.stat.exists'], msg=Ensure that the Networking Environment Definition file exists in {{ cifmw_networking_mapper_networking_env_def_path }}, quiet=True] *** 2025-10-06 21:11:18,531 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.187) 0:01:30.184 ******** 2025-10-06 21:11:18,551 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:18,559 p=28169 u=zuul n=ansible | TASK [networking_mapper : Load the Networking Definition from file path={{ cifmw_networking_mapper_networking_env_def_path }}] *** 2025-10-06 21:11:18,559 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.027) 0:01:30.211 ******** 2025-10-06 21:11:18,579 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:18,587 p=28169 u=zuul n=ansible | TASK [networking_mapper : Set cifmw_networking_env_definition is present cifmw_networking_env_definition={{ _net_env_def_slurp['content'] | b64decode | from_yaml }}, cacheable=True] *** 2025-10-06 21:11:18,587 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.027) 0:01:30.239 ******** 2025-10-06 21:11:18,606 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:18,620 p=28169 u=zuul n=ansible | TASK [Deploy OCP using Hive name=hive] ***************************************** 2025-10-06 21:11:18,620 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.033) 0:01:30.273 ******** 2025-10-06 21:11:18,641 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:18,649 p=28169 u=zuul n=ansible | TASK [Prepare CRC name=rhol_crc] *********************************************** 2025-10-06 21:11:18,649 p=28169 u=zuul n=ansible | Monday 06 October 2025 
21:11:18 +0000 (0:00:00.028) 0:01:30.301 ******** 2025-10-06 21:11:18,672 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:18,679 p=28169 u=zuul n=ansible | TASK [Deploy OpenShift cluster using dev-scripts name=devscripts] ************** 2025-10-06 21:11:18,680 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.030) 0:01:30.332 ******** 2025-10-06 21:11:18,699 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:18,707 p=28169 u=zuul n=ansible | TASK [openshift_login : Ensure output directory exists path={{ cifmw_openshift_login_basedir }}/artifacts, state=directory, mode=0755] *** 2025-10-06 21:11:18,707 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.027) 0:01:30.359 ******** 2025-10-06 21:11:18,887 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:18,903 p=28169 u=zuul n=ansible | TASK [openshift_login : OpenShift login _raw_params=login.yml] ***************** 2025-10-06 21:11:18,904 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.197) 0:01:30.556 ******** 2025-10-06 21:11:18,954 p=28169 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/openshift_login/tasks/login.yml for localhost 2025-10-06 21:11:18,971 p=28169 u=zuul n=ansible | TASK [openshift_login : Check if the password file is present path={{ cifmw_openshift_login_password_file | default(cifmw_openshift_password_file) }}] *** 2025-10-06 21:11:18,971 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:18 +0000 (0:00:00.067) 0:01:30.623 ******** 2025-10-06 21:11:18,992 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:19,002 p=28169 u=zuul n=ansible | TASK [openshift_login : Fetch user password content src={{ cifmw_openshift_login_password_file | default(cifmw_openshift_password_file) }}] *** 2025-10-06 21:11:19,002 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.031) 0:01:30.655 ******** 2025-10-06 21:11:19,023 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:19,034 p=28169 u=zuul n=ansible | TASK [openshift_login : Set user password as a fact cifmw_openshift_login_password={{ cifmw_openshift_login_password_file_slurp.content | b64decode }}, cacheable=True] *** 2025-10-06 21:11:19,034 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.031) 0:01:30.686 ******** 2025-10-06 21:11:19,055 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:19,066 p=28169 u=zuul n=ansible | TASK [openshift_login : Set role variables cifmw_openshift_login_kubeconfig={{ cifmw_openshift_login_kubeconfig | default(cifmw_openshift_kubeconfig) | default( ansible_env.KUBECONFIG if 'KUBECONFIG' in ansible_env else cifmw_openshift_login_kubeconfig_default_path ) | trim }}, cifmw_openshift_login_user={{ cifmw_openshift_login_user | default(cifmw_openshift_user) | default(omit) }}, cifmw_openshift_login_password={{ cifmw_openshift_login_password | default(cifmw_openshift_password) | default(omit) }}, cifmw_openshift_login_api={{ cifmw_openshift_login_api | default(cifmw_openshift_api) | default(omit) }}, cifmw_openshift_login_cert_login={{ cifmw_openshift_login_cert_login | default(false)}}, cifmw_openshift_login_provided_token={{ cifmw_openshift_provided_token | default(omit) }}, cacheable=True] *** 2025-10-06 21:11:19,066 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.032) 0:01:30.718 ******** 2025-10-06 21:11:19,101 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 
21:11:19,110 p=28169 u=zuul n=ansible | TASK [openshift_login : Check if kubeconfig exists path={{ cifmw_openshift_login_kubeconfig }}] *** 2025-10-06 21:11:19,110 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.044) 0:01:30.763 ******** 2025-10-06 21:11:19,277 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:19,285 p=28169 u=zuul n=ansible | TASK [openshift_login : Assert that enough data is provided to log in to OpenShift that=cifmw_openshift_login_kubeconfig_stat.stat.exists or (cifmw_openshift_login_provided_token is defined and cifmw_openshift_login_provided_token != '') or ( (cifmw_openshift_login_user is defined) and (cifmw_openshift_login_password is defined) and (cifmw_openshift_login_api is defined) ), msg=If an existing kubeconfig is not provided user/pwd or provided/initial token and API URL must be given] *** 2025-10-06 21:11:19,285 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.174) 0:01:30.938 ******** 2025-10-06 21:11:19,317 p=28169 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2025-10-06 21:11:19,328 p=28169 u=zuul n=ansible | TASK [openshift_login : Fetch kubeconfig content src={{ cifmw_openshift_login_kubeconfig }}] *** 2025-10-06 21:11:19,328 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.043) 0:01:30.981 ******** 2025-10-06 21:11:19,349 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:19,360 p=28169 u=zuul n=ansible | TASK [openshift_login : Fetch x509 key based users cifmw_openshift_login_key_based_users={{ ( cifmw_openshift_login_kubeconfig_content_b64.content | b64decode | from_yaml ). users | default([]) | selectattr('user.client-certificate-data', 'defined') | map(attribute="name") | map("split", "/") | map("first") }}, cacheable=True] *** 2025-10-06 21:11:19,360 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.031) 0:01:31.012 ******** 2025-10-06 21:11:19,385 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:19,396 p=28169 u=zuul n=ansible | TASK [openshift_login : Assign key based user if not provided and available cifmw_openshift_login_user={{ (cifmw_openshift_login_assume_cert_system_user | ternary('system:', '')) + (cifmw_openshift_login_key_based_users | map('replace', 'system:', '') | unique | first) }}, cifmw_openshift_login_cert_login=True, cacheable=True] *** 2025-10-06 21:11:19,396 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.036) 0:01:31.049 ******** 2025-10-06 21:11:19,422 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:19,433 p=28169 u=zuul n=ansible | TASK [openshift_login : Set the retry count cifmw_openshift_login_retries_cnt={{ 0 if cifmw_openshift_login_retries_cnt is undefined else cifmw_openshift_login_retries_cnt|int + 1 }}] *** 2025-10-06 21:11:19,433 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.037) 0:01:31.086 ******** 2025-10-06 21:11:19,465 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:19,477 p=28169 u=zuul n=ansible | TASK [openshift_login : Fetch token _raw_params=try_login.yml] ***************** 2025-10-06 21:11:19,477 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.043) 0:01:31.129 ******** 2025-10-06 21:11:19,507 p=28169 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/openshift_login/tasks/try_login.yml for localhost 2025-10-06 21:11:19,523 p=28169 u=zuul n=ansible | TASK [openshift_login : 
Try get OpenShift access token _raw_params=oc whoami -t] *** 2025-10-06 21:11:19,523 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.046) 0:01:31.175 ******** 2025-10-06 21:11:19,539 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:19,548 p=28169 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift token output_dir={{ cifmw_openshift_login_basedir }}/artifacts, script=oc login {%- if cifmw_openshift_login_provided_token is not defined %} {%- if cifmw_openshift_login_user is defined %} -u {{ cifmw_openshift_login_user }} {%- endif %} {%- if cifmw_openshift_login_password is defined %} -p {{ cifmw_openshift_login_password }} {%- endif %} {% else %} --token={{ cifmw_openshift_login_provided_token }} {%- endif %} {%- if cifmw_openshift_login_skip_tls_verify|bool %} --insecure-skip-tls-verify=true {%- endif %} {%- if cifmw_openshift_login_api is defined %} {{ cifmw_openshift_login_api }} {%- endif %}] *** 2025-10-06 21:11:19,548 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:19 +0000 (0:00:00.024) 0:01:31.200 ******** 2025-10-06 21:11:19,605 p=28169 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_001_fetch_openshift.log 2025-10-06 21:11:20,140 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:20,156 p=28169 u=zuul n=ansible | TASK [openshift_login : Ensure kubeconfig is provided that=cifmw_openshift_login_kubeconfig != ""] *** 2025-10-06 21:11:20,156 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:20 +0000 (0:00:00.608) 0:01:31.808 ******** 2025-10-06 21:11:20,191 p=28169 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2025-10-06 21:11:20,242 p=28169 u=zuul n=ansible | TASK [openshift_login : Fetch new OpenShift access token _raw_params=oc whoami -t] *** 2025-10-06 21:11:20,242 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:20 +0000 (0:00:00.085) 0:01:31.894 ******** 2025-10-06 21:11:20,531 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:20,539 p=28169 u=zuul n=ansible | TASK [openshift_login : Set new OpenShift token cifmw_openshift_login_token={{ (not cifmw_openshift_login_new_token_out.skipped | default(false)) | ternary(cifmw_openshift_login_new_token_out.stdout, cifmw_openshift_login_whoami_out.stdout) }}, cacheable=True] *** 2025-10-06 21:11:20,540 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:20 +0000 (0:00:00.297) 0:01:32.192 ******** 2025-10-06 21:11:20,566 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:20,573 p=28169 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift API URL _raw_params=oc whoami --show-server=true] *** 2025-10-06 21:11:20,573 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:20 +0000 (0:00:00.033) 0:01:32.225 ******** 2025-10-06 21:11:20,899 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:20,907 p=28169 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift kubeconfig context _raw_params=oc whoami -c] *** 2025-10-06 21:11:20,908 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:20 +0000 (0:00:00.334) 0:01:32.560 ******** 2025-10-06 21:11:21,203 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:21,213 p=28169 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift current user _raw_params=oc whoami] **** 2025-10-06 21:11:21,213 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:21 +0000 (0:00:00.305) 0:01:32.865 ******** 2025-10-06 21:11:21,524 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 
21:11:21,533 p=28169 u=zuul n=ansible | TASK [openshift_login : Set OpenShift user, context and API facts cifmw_openshift_login_api={{ cifmw_openshift_login_api_out.stdout }}, cifmw_openshift_login_context={{ cifmw_openshift_login_context_out.stdout }}, cifmw_openshift_login_user={{ _oauth_user }}, cifmw_openshift_kubeconfig={{ cifmw_openshift_login_kubeconfig }}, cifmw_openshift_api={{ cifmw_openshift_login_api_out.stdout }}, cifmw_openshift_context={{ cifmw_openshift_login_context_out.stdout }}, cifmw_openshift_user={{ _oauth_user }}, cifmw_openshift_token={{ cifmw_openshift_login_token | default(omit) }}, cifmw_install_yamls_environment={{ ( cifmw_install_yamls_environment | combine({'KUBECONFIG': cifmw_openshift_login_kubeconfig}) ) if cifmw_install_yamls_environment is defined else omit }}, cacheable=True] *** 2025-10-06 21:11:21,534 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:21 +0000 (0:00:00.320) 0:01:33.186 ******** 2025-10-06 21:11:21,577 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:21,584 p=28169 u=zuul n=ansible | TASK [openshift_login : Create the openshift_login parameters file dest={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/openshift-login-params.yml, content={{ cifmw_openshift_login_params_content | from_yaml | to_nice_yaml }}, mode=0600] *** 2025-10-06 21:11:21,584 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:21 +0000 (0:00:00.050) 0:01:33.237 ******** 2025-10-06 21:11:22,016 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:22,023 p=28169 u=zuul n=ansible | TASK [openshift_login : Read the install yamls parameters file path={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/install-yamls-params.yml] *** 2025-10-06 21:11:22,024 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:22 +0000 (0:00:00.439) 0:01:33.676 ******** 2025-10-06 21:11:22,351 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:22,364 p=28169 u=zuul n=ansible | TASK [openshift_login : Append the KUBECONFIG to the install yamls parameters content={{ cifmw_openshift_login_install_yamls_artifacts_slurp['content'] | b64decode | from_yaml | combine( { 'cifmw_install_yamls_environment': { 'KUBECONFIG': cifmw_openshift_login_kubeconfig } }, recursive=true) | to_nice_yaml }}, dest={{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts/parameters/install-yamls-params.yml, mode=0600] *** 2025-10-06 21:11:22,364 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:22 +0000 (0:00:00.340) 0:01:34.016 ******** 2025-10-06 21:11:22,812 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:22,842 p=28169 u=zuul n=ansible | TASK [openshift_setup : Ensure output directory exists path={{ cifmw_openshift_setup_basedir }}/artifacts, state=directory, mode=0755] *** 2025-10-06 21:11:22,842 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:22 +0000 (0:00:00.478) 0:01:34.495 ******** 2025-10-06 21:11:23,046 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:23,056 p=28169 u=zuul n=ansible | TASK [openshift_setup : Fetch namespaces to create cifmw_openshift_setup_namespaces={{ (( ([cifmw_install_yamls_defaults['NAMESPACE']] + ([cifmw_install_yamls_defaults['OPERATOR_NAMESPACE']] if 'OPERATOR_NAMESPACE' is in cifmw_install_yamls_defaults else []) ) if cifmw_install_yamls_defaults is defined else [] ) + cifmw_openshift_setup_create_namespaces) | unique }}] *** 2025-10-06 21:11:23,056 p=28169 u=zuul n=ansible | Monday 06 October 2025 
21:11:23 +0000 (0:00:00.213) 0:01:34.709 ******** 2025-10-06 21:11:23,081 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:23,097 p=28169 u=zuul n=ansible | TASK [openshift_setup : Create required namespaces kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit) }}, name={{ item }}, kind=Namespace, state=present] *** 2025-10-06 21:11:23,097 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:23 +0000 (0:00:00.041) 0:01:34.750 ******** 2025-10-06 21:11:23,993 p=28169 u=zuul n=ansible | changed: [localhost] => (item=openstack) 2025-10-06 21:11:24,667 p=28169 u=zuul n=ansible | changed: [localhost] => (item=openstack-operators) 2025-10-06 21:11:24,679 p=28169 u=zuul n=ansible | TASK [openshift_setup : Get internal OpenShift registry route kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, kind=Route, name=default-route, namespace=openshift-image-registry] *** 2025-10-06 21:11:24,679 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:24 +0000 (0:00:01.581) 0:01:36.331 ******** 2025-10-06 21:11:24,694 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:24,704 p=28169 u=zuul n=ansible | TASK [openshift_setup : Allow anonymous image-pulls in CRC registry for targeted namespaces state=present, kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'kind': 'RoleBinding', 'apiVersion': 'rbac.authorization.k8s.io/v1', 'metadata': {'name': 'system:image-puller', 'namespace': '{{ item }}'}, 'subjects': [{'kind': 'User', 'name': 'system:anonymous'}, {'kind': 'User', 'name': 'system:unauthenticated'}], 'roleRef': {'kind': 'ClusterRole', 'name': 'system:image-puller'}}] *** 2025-10-06 21:11:24,705 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:24 +0000 (0:00:00.025) 0:01:36.357 ******** 2025-10-06 21:11:24,725 p=28169 u=zuul n=ansible | skipping: [localhost] => (item=openstack) 2025-10-06 21:11:24,726 p=28169 u=zuul n=ansible | skipping: [localhost] => (item=openstack-operators) 2025-10-06 21:11:24,726 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:24,735 p=28169 u=zuul n=ansible | TASK [openshift_setup : Wait for the image registry to be ready kind=Deployment, name=image-registry, namespace=openshift-image-registry, kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, wait=True, wait_sleep=10, wait_timeout=600, wait_condition={'type': 'Available', 'status': 'True'}] *** 2025-10-06 21:11:24,735 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:24 +0000 (0:00:00.030) 0:01:36.387 ******** 2025-10-06 21:11:24,755 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:24,763 p=28169 u=zuul n=ansible | TASK [openshift_setup : Login into OpenShift internal registry output_dir={{ cifmw_openshift_setup_basedir }}/artifacts, script=podman login -u {{ cifmw_openshift_user }} -p {{ cifmw_openshift_token }} {%- if cifmw_openshift_setup_skip_internal_registry_tls_verify|bool %} --tls-verify=false {%- endif %} {{ cifmw_openshift_setup_registry_default_route.resources[0].spec.host }}] *** 2025-10-06 21:11:24,764 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:24 +0000 (0:00:00.028) 0:01:36.416 ******** 2025-10-06 21:11:24,783 p=28169 u=zuul 
n=ansible | skipping: [localhost] 2025-10-06 21:11:24,791 p=28169 u=zuul n=ansible | TASK [Ensure we have custom CA installed on host role=install_ca] ************** 2025-10-06 21:11:24,791 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:24 +0000 (0:00:00.027) 0:01:36.444 ******** 2025-10-06 21:11:24,809 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:24,818 p=28169 u=zuul n=ansible | TASK [openshift_setup : Update ca bundle _raw_params=update-ca-trust extract] *** 2025-10-06 21:11:24,818 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:24 +0000 (0:00:00.026) 0:01:36.470 ******** 2025-10-06 21:11:24,839 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:24,848 p=28169 u=zuul n=ansible | TASK [openshift_setup : Slurp CAs file src={{ cifmw_openshift_setup_ca_bundle_path }}] *** 2025-10-06 21:11:24,848 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:24 +0000 (0:00:00.030) 0:01:36.501 ******** 2025-10-06 21:11:24,870 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:24,883 p=28169 u=zuul n=ansible | TASK [openshift_setup : Create config map with registry CAs kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'apiVersion': 'v1', 'kind': 'ConfigMap', 'metadata': {'namespace': 'openshift-config', 'name': 'registry-cas'}, 'data': '{{ _config_map_data | items2dict }}'}] *** 2025-10-06 21:11:24,883 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:24 +0000 (0:00:00.034) 0:01:36.535 ******** 2025-10-06 21:11:24,905 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:24,914 p=28169 u=zuul n=ansible | TASK [openshift_setup : Install Red Hat CA for pulling images from internal registry kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, merge_type=merge, definition={'apiVersion': 'config.openshift.io/v1', 'kind': 'Image', 'metadata': {'name': 'cluster'}, 'spec': {'additionalTrustedCA': {'name': 'registry-cas'}}}] *** 2025-10-06 21:11:24,914 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:24 +0000 (0:00:00.030) 0:01:36.566 ******** 2025-10-06 21:11:24,936 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:24,944 p=28169 u=zuul n=ansible | TASK [openshift_setup : Add insecure registry kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, merge_type=merge, definition={'apiVersion': 'config.openshift.io/v1', 'kind': 'Image', 'metadata': {'name': 'cluster'}, 'spec': {'registrySources': {'insecureRegistries': ['{{ cifmw_update_containers_registry }}'], 'allowedRegistries': '{{ all_registries }}'}}}] *** 2025-10-06 21:11:24,944 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:24 +0000 (0:00:00.030) 0:01:36.596 ******** 2025-10-06 21:11:25,645 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:25,653 p=28169 u=zuul n=ansible | TASK [openshift_setup : Create a ICSP with repository digest mirrors kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'apiVersion': 'operator.openshift.io/v1alpha1', 'kind': 'ImageContentSourcePolicy', 'metadata': {'name': 'registry-digest-mirrors'}, 'spec': {'repositoryDigestMirrors': '{{ cifmw_openshift_setup_digest_mirrors }}'}}] *** 
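The 'Add insecure registry' task that reported 'changed' above merge-patches the cluster-scoped image.config.openshift.io/cluster resource. Written out as a standalone task, assuming kubernetes.core.k8s as the module behind the logged parameters and a placeholder registry host in place of cifmw_update_containers_registry:

  - name: Add insecure registry (sketch)
    kubernetes.core.k8s:
      kubeconfig: "{{ cifmw_openshift_kubeconfig }}"
      merge_type: merge
      definition:
        apiVersion: config.openshift.io/v1
        kind: Image
        metadata:
          name: cluster
        spec:
          registrySources:
            insecureRegistries:
              - "registry.example.com:5000"          # placeholder for cifmw_update_containers_registry
            allowedRegistries: "{{ all_registries }}"  # full allow-list, as in the logged definition

The same change can also be applied imperatively with 'oc patch image.config.openshift.io/cluster --type=merge'.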
2025-10-06 21:11:25,653 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:25 +0000 (0:00:00.708) 0:01:37.305 ******** 2025-10-06 21:11:25,679 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:25,689 p=28169 u=zuul n=ansible | TASK [openshift_setup : Metal3 tweaks _raw_params=metal3_config.yml] *********** 2025-10-06 21:11:25,689 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:25 +0000 (0:00:00.036) 0:01:37.341 ******** 2025-10-06 21:11:25,716 p=28169 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/openshift_setup/tasks/metal3_config.yml for localhost 2025-10-06 21:11:25,731 p=28169 u=zuul n=ansible | TASK [openshift_setup : Fetch Metal3 configuration name _raw_params=oc get Provisioning -o name] *** 2025-10-06 21:11:25,731 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:25 +0000 (0:00:00.041) 0:01:37.383 ******** 2025-10-06 21:11:25,747 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:25,756 p=28169 u=zuul n=ansible | TASK [openshift_setup : Apply the patch to Metal3 Provisioning _raw_params=oc patch {{ _cifmw_openshift_setup_provisioning_name.stdout }} --type='json' -p='[{"op": "replace", "path": "/spec/watchAllNamespaces", "value": true}]'] *** 2025-10-06 21:11:25,757 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:25 +0000 (0:00:00.025) 0:01:37.409 ******** 2025-10-06 21:11:25,769 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:25,778 p=28169 u=zuul n=ansible | TASK [openshift_setup : Gather network.operator info kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, api_version=operator.openshift.io/v1, kind=Network, name=cluster] *** 2025-10-06 21:11:25,778 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:25 +0000 (0:00:00.021) 0:01:37.430 ******** 2025-10-06 21:11:26,688 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:26,707 p=28169 u=zuul n=ansible | TASK [openshift_setup : Patch network operator api_version=operator.openshift.io/v1, kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Network, name=cluster, persist_config=True, patch=[{'path': '/spec/defaultNetwork/ovnKubernetesConfig/gatewayConfig/routingViaHost', 'value': True, 'op': 'replace'}, {'path': '/spec/defaultNetwork/ovnKubernetesConfig/gatewayConfig/ipForwarding', 'value': 'Global', 'op': 'replace'}]] *** 2025-10-06 21:11:26,707 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:26 +0000 (0:00:00.928) 0:01:38.359 ******** 2025-10-06 21:11:27,657 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:27,665 p=28169 u=zuul n=ansible | TASK [openshift_setup : Patch samples registry configuration kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, api_version=samples.operator.openshift.io/v1, kind=Config, name=cluster, patch=[{'op': 'replace', 'path': '/spec/samplesRegistry', 'value': 'registry.redhat.io'}]] *** 2025-10-06 21:11:27,665 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:27 +0000 (0:00:00.958) 0:01:39.318 ******** 2025-10-06 21:11:28,419 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:28,438 p=28169 u=zuul n=ansible | TASK [openshift_setup : Delete the pods from openshift-marketplace namespace kind=Pod, state=absent, delete_all=True, kubeconfig={{ cifmw_openshift_kubeconfig }}, namespace=openshift-marketplace] *** 2025-10-06 21:11:28,438 
p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:28 +0000 (0:00:00.772) 0:01:40.091 ******** 2025-10-06 21:11:28,459 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:28,477 p=28169 u=zuul n=ansible | TASK [openshift_setup : Wait for openshift-marketplace pods to be running _raw_params=oc wait pod --all --for=condition=Ready -n openshift-marketplace --timeout=1m] *** 2025-10-06 21:11:28,477 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:28 +0000 (0:00:00.038) 0:01:40.129 ******** 2025-10-06 21:11:28,496 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:28,515 p=28169 u=zuul n=ansible | TASK [Deploy Observability operator. name=openshift_obs] *********************** 2025-10-06 21:11:28,515 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:28 +0000 (0:00:00.038) 0:01:40.168 ******** 2025-10-06 21:11:28,537 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:28,548 p=28169 u=zuul n=ansible | TASK [Deploy Metal3 BMHs name=deploy_bmh] ************************************** 2025-10-06 21:11:28,548 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:28 +0000 (0:00:00.032) 0:01:40.200 ******** 2025-10-06 21:11:28,576 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:28,588 p=28169 u=zuul n=ansible | TASK [Install certmanager operator role name=cert_manager] ********************* 2025-10-06 21:11:28,588 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:28 +0000 (0:00:00.040) 0:01:40.240 ******** 2025-10-06 21:11:28,694 p=28169 u=zuul n=ansible | TASK [cert_manager : Create role needed directories path={{ cifmw_cert_manager_manifests_dir }}, state=directory, mode=0755] *** 2025-10-06 21:11:28,694 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:28 +0000 (0:00:00.106) 0:01:40.347 ******** 2025-10-06 21:11:28,908 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:28,915 p=28169 u=zuul n=ansible | TASK [cert_manager : Create the cifmw_cert_manager_operator_namespace namespace" kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit) }}, name={{ cifmw_cert_manager_operator_namespace }}, kind=Namespace, state=present] *** 2025-10-06 21:11:28,915 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:28 +0000 (0:00:00.220) 0:01:40.567 ******** 2025-10-06 21:11:29,624 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:29,632 p=28169 u=zuul n=ansible | TASK [cert_manager : Install from Release Manifest _raw_params=release_manifest.yml] *** 2025-10-06 21:11:29,632 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:29 +0000 (0:00:00.717) 0:01:41.285 ******** 2025-10-06 21:11:29,662 p=28169 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/cert_manager/tasks/release_manifest.yml for localhost 2025-10-06 21:11:29,674 p=28169 u=zuul n=ansible | TASK [cert_manager : Download release manifests url={{ cifmw_cert_manager_release_manifest }}, dest={{ cifmw_cert_manager_manifests_dir }}/cert_manager_manifest.yml, mode=0664] *** 2025-10-06 21:11:29,674 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:29 +0000 (0:00:00.041) 0:01:41.326 ******** 2025-10-06 21:11:30,233 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:30,241 p=28169 u=zuul n=ansible | TASK [cert_manager : Install cert-manager from release manifest kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ 
cifmw_openshift_context | default(omit) }}, state=present, src={{ cifmw_cert_manager_manifests_dir }}/cert_manager_manifest.yml] *** 2025-10-06 21:11:30,241 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:30 +0000 (0:00:00.566) 0:01:41.893 ******** 2025-10-06 21:11:32,722 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:32,740 p=28169 u=zuul n=ansible | TASK [cert_manager : Install from OLM Manifest _raw_params=olm_manifest.yml] *** 2025-10-06 21:11:32,740 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:32 +0000 (0:00:02.499) 0:01:44.393 ******** 2025-10-06 21:11:32,754 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:32,764 p=28169 u=zuul n=ansible | TASK [cert_manager : Check for cert-manager namspeace existance kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit) }}, name=cert-manager, kind=Namespace, field_selectors=['status.phase=Active']] *** 2025-10-06 21:11:32,764 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:32 +0000 (0:00:00.024) 0:01:44.417 ******** 2025-10-06 21:11:33,434 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:33,442 p=28169 u=zuul n=ansible | TASK [cert_manager : Wait for cert-manager pods to be ready kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit) }}, namespace=cert-manager, kind=Pod, wait=True, wait_sleep=10, wait_timeout=600, wait_condition={'type': 'Ready', 'status': 'True'}, label_selectors=['app = {{ item }}']] *** 2025-10-06 21:11:33,442 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:33 +0000 (0:00:00.677) 0:01:45.095 ******** 2025-10-06 21:11:44,189 p=28169 u=zuul n=ansible | ok: [localhost] => (item=cainjector) 2025-10-06 21:11:44,912 p=28169 u=zuul n=ansible | ok: [localhost] => (item=webhook) 2025-10-06 21:11:45,592 p=28169 u=zuul n=ansible | ok: [localhost] => (item=cert-manager) 2025-10-06 21:11:45,607 p=28169 u=zuul n=ansible | TASK [cert_manager : Create $HOME/bin dir path={{ lookup('env', 'HOME') }}/bin, state=directory, mode=0755] *** 2025-10-06 21:11:45,607 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:45 +0000 (0:00:12.165) 0:01:57.260 ******** 2025-10-06 21:11:45,793 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:45,801 p=28169 u=zuul n=ansible | TASK [cert_manager : Install cert-manager cmctl CLI url=https://github.com/cert-manager/cmctl/releases/{{ cifmw_cert_manager_version }}/download/cmctl_{{ _os }}_{{ _arch }}, dest={{ lookup('env', 'HOME') }}/bin/cmctl, mode=0755] *** 2025-10-06 21:11:45,801 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:45 +0000 (0:00:00.194) 0:01:57.454 ******** 2025-10-06 21:11:47,045 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:47,055 p=28169 u=zuul n=ansible | TASK [cert_manager : Verify cert_manager api _raw_params={{ lookup('env', 'HOME') }}/bin/cmctl check api --wait=2m] *** 2025-10-06 21:11:47,055 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:01.253) 0:01:58.707 ******** 2025-10-06 21:11:47,363 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:47,377 p=28169 u=zuul n=ansible | TASK [Configure hosts networking using nmstate name=ci_nmstate] **************** 2025-10-06 21:11:47,377 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:00.321) 0:01:59.029 ******** 2025-10-06 21:11:47,401 p=28169 u=zuul n=ansible | skipping: [localhost] 
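To summarize the cert_manager verification above: the role waits for the cainjector, webhook and cert-manager pods to become Ready, then installs cmctl and runs 'cmctl check api --wait=2m'. A rough two-task equivalent, assuming kubernetes.core.k8s_info and ansible.builtin.command as the modules behind the logged parameters:

  - name: Wait for cert-manager pods to be Ready (sketch)
    kubernetes.core.k8s_info:
      kubeconfig: "{{ cifmw_openshift_kubeconfig }}"
      kind: Pod
      namespace: cert-manager
      label_selectors:
        - "app = {{ item }}"
      wait: true
      wait_sleep: 10
      wait_timeout: 600
      wait_condition:
        type: Ready
        status: "True"
    loop: [cainjector, webhook, cert-manager]

  - name: Verify the cert-manager API is answering (sketch)
    ansible.builtin.command: "{{ lookup('env', 'HOME') }}/bin/cmctl check api --wait=2m"
    changed_when: false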
2025-10-06 21:11:47,412 p=28169 u=zuul n=ansible | TASK [Configure multus networks name=ci_multus] ******************************** 2025-10-06 21:11:47,412 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:00.035) 0:01:59.065 ******** 2025-10-06 21:11:47,433 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:47,445 p=28169 u=zuul n=ansible | TASK [Deploy Sushy Emulator service pod name=sushy_emulator] ******************* 2025-10-06 21:11:47,446 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:00.033) 0:01:59.098 ******** 2025-10-06 21:11:47,468 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:47,479 p=28169 u=zuul n=ansible | TASK [Setup Libvirt on controller name=libvirt_manager] ************************ 2025-10-06 21:11:47,479 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:00.033) 0:01:59.131 ******** 2025-10-06 21:11:47,499 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:47,507 p=28169 u=zuul n=ansible | TASK [Prepare container package builder name=pkg_build] ************************ 2025-10-06 21:11:47,507 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:00.028) 0:01:59.160 ******** 2025-10-06 21:11:47,535 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:47,543 p=28169 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-10-06 21:11:47,543 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:00.036) 0:01:59.196 ******** 2025-10-06 21:11:47,607 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:47,618 p=28169 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] 
*** 2025-10-06 21:11:47,618 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:00.074) 0:01:59.271 ******** 2025-10-06 21:11:47,707 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:47,716 p=28169 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_infra _raw_params={{ hook.type }}.yml] *** 2025-10-06 21:11:47,716 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:00.097) 0:01:59.368 ******** 2025-10-06 21:11:47,828 p=28169 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/run_hook/tasks/playbook.yml for localhost => (item={'name': 'Fetch nodes facts and save them as parameters', 'type': 'playbook', 'inventory': '/home/zuul/ci-framework-data/artifacts/zuul_inventory.yml', 'source': 'fetch_compute_facts.yml'}) 2025-10-06 21:11:47,839 p=28169 u=zuul n=ansible | TASK [run_hook : Set playbook path for Fetch nodes facts and save them as parameters cifmw_basedir={{ _bdir }}, hook_name={{ _hook_name }}, playbook_path={{ _play | realpath }}, log_path={{ _bdir }}/logs/{{ step }}_{{ _hook_name }}.log, extra_vars=-e operator_namespace={{ _operator_namespace }} -e namespace={{ _namespace}} {%- if hook.extra_vars is defined and hook.extra_vars|length > 0 -%} {% for key,value in hook.extra_vars.items() -%} {%- if key == 'file' %} -e "@{{ value }}" {%- else %} -e "{{ key }}={{ value }}" {%- endif %} {%- endfor %} {%- endif %}] *** 2025-10-06 21:11:47,839 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:00.123) 0:01:59.492 ******** 2025-10-06 21:11:47,880 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:47,888 p=28169 u=zuul n=ansible | TASK [run_hook : Get file stat path={{ playbook_path }}] *********************** 2025-10-06 21:11:47,888 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:47 +0000 (0:00:00.048) 0:01:59.540 ******** 2025-10-06 21:11:48,069 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:48,077 p=28169 u=zuul n=ansible | TASK [run_hook : Fail if playbook doesn't exist msg=Playbook {{ playbook_path }} doesn't seem to exist.] 
*** 2025-10-06 21:11:48,078 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:48 +0000 (0:00:00.189) 0:01:59.730 ******** 2025-10-06 21:11:48,091 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:48,101 p=28169 u=zuul n=ansible | TASK [run_hook : Get parameters files paths={{ (cifmw_basedir, 'artifacts/parameters') | path_join }}, file_type=file, patterns=*.yml] *** 2025-10-06 21:11:48,101 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:48 +0000 (0:00:00.023) 0:01:59.754 ******** 2025-10-06 21:11:48,293 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:48,304 p=28169 u=zuul n=ansible | TASK [run_hook : Add parameters artifacts as extra variables extra_vars={{ extra_vars }} {% for file in cifmw_run_hook_parameters_files.files %} -e "@{{ file.path }}" {%- endfor %}] *** 2025-10-06 21:11:48,304 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:48 +0000 (0:00:00.202) 0:01:59.957 ******** 2025-10-06 21:11:48,322 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:48,333 p=28169 u=zuul n=ansible | TASK [run_hook : Ensure log directory exists path={{ log_path | dirname }}, state=directory, mode=0755] *** 2025-10-06 21:11:48,333 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:48 +0000 (0:00:00.028) 0:01:59.986 ******** 2025-10-06 21:11:48,514 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:48,522 p=28169 u=zuul n=ansible | TASK [run_hook : Ensure artifacts directory exists path={{ cifmw_basedir }}/artifacts, state=directory, mode=0755] *** 2025-10-06 21:11:48,522 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:48 +0000 (0:00:00.188) 0:02:00.174 ******** 2025-10-06 21:11:48,693 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:48,705 p=28169 u=zuul n=ansible | TASK [run_hook : Run hook without retry - Fetch nodes facts and save them as parameters] *** 2025-10-06 21:11:48,706 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:48 +0000 (0:00:00.183) 0:02:00.358 ******** 2025-10-06 21:11:48,760 p=28169 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_002_run_hook_without_retry_fetch.log 2025-10-06 21:11:59,247 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:11:59,259 p=28169 u=zuul n=ansible | TASK [run_hook : Run hook with retry - Fetch nodes facts and save them as parameters] *** 2025-10-06 21:11:59,260 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:10.553) 0:02:10.912 ******** 2025-10-06 21:11:59,278 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:59,286 p=28169 u=zuul n=ansible | TASK [run_hook : Check if we have a file path={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml] *** 2025-10-06 21:11:59,286 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.026) 0:02:10.938 ******** 2025-10-06 21:11:59,456 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:59,463 p=28169 u=zuul n=ansible | TASK [run_hook : Load generated content in main playbook file={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml] *** 2025-10-06 21:11:59,463 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.177) 0:02:11.116 ******** 2025-10-06 21:11:59,483 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:59,502 p=28169 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not 
mapping', '(hooks | default([])) is iterable']] *** 2025-10-06 21:11:59,502 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.038) 0:02:11.154 ******** 2025-10-06 21:11:59,553 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:59,560 p=28169 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2025-10-06 21:11:59,560 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.058) 0:02:11.213 ******** 2025-10-06 21:11:59,655 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:59,663 p=28169 u=zuul n=ansible | TASK [run_hook : Loop on hooks for pre_package_build _raw_params={{ hook.type }}.yml] *** 2025-10-06 21:11:59,663 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.102) 0:02:11.316 ******** 2025-10-06 21:11:59,749 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:59,762 p=28169 u=zuul n=ansible | TASK [cifmw_setup : Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] *** 2025-10-06 21:11:59,762 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.098) 0:02:11.414 ******** 2025-10-06 21:11:59,840 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:59,848 p=28169 u=zuul n=ansible | TASK [pkg_build : Generate volume list build_volumes={% for pkg in cifmw_pkg_build_list -%} - "{{ pkg.src|default(cifmw_pkg_build_pkg_basedir ~ '/' ~ pkg.name) }}:/root/src/{{ pkg.name }}:z" - "{{ cifmw_pkg_build_basedir }}/volumes/packages/{{ pkg.name }}:/root/{{ pkg.name }}:z" - "{{ cifmw_pkg_build_basedir }}/logs/build_{{ pkg.name }}:/root/logs:z" {% endfor -%} - "{{ cifmw_pkg_build_basedir }}/volumes/packages/gating_repo:/root/gating_repo:z" - "{{ cifmw_pkg_build_basedir }}/artifacts/repositories:/root/yum.repos.d:z,ro" - "{{ cifmw_pkg_build_basedir }}/artifacts/build-packages.yml:/root/playbook.yml:z,ro" ] *** 2025-10-06 21:11:59,849 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.086) 0:02:11.501 ******** 2025-10-06 21:11:59,870 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:59,878 p=28169 u=zuul n=ansible | TASK [pkg_build : Build package using container name={{ pkg.name }}-builder, auto_remove=True, detach=False, privileged=True, log_driver=k8s-file, log_level=info, log_opt={'path': '{{ cifmw_pkg_build_basedir }}/logs/{{ pkg.name }}-builder.log'}, image={{ cifmw_pkg_build_ctx_name }}, volume={{ build_volumes | from_yaml }}, security_opt=['label=disable', 'seccomp=unconfined', 'apparmor=unconfined'], env={'PROJECT': '{{ pkg.name }}'}, command=ansible-playbook -i localhost, -c local playbook.yml] *** 2025-10-06 21:11:59,878 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.029) 0:02:11.531 ******** 2025-10-06 21:11:59,890 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:11:59,903 p=28169 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-10-06 21:11:59,903 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.024) 0:02:11.556 ******** 2025-10-06 21:11:59,965 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:11:59,972 p=28169 u=zuul n=ansible | TASK [run_hook : Assert single hooks 
are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2025-10-06 21:11:59,973 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:11:59 +0000 (0:00:00.069) 0:02:11.625 ******** 2025-10-06 21:12:00,064 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:00,072 p=28169 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_package_build _raw_params={{ hook.type }}.yml] *** 2025-10-06 21:12:00,072 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:00 +0000 (0:00:00.099) 0:02:11.725 ******** 2025-10-06 21:12:00,161 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:00,203 p=28169 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-10-06 21:12:00,203 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:00 +0000 (0:00:00.130) 0:02:11.855 ******** 2025-10-06 21:12:00,253 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:00,266 p=28169 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2025-10-06 21:12:00,266 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:00 +0000 (0:00:00.062) 0:02:11.918 ******** 2025-10-06 21:12:00,362 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:00,380 p=28169 u=zuul n=ansible | TASK [run_hook : Loop on hooks for pre_container_build _raw_params={{ hook.type }}.yml] *** 2025-10-06 21:12:00,380 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:00 +0000 (0:00:00.114) 0:02:12.032 ******** 2025-10-06 21:12:00,476 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:00,489 p=28169 u=zuul n=ansible | TASK [cifmw_setup : Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] *** 2025-10-06 21:12:00,489 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:00 +0000 (0:00:00.109) 0:02:12.142 ******** 2025-10-06 21:12:00,602 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:00,610 p=28169 u=zuul n=ansible | TASK [cifmw_setup : Nothing to do yet msg=No support for that step yet] ******** 2025-10-06 21:12:00,610 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:00 +0000 (0:00:00.120) 0:02:12.262 ******** 2025-10-06 21:12:00,625 p=28169 u=zuul n=ansible | ok: [localhost] => msg: No support for that step yet 2025-10-06 21:12:00,633 p=28169 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-10-06 21:12:00,633 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:00 +0000 (0:00:00.023) 0:02:12.285 ******** 2025-10-06 21:12:00,684 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:00,693 p=28169 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] 
*** 2025-10-06 21:12:00,693 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:00 +0000 (0:00:00.059) 0:02:12.345 ******** 2025-10-06 21:12:00,824 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:00,832 p=28169 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_container_build _raw_params={{ hook.type }}.yml] *** 2025-10-06 21:12:00,832 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:00 +0000 (0:00:00.139) 0:02:12.485 ******** 2025-10-06 21:12:00,924 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:00,943 p=28169 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-10-06 21:12:00,943 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:00 +0000 (0:00:00.110) 0:02:12.596 ******** 2025-10-06 21:12:00,996 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:01,004 p=28169 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2025-10-06 21:12:01,004 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.061) 0:02:12.657 ******** 2025-10-06 21:12:01,102 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:01,110 p=28169 u=zuul n=ansible | TASK [run_hook : Loop on hooks for pre_operator_build _raw_params={{ hook.type }}.yml] *** 2025-10-06 21:12:01,110 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.105) 0:02:12.763 ******** 2025-10-06 21:12:01,203 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,230 p=28169 u=zuul n=ansible | TASK [cifmw_setup : Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] *** 2025-10-06 21:12:01,231 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.120) 0:02:12.883 ******** 2025-10-06 21:12:01,283 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:01,291 p=28169 u=zuul n=ansible | TASK [operator_build : Ensure mandatory directories exist path={{ cifmw_operator_build_basedir }}/{{ item }}, state=directory, mode=0755] *** 2025-10-06 21:12:01,291 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.060) 0:02:12.943 ******** 2025-10-06 21:12:01,316 p=28169 u=zuul n=ansible | skipping: [localhost] => (item=artifacts) 2025-10-06 21:12:01,320 p=28169 u=zuul n=ansible | skipping: [localhost] => (item=logs) 2025-10-06 21:12:01,321 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,331 p=28169 u=zuul n=ansible | TASK [operator_build : Initialize role output cifmw_operator_build_output={{ cifmw_operator_build_output }}, cifmw_operator_build_meta_name={{ cifmw_operator_build_meta_name }}] *** 2025-10-06 21:12:01,331 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.039) 0:02:12.983 ******** 2025-10-06 21:12:01,351 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,359 p=28169 u=zuul n=ansible | TASK [operator_build : Populate operators list with zuul info _raw_params=zuul_info.yml] *** 2025-10-06 21:12:01,360 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.028) 0:02:13.012 ******** 2025-10-06 21:12:01,385 p=28169 u=zuul n=ansible | skipping: [localhost] => (item={'branch': 'main', 'change': '287', 'change_url': 
'https://github.com/openstack-k8s-operators/watcher-operator/pull/287', 'commit_id': '14377136e67c9cd67507a059bfde2f19f140387d', 'patchset': '14377136e67c9cd67507a059bfde2f19f140387d', 'project': {'canonical_hostname': 'github.com', 'canonical_name': 'github.com/openstack-k8s-operators/watcher-operator', 'name': 'openstack-k8s-operators/watcher-operator', 'short_name': 'watcher-operator', 'src_dir': 'src/github.com/openstack-k8s-operators/watcher-operator'}, 'topic': None}) 2025-10-06 21:12:01,386 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,394 p=28169 u=zuul n=ansible | TASK [operator_build : Merge lists of operators operators_list={{ [cifmw_operator_build_operators, zuul_info_operators | default([])] | community.general.lists_mergeby('name') }}] *** 2025-10-06 21:12:01,394 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.034) 0:02:13.046 ******** 2025-10-06 21:12:01,418 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,426 p=28169 u=zuul n=ansible | TASK [operator_build : Get meta_operator src dir from operators_list cifmw_operator_build_meta_src={{ (operators_list | selectattr('name', 'eq', cifmw_operator_build_meta_name) | map(attribute='src') | first ) | default(cifmw_operator_build_meta_src, true) }}] *** 2025-10-06 21:12:01,427 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.032) 0:02:13.079 ******** 2025-10-06 21:12:01,449 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,456 p=28169 u=zuul n=ansible | TASK [operator_build : Adds meta-operator to the list operators_list={{ [operators_list, meta_operator_info] | community.general.lists_mergeby('name') }}] *** 2025-10-06 21:12:01,456 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.029) 0:02:13.109 ******** 2025-10-06 21:12:01,478 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,486 p=28169 u=zuul n=ansible | TASK [operator_build : Clone operator's code when src dir is empty _raw_params=clone.yml] *** 2025-10-06 21:12:01,486 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.029) 0:02:13.138 ******** 2025-10-06 21:12:01,508 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,516 p=28169 u=zuul n=ansible | TASK [operator_build : Building operators _raw_params=build.yml] *************** 2025-10-06 21:12:01,516 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.029) 0:02:13.168 ******** 2025-10-06 21:12:01,538 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,546 p=28169 u=zuul n=ansible | TASK [operator_build : Building meta operator _raw_params=build.yml] *********** 2025-10-06 21:12:01,546 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.030) 0:02:13.199 ******** 2025-10-06 21:12:01,570 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,578 p=28169 u=zuul n=ansible | TASK [operator_build : Gather role output dest={{ cifmw_operator_build_basedir }}/artifacts/custom-operators.yml, content={{ cifmw_operator_build_output | to_nice_yaml }}, mode=0644] *** 2025-10-06 21:12:01,578 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.031) 0:02:13.230 ******** 2025-10-06 21:12:01,599 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,613 p=28169 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | 
default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-10-06 21:12:01,613 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.035) 0:02:13.266 ******** 2025-10-06 21:12:01,674 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:01,684 p=28169 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2025-10-06 21:12:01,684 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.071) 0:02:13.337 ******** 2025-10-06 21:12:01,783 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:01,795 p=28169 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_operator_build _raw_params={{ hook.type }}.yml] *** 2025-10-06 21:12:01,795 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.110) 0:02:13.447 ******** 2025-10-06 21:12:01,890 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:01,910 p=28169 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2025-10-06 21:12:01,910 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.115) 0:02:13.563 ******** 2025-10-06 21:12:01,965 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:01,975 p=28169 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] 
*** 2025-10-06 21:12:01,975 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:01 +0000 (0:00:00.064) 0:02:13.627 ******** 2025-10-06 21:12:02,072 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:02,081 p=28169 u=zuul n=ansible | TASK [run_hook : Loop on hooks for pre_deploy _raw_params={{ hook.type }}.yml] *** 2025-10-06 21:12:02,082 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:02 +0000 (0:00:00.106) 0:02:13.734 ******** 2025-10-06 21:12:02,204 p=28169 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/run_hook/tasks/playbook.yml for localhost => (item={'name': '80 Kustomize OpenStack CR', 'type': 'playbook', 'source': 'control_plane_horizon.yml'}) 2025-10-06 21:12:02,214 p=28169 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/run_hook/tasks/playbook.yml for localhost => (item={'name': 'Create coo subscription', 'type': 'playbook', 'source': '/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/playbooks/deploy_cluster_observability_operator.yaml'}) 2025-10-06 21:12:02,229 p=28169 u=zuul n=ansible | TASK [run_hook : Set playbook path for 80 Kustomize OpenStack CR cifmw_basedir={{ _bdir }}, hook_name={{ _hook_name }}, playbook_path={{ _play | realpath }}, log_path={{ _bdir }}/logs/{{ step }}_{{ _hook_name }}.log, extra_vars=-e operator_namespace={{ _operator_namespace }} -e namespace={{ _namespace}} {%- if hook.extra_vars is defined and hook.extra_vars|length > 0 -%} {% for key,value in hook.extra_vars.items() -%} {%- if key == 'file' %} -e "@{{ value }}" {%- else %} -e "{{ key }}={{ value }}" {%- endif %} {%- endfor %} {%- endif %}] *** 2025-10-06 21:12:02,229 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:02 +0000 (0:00:00.147) 0:02:13.882 ******** 2025-10-06 21:12:02,274 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:02,283 p=28169 u=zuul n=ansible | TASK [run_hook : Get file stat path={{ playbook_path }}] *********************** 2025-10-06 21:12:02,283 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:02 +0000 (0:00:00.053) 0:02:13.936 ******** 2025-10-06 21:12:02,481 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:02,490 p=28169 u=zuul n=ansible | TASK [run_hook : Fail if playbook doesn't exist msg=Playbook {{ playbook_path }} doesn't seem to exist.] 
*** 2025-10-06 21:12:02,490 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:02 +0000 (0:00:00.206) 0:02:14.142 ******** 2025-10-06 21:12:02,521 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:02,531 p=28169 u=zuul n=ansible | TASK [run_hook : Get parameters files paths={{ (cifmw_basedir, 'artifacts/parameters') | path_join }}, file_type=file, patterns=*.yml] *** 2025-10-06 21:12:02,531 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:02 +0000 (0:00:00.040) 0:02:14.183 ******** 2025-10-06 21:12:02,717 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:02,725 p=28169 u=zuul n=ansible | TASK [run_hook : Add parameters artifacts as extra variables extra_vars={{ extra_vars }} {% for file in cifmw_run_hook_parameters_files.files %} -e "@{{ file.path }}" {%- endfor %}] *** 2025-10-06 21:12:02,725 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:02 +0000 (0:00:00.194) 0:02:14.378 ******** 2025-10-06 21:12:02,766 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:02,774 p=28169 u=zuul n=ansible | TASK [run_hook : Ensure log directory exists path={{ log_path | dirname }}, state=directory, mode=0755] *** 2025-10-06 21:12:02,774 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:02 +0000 (0:00:00.048) 0:02:14.427 ******** 2025-10-06 21:12:02,971 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:02,984 p=28169 u=zuul n=ansible | TASK [run_hook : Ensure artifacts directory exists path={{ cifmw_basedir }}/artifacts, state=directory, mode=0755] *** 2025-10-06 21:12:02,985 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:02 +0000 (0:00:00.210) 0:02:14.637 ******** 2025-10-06 21:12:03,166 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:03,179 p=28169 u=zuul n=ansible | TASK [run_hook : Run hook without retry - 80 Kustomize OpenStack CR] *********** 2025-10-06 21:12:03,179 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:03 +0000 (0:00:00.194) 0:02:14.831 ******** 2025-10-06 21:12:03,242 p=28169 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_003_run_hook_without_retry_80.log 2025-10-06 21:12:04,924 p=28169 u=zuul n=ansible | changed: [localhost] 2025-10-06 21:12:04,941 p=28169 u=zuul n=ansible | TASK [run_hook : Run hook with retry - 80 Kustomize OpenStack CR] ************** 2025-10-06 21:12:04,942 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:04 +0000 (0:00:01.762) 0:02:16.594 ******** 2025-10-06 21:12:04,965 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:04,973 p=28169 u=zuul n=ansible | TASK [run_hook : Check if we have a file path={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml] *** 2025-10-06 21:12:04,974 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:04 +0000 (0:00:00.031) 0:02:16.626 ******** 2025-10-06 21:12:05,165 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:05,177 p=28169 u=zuul n=ansible | TASK [run_hook : Load generated content in main playbook file={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml] *** 2025-10-06 21:12:05,177 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:05 +0000 (0:00:00.203) 0:02:16.829 ******** 2025-10-06 21:12:05,198 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:05,212 p=28169 u=zuul n=ansible | TASK [run_hook : Set playbook path for Create coo subscription cifmw_basedir={{ _bdir }}, hook_name={{ _hook_name }}, playbook_path={{ _play | realpath }}, log_path={{ _bdir }}/logs/{{ step }}_{{ _hook_name }}.log, extra_vars=-e 
operator_namespace={{ _operator_namespace }} -e namespace={{ _namespace}} {%- if hook.extra_vars is defined and hook.extra_vars|length > 0 -%} {% for key,value in hook.extra_vars.items() -%} {%- if key == 'file' %} -e "@{{ value }}" {%- else %} -e "{{ key }}={{ value }}" {%- endif %} {%- endfor %} {%- endif %}] *** 2025-10-06 21:12:05,212 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:05 +0000 (0:00:00.035) 0:02:16.864 ******** 2025-10-06 21:12:05,255 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:05,267 p=28169 u=zuul n=ansible | TASK [run_hook : Get file stat path={{ playbook_path }}] *********************** 2025-10-06 21:12:05,267 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:05 +0000 (0:00:00.055) 0:02:16.919 ******** 2025-10-06 21:12:05,451 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:05,459 p=28169 u=zuul n=ansible | TASK [run_hook : Fail if playbook doesn't exist msg=Playbook {{ playbook_path }} doesn't seem to exist.] *** 2025-10-06 21:12:05,459 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:05 +0000 (0:00:00.191) 0:02:17.111 ******** 2025-10-06 21:12:05,479 p=28169 u=zuul n=ansible | skipping: [localhost] 2025-10-06 21:12:05,487 p=28169 u=zuul n=ansible | TASK [run_hook : Get parameters files paths={{ (cifmw_basedir, 'artifacts/parameters') | path_join }}, file_type=file, patterns=*.yml] *** 2025-10-06 21:12:05,487 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:05 +0000 (0:00:00.028) 0:02:17.140 ******** 2025-10-06 21:12:05,662 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:05,682 p=28169 u=zuul n=ansible | TASK [run_hook : Add parameters artifacts as extra variables extra_vars={{ extra_vars }} {% for file in cifmw_run_hook_parameters_files.files %} -e "@{{ file.path }}" {%- endfor %}] *** 2025-10-06 21:12:05,682 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:05 +0000 (0:00:00.194) 0:02:17.334 ******** 2025-10-06 21:12:05,707 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:05,719 p=28169 u=zuul n=ansible | TASK [run_hook : Ensure log directory exists path={{ log_path | dirname }}, state=directory, mode=0755] *** 2025-10-06 21:12:05,720 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:05 +0000 (0:00:00.037) 0:02:17.372 ******** 2025-10-06 21:12:05,901 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:05,924 p=28169 u=zuul n=ansible | TASK [run_hook : Ensure artifacts directory exists path={{ cifmw_basedir }}/artifacts, state=directory, mode=0755] *** 2025-10-06 21:12:05,924 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:05 +0000 (0:00:00.204) 0:02:17.576 ******** 2025-10-06 21:12:06,112 p=28169 u=zuul n=ansible | ok: [localhost] 2025-10-06 21:12:06,127 p=28169 u=zuul n=ansible | TASK [run_hook : Run hook without retry - Create coo subscription] ************* 2025-10-06 21:12:06,127 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:06 +0000 (0:00:00.202) 0:02:17.779 ******** 2025-10-06 21:12:06,184 p=28169 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_004_run_hook_without_retry_create.log 2025-10-06 21:12:07,321 p=28169 u=zuul n=ansible | fatal: [localhost]: FAILED! 
=> censored: 'the output has been hidden due to the fact that ''no_log: true'' was specified for this result' changed: true
2025-10-06 21:12:07,322 p=28169 u=zuul n=ansible | NO MORE HOSTS LEFT *************************************************************
2025-10-06 21:12:07,323 p=28169 u=zuul n=ansible | PLAY RECAP *********************************************************************
2025-10-06 21:12:07,323 p=28169 u=zuul n=ansible | localhost : ok=147 changed=44 unreachable=0 failed=1 skipped=95 rescued=0 ignored=1
2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | Monday 06 October 2025 21:12:07 +0000 (0:00:01.196) 0:02:18.976 ********
2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | ===============================================================================
2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | run_hook : Run hook without retry - Download needed tools -------------- 30.81s
2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | ci_setup : Install needed packages ------------------------------------- 25.45s
2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | cert_manager : Wait for cert-manager pods to be ready ------------------ 12.17s
2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | run_hook : Run hook without retry - Fetch nodes facts and save them as parameters -- 10.55s
2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | repo_setup : Initialize python venv and install requirements ------------ 8.40s
2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | ci_setup : Install openshift client ------------------------------------- 5.11s
2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | cert_manager : Install cert-manager from release manifest --------------- 2.50s
2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | run_hook : Run hook without retry - 80 Kustomize OpenStack CR ----------- 1.76s
2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | openshift_setup : Create required namespaces ---------------------------- 1.58s
2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | install_ca : Update ca bundle ------------------------------------------- 1.43s
2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | cert_manager : Install cert-manager cmctl CLI --------------------------- 1.25s
2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | run_hook : Run hook without retry - Create coo subscription ------------- 1.20s
2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | repo_setup : Get repo-setup repository ---------------------------------- 1.18s
2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | ci_setup : Manage directories ------------------------------------------- 1.09s
2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | repo_setup : Make sure git-core package is installed -------------------- 1.00s
2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | openshift_setup : Patch network operator -------------------------------- 0.96s
2025-10-06 21:12:07,324 p=28169 u=zuul n=ansible | openshift_setup : Gather network.operator info -------------------------- 0.93s
2025-10-06 21:12:07,325 p=28169 u=zuul n=ansible | Gathering Facts --------------------------------------------------------- 0.93s
2025-10-06 21:12:07,325 p=28169 u=zuul n=ansible | repo_setup : Install repo-setup package --------------------------------- 0.85s
2025-10-06 21:12:07,325 p=28169 u=zuul n=ansible | openshift_setup : Patch samples registry configuration ------------------ 0.77s
home/zuul/zuul-output/logs/ci-framework-data/artifacts/0000755000175000017500000000000015071030372022341 5ustar 
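The two pre_deploy hooks iterated above can be read back from the included items; a minimal sketch of the equivalent hook list, written out as the run_hook role's `hooks` variable that the asserts above validate (how this list is wired into the job's own variable layout is not shown in this log, and the commented extra_vars entry is purely hypothetical):

hooks:
  - name: 80 Kustomize OpenStack CR
    type: playbook
    source: control_plane_horizon.yml
  - name: Create coo subscription
    type: playbook
    source: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/playbooks/deploy_cluster_observability_operator.yaml
    # extra_vars:                 # hypothetical; per the "Set playbook path" template above,
    #   file: /path/to/vars.yml   # a 'file' key renders as -e "@<path>"
    #   some_var: some_value      # and any other key renders as -e "key=value"

Each playbook-type hook then runs with those rendered -e flags plus -e "@<file>" for every *.yml found under {{ cifmw_basedir }}/artifacts/parameters (the "Add parameters artifacts as extra variables" task above); "Create coo subscription" is the hook that failed here, with its output censored by no_log.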
zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/0000755000175000017500000000000015071030122023456 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/0000755000175000017500000000000015071030122027511 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/0000755000175000017500000000000015071030352030643 5ustar zuulzuul././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000204415071030124033374 0ustar zuulzuul--- - name: Debug make_openstack_deploy_prep_env when: make_openstack_deploy_prep_env is defined ansible.builtin.debug: var: make_openstack_deploy_prep_env - name: Debug make_openstack_deploy_prep_params when: make_openstack_deploy_prep_params is defined ansible.builtin.debug: var: make_openstack_deploy_prep_params - name: Run openstack_deploy_prep retries: "{{ make_openstack_deploy_prep_retries | default(omit) }}" delay: "{{ make_openstack_deploy_prep_delay | default(omit) }}" until: "{{ make_openstack_deploy_prep_until | default(true) }}" register: "make_openstack_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_deploy_prep" dry_run: "{{ make_openstack_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_deploy_prep_env|default({})), **(make_openstack_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000173115071030124033376 0ustar zuulzuul--- - name: Debug make_openstack_deploy_env when: make_openstack_deploy_env is defined ansible.builtin.debug: var: make_openstack_deploy_env - name: Debug make_openstack_deploy_params when: make_openstack_deploy_params is defined ansible.builtin.debug: var: make_openstack_deploy_params - name: Run openstack_deploy retries: "{{ make_openstack_deploy_retries | default(omit) }}" delay: "{{ make_openstack_deploy_delay | default(omit) }}" until: "{{ make_openstack_deploy_until | default(true) }}" register: "make_openstack_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_deploy" dry_run: "{{ make_openstack_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_deploy_env|default({})), **(make_openstack_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_wait_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000204415071030124033374 0ustar zuulzuul--- - name: Debug make_openstack_wait_deploy_env when: make_openstack_wait_deploy_env is 
defined ansible.builtin.debug: var: make_openstack_wait_deploy_env - name: Debug make_openstack_wait_deploy_params when: make_openstack_wait_deploy_params is defined ansible.builtin.debug: var: make_openstack_wait_deploy_params - name: Run openstack_wait_deploy retries: "{{ make_openstack_wait_deploy_retries | default(omit) }}" delay: "{{ make_openstack_wait_deploy_delay | default(omit) }}" until: "{{ make_openstack_wait_deploy_until | default(true) }}" register: "make_openstack_wait_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_wait_deploy" dry_run: "{{ make_openstack_wait_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_wait_deploy_env|default({})), **(make_openstack_wait_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000212115071030124033370 0ustar zuulzuul--- - name: Debug make_openstack_deploy_cleanup_env when: make_openstack_deploy_cleanup_env is defined ansible.builtin.debug: var: make_openstack_deploy_cleanup_env - name: Debug make_openstack_deploy_cleanup_params when: make_openstack_deploy_cleanup_params is defined ansible.builtin.debug: var: make_openstack_deploy_cleanup_params - name: Run openstack_deploy_cleanup retries: "{{ make_openstack_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_openstack_deploy_cleanup_delay | default(omit) }}" until: "{{ make_openstack_deploy_cleanup_until | default(true) }}" register: "make_openstack_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_deploy_cleanup" dry_run: "{{ make_openstack_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_deploy_cleanup_env|default({})), **(make_openstack_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_update_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000202515071030124033373 0ustar zuulzuul--- - name: Debug make_openstack_update_run_env when: make_openstack_update_run_env is defined ansible.builtin.debug: var: make_openstack_update_run_env - name: Debug make_openstack_update_run_params when: make_openstack_update_run_params is defined ansible.builtin.debug: var: make_openstack_update_run_params - name: Run openstack_update_run retries: "{{ make_openstack_update_run_retries | default(omit) }}" delay: "{{ make_openstack_update_run_delay | default(omit) }}" until: "{{ make_openstack_update_run_until | default(true) }}" register: "make_openstack_update_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_update_run" dry_run: "{{ 
make_openstack_update_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_update_run_env|default({})), **(make_openstack_update_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_update_services.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_update_s0000644000175000017500000000171215071030124033365 0ustar zuulzuul--- - name: Debug make_update_services_env when: make_update_services_env is defined ansible.builtin.debug: var: make_update_services_env - name: Debug make_update_services_params when: make_update_services_params is defined ansible.builtin.debug: var: make_update_services_params - name: Run update_services retries: "{{ make_update_services_retries | default(omit) }}" delay: "{{ make_update_services_delay | default(omit) }}" until: "{{ make_update_services_until | default(true) }}" register: "make_update_services_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make update_services" dry_run: "{{ make_update_services_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_update_services_env|default({})), **(make_update_services_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_namespace.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_namespac0000644000175000017500000000156015071030124033351 0ustar zuulzuul--- - name: Debug make_namespace_env when: make_namespace_env is defined ansible.builtin.debug: var: make_namespace_env - name: Debug make_namespace_params when: make_namespace_params is defined ansible.builtin.debug: var: make_namespace_params - name: Run namespace retries: "{{ make_namespace_retries | default(omit) }}" delay: "{{ make_namespace_delay | default(omit) }}" until: "{{ make_namespace_until | default(true) }}" register: "make_namespace_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make namespace" dry_run: "{{ make_namespace_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_namespace_env|default({})), **(make_namespace_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_namespace_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_namespac0000644000175000017500000000175015071030124033352 0ustar zuulzuul--- - name: Debug make_namespace_cleanup_env when: make_namespace_cleanup_env is defined ansible.builtin.debug: var: make_namespace_cleanup_env - name: Debug make_namespace_cleanup_params when: make_namespace_cleanup_params is defined ansible.builtin.debug: var: make_namespace_cleanup_params - name: Run namespace_cleanup retries: "{{ make_namespace_cleanup_retries | default(omit) }}" delay: "{{ make_namespace_cleanup_delay | default(omit) }}" until: "{{ make_namespace_cleanup_until | default(true) }}" register: "make_namespace_cleanup_status" 
cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make namespace_cleanup" dry_run: "{{ make_namespace_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_namespace_cleanup_env|default({})), **(make_namespace_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000014600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_input.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_input.ym0000644000175000017500000000146415071030124033350 0ustar zuulzuul--- - name: Debug make_input_env when: make_input_env is defined ansible.builtin.debug: var: make_input_env - name: Debug make_input_params when: make_input_params is defined ansible.builtin.debug: var: make_input_params - name: Run input retries: "{{ make_input_retries | default(omit) }}" delay: "{{ make_input_delay | default(omit) }}" until: "{{ make_input_until | default(true) }}" register: "make_input_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make input" dry_run: "{{ make_input_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_input_env|default({})), **(make_input_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_input_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_input_cl0000644000175000017500000000165415071030124033403 0ustar zuulzuul--- - name: Debug make_input_cleanup_env when: make_input_cleanup_env is defined ansible.builtin.debug: var: make_input_cleanup_env - name: Debug make_input_cleanup_params when: make_input_cleanup_params is defined ansible.builtin.debug: var: make_input_cleanup_params - name: Run input_cleanup retries: "{{ make_input_cleanup_retries | default(omit) }}" delay: "{{ make_input_cleanup_delay | default(omit) }}" until: "{{ make_input_cleanup_until | default(true) }}" register: "make_input_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make input_cleanup" dry_run: "{{ make_input_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_input_cleanup_env|default({})), **(make_input_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_bmo_setup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_bmo_0000644000175000017500000000165415071030124033331 0ustar zuulzuul--- - name: Debug make_crc_bmo_setup_env when: make_crc_bmo_setup_env is defined ansible.builtin.debug: var: make_crc_bmo_setup_env - name: Debug make_crc_bmo_setup_params when: make_crc_bmo_setup_params is defined ansible.builtin.debug: var: make_crc_bmo_setup_params - name: Run crc_bmo_setup retries: "{{ make_crc_bmo_setup_retries | default(omit) }}" delay: "{{ make_crc_bmo_setup_delay | default(omit) }}" until: "{{ 
make_crc_bmo_setup_until | default(true) }}" register: "make_crc_bmo_setup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_bmo_setup" dry_run: "{{ make_crc_bmo_setup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_bmo_setup_env|default({})), **(make_crc_bmo_setup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_bmo_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_bmo_0000644000175000017500000000171215071030124033324 0ustar zuulzuul--- - name: Debug make_crc_bmo_cleanup_env when: make_crc_bmo_cleanup_env is defined ansible.builtin.debug: var: make_crc_bmo_cleanup_env - name: Debug make_crc_bmo_cleanup_params when: make_crc_bmo_cleanup_params is defined ansible.builtin.debug: var: make_crc_bmo_cleanup_params - name: Run crc_bmo_cleanup retries: "{{ make_crc_bmo_cleanup_retries | default(omit) }}" delay: "{{ make_crc_bmo_cleanup_delay | default(omit) }}" until: "{{ make_crc_bmo_cleanup_until | default(true) }}" register: "make_crc_bmo_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_bmo_cleanup" dry_run: "{{ make_crc_bmo_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_bmo_cleanup_env|default({})), **(make_crc_bmo_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000167315071030124033403 0ustar zuulzuul--- - name: Debug make_openstack_prep_env when: make_openstack_prep_env is defined ansible.builtin.debug: var: make_openstack_prep_env - name: Debug make_openstack_prep_params when: make_openstack_prep_params is defined ansible.builtin.debug: var: make_openstack_prep_params - name: Run openstack_prep retries: "{{ make_openstack_prep_retries | default(omit) }}" delay: "{{ make_openstack_prep_delay | default(omit) }}" until: "{{ make_openstack_prep_until | default(true) }}" register: "make_openstack_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_prep" dry_run: "{{ make_openstack_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_prep_env|default({})), **(make_openstack_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000156015071030124033376 0ustar zuulzuul--- - name: Debug make_openstack_env when: make_openstack_env is defined ansible.builtin.debug: var: make_openstack_env - name: Debug make_openstack_params when: make_openstack_params is defined 
ansible.builtin.debug: var: make_openstack_params - name: Run openstack retries: "{{ make_openstack_retries | default(omit) }}" delay: "{{ make_openstack_delay | default(omit) }}" until: "{{ make_openstack_until | default(true) }}" register: "make_openstack_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack" dry_run: "{{ make_openstack_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_env|default({})), **(make_openstack_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_wait.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000167315071030124033403 0ustar zuulzuul--- - name: Debug make_openstack_wait_env when: make_openstack_wait_env is defined ansible.builtin.debug: var: make_openstack_wait_env - name: Debug make_openstack_wait_params when: make_openstack_wait_params is defined ansible.builtin.debug: var: make_openstack_wait_params - name: Run openstack_wait retries: "{{ make_openstack_wait_retries | default(omit) }}" delay: "{{ make_openstack_wait_delay | default(omit) }}" until: "{{ make_openstack_wait_until | default(true) }}" register: "make_openstack_wait_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_wait" dry_run: "{{ make_openstack_wait_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_wait_env|default({})), **(make_openstack_wait_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_init.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000167315071030124033403 0ustar zuulzuul--- - name: Debug make_openstack_init_env when: make_openstack_init_env is defined ansible.builtin.debug: var: make_openstack_init_env - name: Debug make_openstack_init_params when: make_openstack_init_params is defined ansible.builtin.debug: var: make_openstack_init_params - name: Run openstack_init retries: "{{ make_openstack_init_retries | default(omit) }}" delay: "{{ make_openstack_init_delay | default(omit) }}" until: "{{ make_openstack_init_until | default(true) }}" register: "make_openstack_init_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_init" dry_run: "{{ make_openstack_init_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_init_env|default({})), **(make_openstack_init_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000175015071030124033377 0ustar zuulzuul--- - name: Debug 
make_openstack_cleanup_env when: make_openstack_cleanup_env is defined ansible.builtin.debug: var: make_openstack_cleanup_env - name: Debug make_openstack_cleanup_params when: make_openstack_cleanup_params is defined ansible.builtin.debug: var: make_openstack_cleanup_params - name: Run openstack_cleanup retries: "{{ make_openstack_cleanup_retries | default(omit) }}" delay: "{{ make_openstack_cleanup_delay | default(omit) }}" until: "{{ make_openstack_cleanup_until | default(true) }}" register: "make_openstack_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_cleanup" dry_run: "{{ make_openstack_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_cleanup_env|default({})), **(make_openstack_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_repo.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000167315071030124033403 0ustar zuulzuul--- - name: Debug make_openstack_repo_env when: make_openstack_repo_env is defined ansible.builtin.debug: var: make_openstack_repo_env - name: Debug make_openstack_repo_params when: make_openstack_repo_params is defined ansible.builtin.debug: var: make_openstack_repo_params - name: Run openstack_repo retries: "{{ make_openstack_repo_retries | default(omit) }}" delay: "{{ make_openstack_repo_delay | default(omit) }}" until: "{{ make_openstack_repo_until | default(true) }}" register: "make_openstack_repo_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_repo" dry_run: "{{ make_openstack_repo_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_repo_env|default({})), **(make_openstack_repo_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_all.yml0000644000175000017500000000142615071030124033133 0ustar zuulzuul--- - name: Debug make_all_env when: make_all_env is defined ansible.builtin.debug: var: make_all_env - name: Debug make_all_params when: make_all_params is defined ansible.builtin.debug: var: make_all_params - name: Run all retries: "{{ make_all_retries | default(omit) }}" delay: "{{ make_all_delay | default(omit) }}" until: "{{ make_all_until | default(true) }}" register: "make_all_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make all" dry_run: "{{ make_all_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_all_env|default({})), **(make_all_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_help.yml0000644000175000017500000000145615071030124033316 0ustar zuulzuul--- - name: Debug make_help_env when: make_help_env is defined ansible.builtin.debug: var: make_help_env - name: Debug make_help_params when: make_help_params is defined ansible.builtin.debug: var: make_help_params - name: Run help retries: "{{ make_help_retries | default(omit) }}" 
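# These generated make_*.yml wrappers all follow the same pattern: an optional
# debug of make_<target>_env / make_<target>_params, then a cifmw.general.ci_script
# call whose extra_args is built as
#   dict((make_<target>_env | default({})), **(make_<target>_params | default({})))
# so the two dicts are merged and a key set in *_params overrides the same key
# from *_env. A minimal caller-side sketch, with purely illustrative values:
#   make_openstack_env:
#     SOME_MAKE_VAR: some-value
#   make_openstack_params:
#     SOME_MAKE_VAR: overriding-value   # wins over the *_env entry on merge
#   make_openstack_dryrun: true         # forwarded to ci_script's dry_run
#   make_openstack_retries: 3           # forwarded to the task-level retries
#   make_openstack_delay: 10            # forwarded to the task-level delay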
delay: "{{ make_help_delay | default(omit) }}" until: "{{ make_help_until | default(true) }}" register: "make_help_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make help" dry_run: "{{ make_help_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_help_env|default({})), **(make_help_params|default({}))) }}" ././@LongLink0000644000000000000000000000015000000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cleanup.0000644000175000017500000000152215071030124033265 0ustar zuulzuul--- - name: Debug make_cleanup_env when: make_cleanup_env is defined ansible.builtin.debug: var: make_cleanup_env - name: Debug make_cleanup_params when: make_cleanup_params is defined ansible.builtin.debug: var: make_cleanup_params - name: Run cleanup retries: "{{ make_cleanup_retries | default(omit) }}" delay: "{{ make_cleanup_delay | default(omit) }}" until: "{{ make_cleanup_until | default(true) }}" register: "make_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make cleanup" dry_run: "{{ make_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cleanup_env|default({})), **(make_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_deploy_c0000644000175000017500000000167315071030124033365 0ustar zuulzuul--- - name: Debug make_deploy_cleanup_env when: make_deploy_cleanup_env is defined ansible.builtin.debug: var: make_deploy_cleanup_env - name: Debug make_deploy_cleanup_params when: make_deploy_cleanup_params is defined ansible.builtin.debug: var: make_deploy_cleanup_params - name: Run deploy_cleanup retries: "{{ make_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_deploy_cleanup_delay | default(omit) }}" until: "{{ make_deploy_cleanup_until | default(true) }}" register: "make_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make deploy_cleanup" dry_run: "{{ make_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_deploy_cleanup_env|default({})), **(make_deploy_cleanup_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_wait.yml0000644000175000017500000000144515071030124033330 0ustar zuulzuul--- - name: Debug make_wait_env when: make_wait_env is defined ansible.builtin.debug: var: make_wait_env - name: Debug make_wait_params when: make_wait_params is defined ansible.builtin.debug: var: make_wait_params - name: Run wait retries: "{{ make_wait_retries | default(omit) }}" delay: "{{ make_wait_delay | default(omit) }}" until: "{{ make_wait_until | default(true) }}" register: "make_wait_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ 
'/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make wait" dry_run: "{{ make_wait_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_wait_env|default({})), **(make_wait_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_storage.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_stor0000644000175000017500000000161615071030124033402 0ustar zuulzuul--- - name: Debug make_crc_storage_env when: make_crc_storage_env is defined ansible.builtin.debug: var: make_crc_storage_env - name: Debug make_crc_storage_params when: make_crc_storage_params is defined ansible.builtin.debug: var: make_crc_storage_params - name: Run crc_storage retries: "{{ make_crc_storage_retries | default(omit) }}" delay: "{{ make_crc_storage_delay | default(omit) }}" until: "{{ make_crc_storage_until | default(true) }}" register: "make_crc_storage_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_storage" dry_run: "{{ make_crc_storage_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_storage_env|default({})), **(make_crc_storage_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_storage_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_stor0000644000175000017500000000200615071030124033374 0ustar zuulzuul--- - name: Debug make_crc_storage_cleanup_env when: make_crc_storage_cleanup_env is defined ansible.builtin.debug: var: make_crc_storage_cleanup_env - name: Debug make_crc_storage_cleanup_params when: make_crc_storage_cleanup_params is defined ansible.builtin.debug: var: make_crc_storage_cleanup_params - name: Run crc_storage_cleanup retries: "{{ make_crc_storage_cleanup_retries | default(omit) }}" delay: "{{ make_crc_storage_cleanup_delay | default(omit) }}" until: "{{ make_crc_storage_cleanup_until | default(true) }}" register: "make_crc_storage_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_storage_cleanup" dry_run: "{{ make_crc_storage_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_storage_cleanup_env|default({})), **(make_crc_storage_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_storage_release.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_stor0000644000175000017500000000200615071030124033374 0ustar zuulzuul--- - name: Debug make_crc_storage_release_env when: make_crc_storage_release_env is defined ansible.builtin.debug: var: make_crc_storage_release_env - name: Debug make_crc_storage_release_params when: make_crc_storage_release_params is defined ansible.builtin.debug: var: make_crc_storage_release_params - name: Run crc_storage_release retries: "{{ 
make_crc_storage_release_retries | default(omit) }}" delay: "{{ make_crc_storage_release_delay | default(omit) }}" until: "{{ make_crc_storage_release_until | default(true) }}" register: "make_crc_storage_release_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_storage_release" dry_run: "{{ make_crc_storage_release_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_storage_release_env|default({})), **(make_crc_storage_release_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_storage_with_retries.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_stor0000644000175000017500000000212115071030124033372 0ustar zuulzuul--- - name: Debug make_crc_storage_with_retries_env when: make_crc_storage_with_retries_env is defined ansible.builtin.debug: var: make_crc_storage_with_retries_env - name: Debug make_crc_storage_with_retries_params when: make_crc_storage_with_retries_params is defined ansible.builtin.debug: var: make_crc_storage_with_retries_params - name: Run crc_storage_with_retries retries: "{{ make_crc_storage_with_retries_retries | default(omit) }}" delay: "{{ make_crc_storage_with_retries_delay | default(omit) }}" until: "{{ make_crc_storage_with_retries_until | default(true) }}" register: "make_crc_storage_with_retries_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_storage_with_retries" dry_run: "{{ make_crc_storage_with_retries_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_storage_with_retries_env|default({})), **(make_crc_storage_with_retries_params|default({}))) }}" ././@LongLink0000644000000000000000000000020100000000000011574 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_storage_cleanup_with_retries.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_stor0000644000175000017500000000231115071030124033373 0ustar zuulzuul--- - name: Debug make_crc_storage_cleanup_with_retries_env when: make_crc_storage_cleanup_with_retries_env is defined ansible.builtin.debug: var: make_crc_storage_cleanup_with_retries_env - name: Debug make_crc_storage_cleanup_with_retries_params when: make_crc_storage_cleanup_with_retries_params is defined ansible.builtin.debug: var: make_crc_storage_cleanup_with_retries_params - name: Run crc_storage_cleanup_with_retries retries: "{{ make_crc_storage_cleanup_with_retries_retries | default(omit) }}" delay: "{{ make_crc_storage_cleanup_with_retries_delay | default(omit) }}" until: "{{ make_crc_storage_cleanup_with_retries_until | default(true) }}" register: "make_crc_storage_cleanup_with_retries_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_storage_cleanup_with_retries" dry_run: "{{ make_crc_storage_cleanup_with_retries_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_storage_cleanup_with_retries_env|default({})), 
**(make_crc_storage_cleanup_with_retries_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_operator_namespace.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_operator0000644000175000017500000000176715071030124033426 0ustar zuulzuul--- - name: Debug make_operator_namespace_env when: make_operator_namespace_env is defined ansible.builtin.debug: var: make_operator_namespace_env - name: Debug make_operator_namespace_params when: make_operator_namespace_params is defined ansible.builtin.debug: var: make_operator_namespace_params - name: Run operator_namespace retries: "{{ make_operator_namespace_retries | default(omit) }}" delay: "{{ make_operator_namespace_delay | default(omit) }}" until: "{{ make_operator_namespace_until | default(true) }}" register: "make_operator_namespace_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make operator_namespace" dry_run: "{{ make_operator_namespace_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_operator_namespace_env|default({})), **(make_operator_namespace_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placement_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placemen0000644000175000017500000000167315071030124033353 0ustar zuulzuul--- - name: Debug make_placement_prep_env when: make_placement_prep_env is defined ansible.builtin.debug: var: make_placement_prep_env - name: Debug make_placement_prep_params when: make_placement_prep_params is defined ansible.builtin.debug: var: make_placement_prep_params - name: Run placement_prep retries: "{{ make_placement_prep_retries | default(omit) }}" delay: "{{ make_placement_prep_delay | default(omit) }}" until: "{{ make_placement_prep_until | default(true) }}" register: "make_placement_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make placement_prep" dry_run: "{{ make_placement_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_placement_prep_env|default({})), **(make_placement_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placement.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placemen0000644000175000017500000000156015071030124033346 0ustar zuulzuul--- - name: Debug make_placement_env when: make_placement_env is defined ansible.builtin.debug: var: make_placement_env - name: Debug make_placement_params when: make_placement_params is defined ansible.builtin.debug: var: make_placement_params - name: Run placement retries: "{{ make_placement_retries | default(omit) }}" delay: "{{ make_placement_delay | default(omit) }}" until: "{{ make_placement_until | default(true) }}" register: "make_placement_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ 
'/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make placement" dry_run: "{{ make_placement_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_placement_env|default({})), **(make_placement_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placement_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placemen0000644000175000017500000000175015071030124033347 0ustar zuulzuul--- - name: Debug make_placement_cleanup_env when: make_placement_cleanup_env is defined ansible.builtin.debug: var: make_placement_cleanup_env - name: Debug make_placement_cleanup_params when: make_placement_cleanup_params is defined ansible.builtin.debug: var: make_placement_cleanup_params - name: Run placement_cleanup retries: "{{ make_placement_cleanup_retries | default(omit) }}" delay: "{{ make_placement_cleanup_delay | default(omit) }}" until: "{{ make_placement_cleanup_until | default(true) }}" register: "make_placement_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make placement_cleanup" dry_run: "{{ make_placement_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_placement_cleanup_env|default({})), **(make_placement_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placement_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placemen0000644000175000017500000000204415071030124033344 0ustar zuulzuul--- - name: Debug make_placement_deploy_prep_env when: make_placement_deploy_prep_env is defined ansible.builtin.debug: var: make_placement_deploy_prep_env - name: Debug make_placement_deploy_prep_params when: make_placement_deploy_prep_params is defined ansible.builtin.debug: var: make_placement_deploy_prep_params - name: Run placement_deploy_prep retries: "{{ make_placement_deploy_prep_retries | default(omit) }}" delay: "{{ make_placement_deploy_prep_delay | default(omit) }}" until: "{{ make_placement_deploy_prep_until | default(true) }}" register: "make_placement_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make placement_deploy_prep" dry_run: "{{ make_placement_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_placement_deploy_prep_env|default({})), **(make_placement_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placement_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placemen0000644000175000017500000000173115071030124033346 0ustar zuulzuul--- - name: Debug make_placement_deploy_env when: make_placement_deploy_env is defined ansible.builtin.debug: var: make_placement_deploy_env - name: Debug make_placement_deploy_params when: make_placement_deploy_params is 
defined ansible.builtin.debug: var: make_placement_deploy_params - name: Run placement_deploy retries: "{{ make_placement_deploy_retries | default(omit) }}" delay: "{{ make_placement_deploy_delay | default(omit) }}" until: "{{ make_placement_deploy_until | default(true) }}" register: "make_placement_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make placement_deploy" dry_run: "{{ make_placement_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_placement_deploy_env|default({})), **(make_placement_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placement_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placemen0000644000175000017500000000212115071030124033340 0ustar zuulzuul--- - name: Debug make_placement_deploy_cleanup_env when: make_placement_deploy_cleanup_env is defined ansible.builtin.debug: var: make_placement_deploy_cleanup_env - name: Debug make_placement_deploy_cleanup_params when: make_placement_deploy_cleanup_params is defined ansible.builtin.debug: var: make_placement_deploy_cleanup_params - name: Run placement_deploy_cleanup retries: "{{ make_placement_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_placement_deploy_cleanup_delay | default(omit) }}" until: "{{ make_placement_deploy_cleanup_until | default(true) }}" register: "make_placement_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make placement_deploy_cleanup" dry_run: "{{ make_placement_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_placement_deploy_cleanup_env|default({})), **(make_placement_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_p0000644000175000017500000000161615071030124033334 0ustar zuulzuul--- - name: Debug make_glance_prep_env when: make_glance_prep_env is defined ansible.builtin.debug: var: make_glance_prep_env - name: Debug make_glance_prep_params when: make_glance_prep_params is defined ansible.builtin.debug: var: make_glance_prep_params - name: Run glance_prep retries: "{{ make_glance_prep_retries | default(omit) }}" delay: "{{ make_glance_prep_delay | default(omit) }}" until: "{{ make_glance_prep_until | default(true) }}" register: "make_glance_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make glance_prep" dry_run: "{{ make_glance_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_glance_prep_env|default({})), **(make_glance_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000014700000000000011605 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance.y0000644000175000017500000000150315071030124033257 0ustar zuulzuul--- - name: Debug make_glance_env when: make_glance_env is defined ansible.builtin.debug: var: make_glance_env - name: Debug make_glance_params when: make_glance_params is defined ansible.builtin.debug: var: make_glance_params - name: Run glance retries: "{{ make_glance_retries | default(omit) }}" delay: "{{ make_glance_delay | default(omit) }}" until: "{{ make_glance_until | default(true) }}" register: "make_glance_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make glance" dry_run: "{{ make_glance_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_glance_env|default({})), **(make_glance_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_c0000644000175000017500000000167315071030124033322 0ustar zuulzuul--- - name: Debug make_glance_cleanup_env when: make_glance_cleanup_env is defined ansible.builtin.debug: var: make_glance_cleanup_env - name: Debug make_glance_cleanup_params when: make_glance_cleanup_params is defined ansible.builtin.debug: var: make_glance_cleanup_params - name: Run glance_cleanup retries: "{{ make_glance_cleanup_retries | default(omit) }}" delay: "{{ make_glance_cleanup_delay | default(omit) }}" until: "{{ make_glance_cleanup_until | default(true) }}" register: "make_glance_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make glance_cleanup" dry_run: "{{ make_glance_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_glance_cleanup_env|default({})), **(make_glance_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_d0000644000175000017500000000176715071030124033327 0ustar zuulzuul--- - name: Debug make_glance_deploy_prep_env when: make_glance_deploy_prep_env is defined ansible.builtin.debug: var: make_glance_deploy_prep_env - name: Debug make_glance_deploy_prep_params when: make_glance_deploy_prep_params is defined ansible.builtin.debug: var: make_glance_deploy_prep_params - name: Run glance_deploy_prep retries: "{{ make_glance_deploy_prep_retries | default(omit) }}" delay: "{{ make_glance_deploy_prep_delay | default(omit) }}" until: "{{ make_glance_deploy_prep_until | default(true) }}" register: "make_glance_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make glance_deploy_prep" dry_run: "{{ make_glance_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ 
dict((make_glance_deploy_prep_env|default({})), **(make_glance_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_d0000644000175000017500000000165415071030124033322 0ustar zuulzuul--- - name: Debug make_glance_deploy_env when: make_glance_deploy_env is defined ansible.builtin.debug: var: make_glance_deploy_env - name: Debug make_glance_deploy_params when: make_glance_deploy_params is defined ansible.builtin.debug: var: make_glance_deploy_params - name: Run glance_deploy retries: "{{ make_glance_deploy_retries | default(omit) }}" delay: "{{ make_glance_deploy_delay | default(omit) }}" until: "{{ make_glance_deploy_until | default(true) }}" register: "make_glance_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make glance_deploy" dry_run: "{{ make_glance_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_glance_deploy_env|default({})), **(make_glance_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_d0000644000175000017500000000204415071030124033314 0ustar zuulzuul--- - name: Debug make_glance_deploy_cleanup_env when: make_glance_deploy_cleanup_env is defined ansible.builtin.debug: var: make_glance_deploy_cleanup_env - name: Debug make_glance_deploy_cleanup_params when: make_glance_deploy_cleanup_params is defined ansible.builtin.debug: var: make_glance_deploy_cleanup_params - name: Run glance_deploy_cleanup retries: "{{ make_glance_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_glance_deploy_cleanup_delay | default(omit) }}" until: "{{ make_glance_deploy_cleanup_until | default(true) }}" register: "make_glance_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make glance_deploy_cleanup" dry_run: "{{ make_glance_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_glance_deploy_cleanup_env|default({})), **(make_glance_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_prep0000644000175000017500000000154115071030124033411 0ustar zuulzuul--- - name: Debug make_ovn_prep_env when: make_ovn_prep_env is defined ansible.builtin.debug: var: make_ovn_prep_env - name: Debug make_ovn_prep_params when: make_ovn_prep_params is defined ansible.builtin.debug: var: make_ovn_prep_params - name: Run ovn_prep retries: "{{ make_ovn_prep_retries | default(omit) }}" delay: "{{ make_ovn_prep_delay | default(omit) }}" until: "{{ make_ovn_prep_until | default(true) }}" register: "make_ovn_prep_status" cifmw.general.ci_script: output_dir: 
"{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ovn_prep" dry_run: "{{ make_ovn_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ovn_prep_env|default({})), **(make_ovn_prep_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn.yml0000644000175000017500000000142615071030124033165 0ustar zuulzuul--- - name: Debug make_ovn_env when: make_ovn_env is defined ansible.builtin.debug: var: make_ovn_env - name: Debug make_ovn_params when: make_ovn_params is defined ansible.builtin.debug: var: make_ovn_params - name: Run ovn retries: "{{ make_ovn_retries | default(omit) }}" delay: "{{ make_ovn_delay | default(omit) }}" until: "{{ make_ovn_until | default(true) }}" register: "make_ovn_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ovn" dry_run: "{{ make_ovn_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ovn_env|default({})), **(make_ovn_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_clea0000644000175000017500000000161615071030124033352 0ustar zuulzuul--- - name: Debug make_ovn_cleanup_env when: make_ovn_cleanup_env is defined ansible.builtin.debug: var: make_ovn_cleanup_env - name: Debug make_ovn_cleanup_params when: make_ovn_cleanup_params is defined ansible.builtin.debug: var: make_ovn_cleanup_params - name: Run ovn_cleanup retries: "{{ make_ovn_cleanup_retries | default(omit) }}" delay: "{{ make_ovn_cleanup_delay | default(omit) }}" until: "{{ make_ovn_cleanup_until | default(true) }}" register: "make_ovn_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ovn_cleanup" dry_run: "{{ make_ovn_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ovn_cleanup_env|default({})), **(make_ovn_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_depl0000644000175000017500000000171215071030124033367 0ustar zuulzuul--- - name: Debug make_ovn_deploy_prep_env when: make_ovn_deploy_prep_env is defined ansible.builtin.debug: var: make_ovn_deploy_prep_env - name: Debug make_ovn_deploy_prep_params when: make_ovn_deploy_prep_params is defined ansible.builtin.debug: var: make_ovn_deploy_prep_params - name: Run ovn_deploy_prep retries: "{{ make_ovn_deploy_prep_retries | default(omit) }}" delay: "{{ make_ovn_deploy_prep_delay | default(omit) }}" until: "{{ make_ovn_deploy_prep_until | default(true) }}" register: "make_ovn_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make 
ovn_deploy_prep" dry_run: "{{ make_ovn_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ovn_deploy_prep_env|default({})), **(make_ovn_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_depl0000644000175000017500000000157715071030124033400 0ustar zuulzuul--- - name: Debug make_ovn_deploy_env when: make_ovn_deploy_env is defined ansible.builtin.debug: var: make_ovn_deploy_env - name: Debug make_ovn_deploy_params when: make_ovn_deploy_params is defined ansible.builtin.debug: var: make_ovn_deploy_params - name: Run ovn_deploy retries: "{{ make_ovn_deploy_retries | default(omit) }}" delay: "{{ make_ovn_deploy_delay | default(omit) }}" until: "{{ make_ovn_deploy_until | default(true) }}" register: "make_ovn_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ovn_deploy" dry_run: "{{ make_ovn_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ovn_deploy_env|default({})), **(make_ovn_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_depl0000644000175000017500000000176715071030124033401 0ustar zuulzuul--- - name: Debug make_ovn_deploy_cleanup_env when: make_ovn_deploy_cleanup_env is defined ansible.builtin.debug: var: make_ovn_deploy_cleanup_env - name: Debug make_ovn_deploy_cleanup_params when: make_ovn_deploy_cleanup_params is defined ansible.builtin.debug: var: make_ovn_deploy_cleanup_params - name: Run ovn_deploy_cleanup retries: "{{ make_ovn_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_ovn_deploy_cleanup_delay | default(omit) }}" until: "{{ make_ovn_deploy_cleanup_until | default(true) }}" register: "make_ovn_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ovn_deploy_cleanup" dry_run: "{{ make_ovn_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ovn_deploy_cleanup_env|default({})), **(make_ovn_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_0000644000175000017500000000163515071030124033416 0ustar zuulzuul--- - name: Debug make_neutron_prep_env when: make_neutron_prep_env is defined ansible.builtin.debug: var: make_neutron_prep_env - name: Debug make_neutron_prep_params when: make_neutron_prep_params is defined ansible.builtin.debug: var: make_neutron_prep_params - name: Run neutron_prep retries: "{{ make_neutron_prep_retries | default(omit) }}" delay: "{{ make_neutron_prep_delay | default(omit) }}" until: "{{ make_neutron_prep_until | default(true) }}" register: 
"make_neutron_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make neutron_prep" dry_run: "{{ make_neutron_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_neutron_prep_env|default({})), **(make_neutron_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015000000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron.0000644000175000017500000000152215071030124033330 0ustar zuulzuul--- - name: Debug make_neutron_env when: make_neutron_env is defined ansible.builtin.debug: var: make_neutron_env - name: Debug make_neutron_params when: make_neutron_params is defined ansible.builtin.debug: var: make_neutron_params - name: Run neutron retries: "{{ make_neutron_retries | default(omit) }}" delay: "{{ make_neutron_delay | default(omit) }}" until: "{{ make_neutron_until | default(true) }}" register: "make_neutron_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make neutron" dry_run: "{{ make_neutron_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_neutron_env|default({})), **(make_neutron_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_0000644000175000017500000000171215071030124033412 0ustar zuulzuul--- - name: Debug make_neutron_cleanup_env when: make_neutron_cleanup_env is defined ansible.builtin.debug: var: make_neutron_cleanup_env - name: Debug make_neutron_cleanup_params when: make_neutron_cleanup_params is defined ansible.builtin.debug: var: make_neutron_cleanup_params - name: Run neutron_cleanup retries: "{{ make_neutron_cleanup_retries | default(omit) }}" delay: "{{ make_neutron_cleanup_delay | default(omit) }}" until: "{{ make_neutron_cleanup_until | default(true) }}" register: "make_neutron_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make neutron_cleanup" dry_run: "{{ make_neutron_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_neutron_cleanup_env|default({})), **(make_neutron_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_0000644000175000017500000000200615071030124033407 0ustar zuulzuul--- - name: Debug make_neutron_deploy_prep_env when: make_neutron_deploy_prep_env is defined ansible.builtin.debug: var: make_neutron_deploy_prep_env - name: Debug make_neutron_deploy_prep_params when: make_neutron_deploy_prep_params is defined ansible.builtin.debug: var: make_neutron_deploy_prep_params - name: Run neutron_deploy_prep retries: "{{ 
make_neutron_deploy_prep_retries | default(omit) }}" delay: "{{ make_neutron_deploy_prep_delay | default(omit) }}" until: "{{ make_neutron_deploy_prep_until | default(true) }}" register: "make_neutron_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make neutron_deploy_prep" dry_run: "{{ make_neutron_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_neutron_deploy_prep_env|default({})), **(make_neutron_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_0000644000175000017500000000167315071030124033420 0ustar zuulzuul--- - name: Debug make_neutron_deploy_env when: make_neutron_deploy_env is defined ansible.builtin.debug: var: make_neutron_deploy_env - name: Debug make_neutron_deploy_params when: make_neutron_deploy_params is defined ansible.builtin.debug: var: make_neutron_deploy_params - name: Run neutron_deploy retries: "{{ make_neutron_deploy_retries | default(omit) }}" delay: "{{ make_neutron_deploy_delay | default(omit) }}" until: "{{ make_neutron_deploy_until | default(true) }}" register: "make_neutron_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make neutron_deploy" dry_run: "{{ make_neutron_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_neutron_deploy_env|default({})), **(make_neutron_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016700000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_0000644000175000017500000000206315071030124033412 0ustar zuulzuul--- - name: Debug make_neutron_deploy_cleanup_env when: make_neutron_deploy_cleanup_env is defined ansible.builtin.debug: var: make_neutron_deploy_cleanup_env - name: Debug make_neutron_deploy_cleanup_params when: make_neutron_deploy_cleanup_params is defined ansible.builtin.debug: var: make_neutron_deploy_cleanup_params - name: Run neutron_deploy_cleanup retries: "{{ make_neutron_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_neutron_deploy_cleanup_delay | default(omit) }}" until: "{{ make_neutron_deploy_cleanup_until | default(true) }}" register: "make_neutron_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make neutron_deploy_cleanup" dry_run: "{{ make_neutron_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_neutron_deploy_cleanup_env|default({})), **(make_neutron_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_p0000644000175000017500000000161615071030124033347 0ustar zuulzuul--- - name: Debug make_cinder_prep_env when: make_cinder_prep_env is defined ansible.builtin.debug: var: make_cinder_prep_env - name: Debug make_cinder_prep_params when: make_cinder_prep_params is defined ansible.builtin.debug: var: make_cinder_prep_params - name: Run cinder_prep retries: "{{ make_cinder_prep_retries | default(omit) }}" delay: "{{ make_cinder_prep_delay | default(omit) }}" until: "{{ make_cinder_prep_until | default(true) }}" register: "make_cinder_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make cinder_prep" dry_run: "{{ make_cinder_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cinder_prep_env|default({})), **(make_cinder_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000014700000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder.y0000644000175000017500000000150315071030124033272 0ustar zuulzuul--- - name: Debug make_cinder_env when: make_cinder_env is defined ansible.builtin.debug: var: make_cinder_env - name: Debug make_cinder_params when: make_cinder_params is defined ansible.builtin.debug: var: make_cinder_params - name: Run cinder retries: "{{ make_cinder_retries | default(omit) }}" delay: "{{ make_cinder_delay | default(omit) }}" until: "{{ make_cinder_until | default(true) }}" register: "make_cinder_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make cinder" dry_run: "{{ make_cinder_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cinder_env|default({})), **(make_cinder_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_c0000644000175000017500000000167315071030124033335 0ustar zuulzuul--- - name: Debug make_cinder_cleanup_env when: make_cinder_cleanup_env is defined ansible.builtin.debug: var: make_cinder_cleanup_env - name: Debug make_cinder_cleanup_params when: make_cinder_cleanup_params is defined ansible.builtin.debug: var: make_cinder_cleanup_params - name: Run cinder_cleanup retries: "{{ make_cinder_cleanup_retries | default(omit) }}" delay: "{{ make_cinder_cleanup_delay | default(omit) }}" until: "{{ make_cinder_cleanup_until | default(true) }}" register: "make_cinder_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make cinder_cleanup" dry_run: "{{ make_cinder_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cinder_cleanup_env|default({})), **(make_cinder_cleanup_params|default({}))) }}" 
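[editor's note] The task files collected in this archive all follow one generated pattern: an optional debug of `make_<target>_env` and `make_<target>_params`, then a `cifmw.general.ci_script` call that runs the matching `make <target>` inside the install_yamls checkout, with `retries`/`delay`/`until` controlling retry behaviour and `extra_args` built by merging the env and params dictionaries. As a minimal sketch of how such a file might be driven from a playbook (the role name `install_yamls_makes` comes from the paths above; the `include_role` wiring, the host, and all variable values below are illustrative assumptions, not taken from this archive):

---
# Hypothetical caller for the generated make_cinder task file shown above.
# Variable names follow the make_<target>_* convention of those files; the
# hosts, values, and include mechanism are assumptions for illustration only.
- hosts: localhost
  gather_facts: true          # ansible_user_dir is the fallback base for output_dir
  vars:
    make_cinder_env:
      NAMESPACE: openstack    # example environment entry merged into extra_args
    make_cinder_params:
      TIMEOUT: 600s           # example parameter entry merged into extra_args
    make_cinder_retries: 3    # consumed by the retries/delay/until block
    make_cinder_delay: 30
  tasks:
    - name: Run the generated cinder make target
      ansible.builtin.include_role:
        name: install_yamls_makes
        tasks_from: make_cinder

The `extra_args` expression in each file, `dict((..._env|default({})), **(..._params|default({})))`, uses Python's dict(d, **kw) semantics via Jinja2, so a key defined in the params dictionary overrides the same key from the env dictionary.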
././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_d0000644000175000017500000000176715071030124033342 0ustar zuulzuul--- - name: Debug make_cinder_deploy_prep_env when: make_cinder_deploy_prep_env is defined ansible.builtin.debug: var: make_cinder_deploy_prep_env - name: Debug make_cinder_deploy_prep_params when: make_cinder_deploy_prep_params is defined ansible.builtin.debug: var: make_cinder_deploy_prep_params - name: Run cinder_deploy_prep retries: "{{ make_cinder_deploy_prep_retries | default(omit) }}" delay: "{{ make_cinder_deploy_prep_delay | default(omit) }}" until: "{{ make_cinder_deploy_prep_until | default(true) }}" register: "make_cinder_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make cinder_deploy_prep" dry_run: "{{ make_cinder_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cinder_deploy_prep_env|default({})), **(make_cinder_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_d0000644000175000017500000000165415071030124033335 0ustar zuulzuul--- - name: Debug make_cinder_deploy_env when: make_cinder_deploy_env is defined ansible.builtin.debug: var: make_cinder_deploy_env - name: Debug make_cinder_deploy_params when: make_cinder_deploy_params is defined ansible.builtin.debug: var: make_cinder_deploy_params - name: Run cinder_deploy retries: "{{ make_cinder_deploy_retries | default(omit) }}" delay: "{{ make_cinder_deploy_delay | default(omit) }}" until: "{{ make_cinder_deploy_until | default(true) }}" register: "make_cinder_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make cinder_deploy" dry_run: "{{ make_cinder_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cinder_deploy_env|default({})), **(make_cinder_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_d0000644000175000017500000000204415071030124033327 0ustar zuulzuul--- - name: Debug make_cinder_deploy_cleanup_env when: make_cinder_deploy_cleanup_env is defined ansible.builtin.debug: var: make_cinder_deploy_cleanup_env - name: Debug make_cinder_deploy_cleanup_params when: make_cinder_deploy_cleanup_params is defined ansible.builtin.debug: var: make_cinder_deploy_cleanup_params - name: Run cinder_deploy_cleanup retries: "{{ make_cinder_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_cinder_deploy_cleanup_delay | default(omit) }}" until: "{{ make_cinder_deploy_cleanup_until | default(true) }}" register: "make_cinder_deploy_cleanup_status" cifmw.general.ci_script: 
output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make cinder_deploy_cleanup" dry_run: "{{ make_cinder_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cinder_deploy_cleanup_env|default({})), **(make_cinder_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq0000644000175000017500000000165415071030124033367 0ustar zuulzuul--- - name: Debug make_rabbitmq_prep_env when: make_rabbitmq_prep_env is defined ansible.builtin.debug: var: make_rabbitmq_prep_env - name: Debug make_rabbitmq_prep_params when: make_rabbitmq_prep_params is defined ansible.builtin.debug: var: make_rabbitmq_prep_params - name: Run rabbitmq_prep retries: "{{ make_rabbitmq_prep_retries | default(omit) }}" delay: "{{ make_rabbitmq_prep_delay | default(omit) }}" until: "{{ make_rabbitmq_prep_until | default(true) }}" register: "make_rabbitmq_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make rabbitmq_prep" dry_run: "{{ make_rabbitmq_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_rabbitmq_prep_env|default({})), **(make_rabbitmq_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq0000644000175000017500000000154115071030124033362 0ustar zuulzuul--- - name: Debug make_rabbitmq_env when: make_rabbitmq_env is defined ansible.builtin.debug: var: make_rabbitmq_env - name: Debug make_rabbitmq_params when: make_rabbitmq_params is defined ansible.builtin.debug: var: make_rabbitmq_params - name: Run rabbitmq retries: "{{ make_rabbitmq_retries | default(omit) }}" delay: "{{ make_rabbitmq_delay | default(omit) }}" until: "{{ make_rabbitmq_until | default(true) }}" register: "make_rabbitmq_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make rabbitmq" dry_run: "{{ make_rabbitmq_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_rabbitmq_env|default({})), **(make_rabbitmq_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq0000644000175000017500000000173115071030124033363 0ustar zuulzuul--- - name: Debug make_rabbitmq_cleanup_env when: make_rabbitmq_cleanup_env is defined ansible.builtin.debug: var: make_rabbitmq_cleanup_env - name: Debug make_rabbitmq_cleanup_params when: make_rabbitmq_cleanup_params is defined ansible.builtin.debug: var: make_rabbitmq_cleanup_params - name: Run rabbitmq_cleanup retries: "{{ make_rabbitmq_cleanup_retries | default(omit) }}" delay: "{{ 
make_rabbitmq_cleanup_delay | default(omit) }}" until: "{{ make_rabbitmq_cleanup_until | default(true) }}" register: "make_rabbitmq_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make rabbitmq_cleanup" dry_run: "{{ make_rabbitmq_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_rabbitmq_cleanup_env|default({})), **(make_rabbitmq_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq0000644000175000017500000000202515071030124033360 0ustar zuulzuul--- - name: Debug make_rabbitmq_deploy_prep_env when: make_rabbitmq_deploy_prep_env is defined ansible.builtin.debug: var: make_rabbitmq_deploy_prep_env - name: Debug make_rabbitmq_deploy_prep_params when: make_rabbitmq_deploy_prep_params is defined ansible.builtin.debug: var: make_rabbitmq_deploy_prep_params - name: Run rabbitmq_deploy_prep retries: "{{ make_rabbitmq_deploy_prep_retries | default(omit) }}" delay: "{{ make_rabbitmq_deploy_prep_delay | default(omit) }}" until: "{{ make_rabbitmq_deploy_prep_until | default(true) }}" register: "make_rabbitmq_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make rabbitmq_deploy_prep" dry_run: "{{ make_rabbitmq_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_rabbitmq_deploy_prep_env|default({})), **(make_rabbitmq_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq0000644000175000017500000000171215071030124033362 0ustar zuulzuul--- - name: Debug make_rabbitmq_deploy_env when: make_rabbitmq_deploy_env is defined ansible.builtin.debug: var: make_rabbitmq_deploy_env - name: Debug make_rabbitmq_deploy_params when: make_rabbitmq_deploy_params is defined ansible.builtin.debug: var: make_rabbitmq_deploy_params - name: Run rabbitmq_deploy retries: "{{ make_rabbitmq_deploy_retries | default(omit) }}" delay: "{{ make_rabbitmq_deploy_delay | default(omit) }}" until: "{{ make_rabbitmq_deploy_until | default(true) }}" register: "make_rabbitmq_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make rabbitmq_deploy" dry_run: "{{ make_rabbitmq_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_rabbitmq_deploy_env|default({})), **(make_rabbitmq_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rabbitmq0000644000175000017500000000210215071030124033354 0ustar zuulzuul--- - 
name: Debug make_rabbitmq_deploy_cleanup_env when: make_rabbitmq_deploy_cleanup_env is defined ansible.builtin.debug: var: make_rabbitmq_deploy_cleanup_env - name: Debug make_rabbitmq_deploy_cleanup_params when: make_rabbitmq_deploy_cleanup_params is defined ansible.builtin.debug: var: make_rabbitmq_deploy_cleanup_params - name: Run rabbitmq_deploy_cleanup retries: "{{ make_rabbitmq_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_rabbitmq_deploy_cleanup_delay | default(omit) }}" until: "{{ make_rabbitmq_deploy_cleanup_until | default(true) }}" register: "make_rabbitmq_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make rabbitmq_deploy_cleanup" dry_run: "{{ make_rabbitmq_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_rabbitmq_deploy_cleanup_env|default({})), **(make_rabbitmq_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_p0000644000175000017500000000161615071030124033366 0ustar zuulzuul--- - name: Debug make_ironic_prep_env when: make_ironic_prep_env is defined ansible.builtin.debug: var: make_ironic_prep_env - name: Debug make_ironic_prep_params when: make_ironic_prep_params is defined ansible.builtin.debug: var: make_ironic_prep_params - name: Run ironic_prep retries: "{{ make_ironic_prep_retries | default(omit) }}" delay: "{{ make_ironic_prep_delay | default(omit) }}" until: "{{ make_ironic_prep_until | default(true) }}" register: "make_ironic_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic_prep" dry_run: "{{ make_ironic_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_prep_env|default({})), **(make_ironic_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000014700000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic.y0000644000175000017500000000150315071030124033311 0ustar zuulzuul--- - name: Debug make_ironic_env when: make_ironic_env is defined ansible.builtin.debug: var: make_ironic_env - name: Debug make_ironic_params when: make_ironic_params is defined ansible.builtin.debug: var: make_ironic_params - name: Run ironic retries: "{{ make_ironic_retries | default(omit) }}" delay: "{{ make_ironic_delay | default(omit) }}" until: "{{ make_ironic_until | default(true) }}" register: "make_ironic_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic" dry_run: "{{ make_ironic_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_env|default({})), **(make_ironic_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_c0000644000175000017500000000167315071030124033354 0ustar zuulzuul--- - name: Debug make_ironic_cleanup_env when: make_ironic_cleanup_env is defined ansible.builtin.debug: var: make_ironic_cleanup_env - name: Debug make_ironic_cleanup_params when: make_ironic_cleanup_params is defined ansible.builtin.debug: var: make_ironic_cleanup_params - name: Run ironic_cleanup retries: "{{ make_ironic_cleanup_retries | default(omit) }}" delay: "{{ make_ironic_cleanup_delay | default(omit) }}" until: "{{ make_ironic_cleanup_until | default(true) }}" register: "make_ironic_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic_cleanup" dry_run: "{{ make_ironic_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_cleanup_env|default({})), **(make_ironic_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_d0000644000175000017500000000176715071030124033361 0ustar zuulzuul--- - name: Debug make_ironic_deploy_prep_env when: make_ironic_deploy_prep_env is defined ansible.builtin.debug: var: make_ironic_deploy_prep_env - name: Debug make_ironic_deploy_prep_params when: make_ironic_deploy_prep_params is defined ansible.builtin.debug: var: make_ironic_deploy_prep_params - name: Run ironic_deploy_prep retries: "{{ make_ironic_deploy_prep_retries | default(omit) }}" delay: "{{ make_ironic_deploy_prep_delay | default(omit) }}" until: "{{ make_ironic_deploy_prep_until | default(true) }}" register: "make_ironic_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic_deploy_prep" dry_run: "{{ make_ironic_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_deploy_prep_env|default({})), **(make_ironic_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_d0000644000175000017500000000165415071030124033354 0ustar zuulzuul--- - name: Debug make_ironic_deploy_env when: make_ironic_deploy_env is defined ansible.builtin.debug: var: make_ironic_deploy_env - name: Debug make_ironic_deploy_params when: make_ironic_deploy_params is defined ansible.builtin.debug: var: make_ironic_deploy_params - name: Run ironic_deploy retries: "{{ make_ironic_deploy_retries | default(omit) }}" delay: "{{ make_ironic_deploy_delay | default(omit) }}" until: "{{ make_ironic_deploy_until | default(true) }}" register: "make_ironic_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: 
"/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic_deploy" dry_run: "{{ make_ironic_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_deploy_env|default({})), **(make_ironic_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_d0000644000175000017500000000204415071030124033346 0ustar zuulzuul--- - name: Debug make_ironic_deploy_cleanup_env when: make_ironic_deploy_cleanup_env is defined ansible.builtin.debug: var: make_ironic_deploy_cleanup_env - name: Debug make_ironic_deploy_cleanup_params when: make_ironic_deploy_cleanup_params is defined ansible.builtin.debug: var: make_ironic_deploy_cleanup_params - name: Run ironic_deploy_cleanup retries: "{{ make_ironic_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_ironic_deploy_cleanup_delay | default(omit) }}" until: "{{ make_ironic_deploy_cleanup_until | default(true) }}" register: "make_ironic_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic_deploy_cleanup" dry_run: "{{ make_ironic_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_deploy_cleanup_env|default({})), **(make_ironic_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_0000644000175000017500000000163515071030124033352 0ustar zuulzuul--- - name: Debug make_octavia_prep_env when: make_octavia_prep_env is defined ansible.builtin.debug: var: make_octavia_prep_env - name: Debug make_octavia_prep_params when: make_octavia_prep_params is defined ansible.builtin.debug: var: make_octavia_prep_params - name: Run octavia_prep retries: "{{ make_octavia_prep_retries | default(omit) }}" delay: "{{ make_octavia_prep_delay | default(omit) }}" until: "{{ make_octavia_prep_until | default(true) }}" register: "make_octavia_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make octavia_prep" dry_run: "{{ make_octavia_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_octavia_prep_env|default({})), **(make_octavia_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015000000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia.0000644000175000017500000000152215071030124033264 0ustar zuulzuul--- - name: Debug make_octavia_env when: make_octavia_env is defined ansible.builtin.debug: var: make_octavia_env - name: Debug make_octavia_params when: make_octavia_params is defined ansible.builtin.debug: var: make_octavia_params - name: Run octavia retries: "{{ make_octavia_retries | default(omit) }}" delay: "{{ make_octavia_delay | 
default(omit) }}" until: "{{ make_octavia_until | default(true) }}" register: "make_octavia_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make octavia" dry_run: "{{ make_octavia_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_octavia_env|default({})), **(make_octavia_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_0000644000175000017500000000171215071030124033346 0ustar zuulzuul--- - name: Debug make_octavia_cleanup_env when: make_octavia_cleanup_env is defined ansible.builtin.debug: var: make_octavia_cleanup_env - name: Debug make_octavia_cleanup_params when: make_octavia_cleanup_params is defined ansible.builtin.debug: var: make_octavia_cleanup_params - name: Run octavia_cleanup retries: "{{ make_octavia_cleanup_retries | default(omit) }}" delay: "{{ make_octavia_cleanup_delay | default(omit) }}" until: "{{ make_octavia_cleanup_until | default(true) }}" register: "make_octavia_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make octavia_cleanup" dry_run: "{{ make_octavia_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_octavia_cleanup_env|default({})), **(make_octavia_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_0000644000175000017500000000200615071030124033343 0ustar zuulzuul--- - name: Debug make_octavia_deploy_prep_env when: make_octavia_deploy_prep_env is defined ansible.builtin.debug: var: make_octavia_deploy_prep_env - name: Debug make_octavia_deploy_prep_params when: make_octavia_deploy_prep_params is defined ansible.builtin.debug: var: make_octavia_deploy_prep_params - name: Run octavia_deploy_prep retries: "{{ make_octavia_deploy_prep_retries | default(omit) }}" delay: "{{ make_octavia_deploy_prep_delay | default(omit) }}" until: "{{ make_octavia_deploy_prep_until | default(true) }}" register: "make_octavia_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make octavia_deploy_prep" dry_run: "{{ make_octavia_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_octavia_deploy_prep_env|default({})), **(make_octavia_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_0000644000175000017500000000167315071030124033354 0ustar zuulzuul--- - name: Debug make_octavia_deploy_env when: make_octavia_deploy_env is defined ansible.builtin.debug: var: 
make_octavia_deploy_env - name: Debug make_octavia_deploy_params when: make_octavia_deploy_params is defined ansible.builtin.debug: var: make_octavia_deploy_params - name: Run octavia_deploy retries: "{{ make_octavia_deploy_retries | default(omit) }}" delay: "{{ make_octavia_deploy_delay | default(omit) }}" until: "{{ make_octavia_deploy_until | default(true) }}" register: "make_octavia_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make octavia_deploy" dry_run: "{{ make_octavia_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_octavia_deploy_env|default({})), **(make_octavia_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016700000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_0000644000175000017500000000206315071030124033346 0ustar zuulzuul--- - name: Debug make_octavia_deploy_cleanup_env when: make_octavia_deploy_cleanup_env is defined ansible.builtin.debug: var: make_octavia_deploy_cleanup_env - name: Debug make_octavia_deploy_cleanup_params when: make_octavia_deploy_cleanup_params is defined ansible.builtin.debug: var: make_octavia_deploy_cleanup_params - name: Run octavia_deploy_cleanup retries: "{{ make_octavia_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_octavia_deploy_cleanup_delay | default(omit) }}" until: "{{ make_octavia_deploy_cleanup_until | default(true) }}" register: "make_octavia_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make octavia_deploy_cleanup" dry_run: "{{ make_octavia_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_octavia_deploy_cleanup_env|default({})), **(make_octavia_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designate_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designat0000644000175000017500000000167315071030124033365 0ustar zuulzuul--- - name: Debug make_designate_prep_env when: make_designate_prep_env is defined ansible.builtin.debug: var: make_designate_prep_env - name: Debug make_designate_prep_params when: make_designate_prep_params is defined ansible.builtin.debug: var: make_designate_prep_params - name: Run designate_prep retries: "{{ make_designate_prep_retries | default(omit) }}" delay: "{{ make_designate_prep_delay | default(omit) }}" until: "{{ make_designate_prep_until | default(true) }}" register: "make_designate_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make designate_prep" dry_run: "{{ make_designate_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_designate_prep_env|default({})), **(make_designate_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar 
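Every task file in this role follows the same shape: two optional debug tasks dump the `<target>_env` and `<target>_params` variables when defined, then a single `cifmw.general.ci_script` task runs `make <target>` inside the install_yamls checkout, with retries, delay, until, dry_run and the merged extra_args all driven by caller-supplied `make_<target>_*` variables. A minimal sketch of driving one of them, here make_designate_prep.yml; the role and file names come from the paths above, while the host, NAMESPACE/TIMEOUT keys and retry values are illustrative assumptions only:
---
# Sketch (hypothetical values): calling the generated make_designate_prep.yml
# tasks from a playbook and feeding the knobs each file exposes.
- hosts: localhost                      # target host is illustrative
  vars:
    cifmw_basedir: "{{ ansible_user_dir }}/ci-framework-data"
    make_designate_prep_env:
      NAMESPACE: openstack              # becomes part of extra_args
    make_designate_prep_params:
      TIMEOUT: 600s                     # merged over *_env, wins on key clashes
    make_designate_prep_retries: 3      # feeds the generated task's retries:
    make_designate_prep_delay: 30       # feeds the generated task's delay:
    make_designate_prep_dryrun: false   # passed straight to ci_script's dry_run
  tasks:
    - name: Run the generated designate_prep tasks
      ansible.builtin.include_role:
        name: install_yamls_makes
        tasks_from: make_designate_prep.yml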
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designate.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designat0000644000175000017500000000156015071030124033360 0ustar zuulzuul--- - name: Debug make_designate_env when: make_designate_env is defined ansible.builtin.debug: var: make_designate_env - name: Debug make_designate_params when: make_designate_params is defined ansible.builtin.debug: var: make_designate_params - name: Run designate retries: "{{ make_designate_retries | default(omit) }}" delay: "{{ make_designate_delay | default(omit) }}" until: "{{ make_designate_until | default(true) }}" register: "make_designate_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make designate" dry_run: "{{ make_designate_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_designate_env|default({})), **(make_designate_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designate_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designat0000644000175000017500000000175015071030124033361 0ustar zuulzuul--- - name: Debug make_designate_cleanup_env when: make_designate_cleanup_env is defined ansible.builtin.debug: var: make_designate_cleanup_env - name: Debug make_designate_cleanup_params when: make_designate_cleanup_params is defined ansible.builtin.debug: var: make_designate_cleanup_params - name: Run designate_cleanup retries: "{{ make_designate_cleanup_retries | default(omit) }}" delay: "{{ make_designate_cleanup_delay | default(omit) }}" until: "{{ make_designate_cleanup_until | default(true) }}" register: "make_designate_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make designate_cleanup" dry_run: "{{ make_designate_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_designate_cleanup_env|default({})), **(make_designate_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designate_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designat0000644000175000017500000000204415071030124033356 0ustar zuulzuul--- - name: Debug make_designate_deploy_prep_env when: make_designate_deploy_prep_env is defined ansible.builtin.debug: var: make_designate_deploy_prep_env - name: Debug make_designate_deploy_prep_params when: make_designate_deploy_prep_params is defined ansible.builtin.debug: var: make_designate_deploy_prep_params - name: Run designate_deploy_prep retries: "{{ make_designate_deploy_prep_retries | default(omit) }}" delay: "{{ make_designate_deploy_prep_delay | default(omit) }}" until: "{{ make_designate_deploy_prep_until | default(true) }}" register: "make_designate_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: 
"/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make designate_deploy_prep" dry_run: "{{ make_designate_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_designate_deploy_prep_env|default({})), **(make_designate_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designate_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designat0000644000175000017500000000173115071030124033360 0ustar zuulzuul--- - name: Debug make_designate_deploy_env when: make_designate_deploy_env is defined ansible.builtin.debug: var: make_designate_deploy_env - name: Debug make_designate_deploy_params when: make_designate_deploy_params is defined ansible.builtin.debug: var: make_designate_deploy_params - name: Run designate_deploy retries: "{{ make_designate_deploy_retries | default(omit) }}" delay: "{{ make_designate_deploy_delay | default(omit) }}" until: "{{ make_designate_deploy_until | default(true) }}" register: "make_designate_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make designate_deploy" dry_run: "{{ make_designate_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_designate_deploy_env|default({})), **(make_designate_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designate_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designat0000644000175000017500000000212115071030124033352 0ustar zuulzuul--- - name: Debug make_designate_deploy_cleanup_env when: make_designate_deploy_cleanup_env is defined ansible.builtin.debug: var: make_designate_deploy_cleanup_env - name: Debug make_designate_deploy_cleanup_params when: make_designate_deploy_cleanup_params is defined ansible.builtin.debug: var: make_designate_deploy_cleanup_params - name: Run designate_deploy_cleanup retries: "{{ make_designate_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_designate_deploy_cleanup_delay | default(omit) }}" until: "{{ make_designate_deploy_cleanup_until | default(true) }}" register: "make_designate_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make designate_deploy_cleanup" dry_run: "{{ make_designate_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_designate_deploy_cleanup_env|default({})), **(make_designate_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_pre0000644000175000017500000000156015071030124033373 0ustar zuulzuul--- - name: Debug make_nova_prep_env when: make_nova_prep_env is defined ansible.builtin.debug: var: make_nova_prep_env - name: Debug make_nova_prep_params when: make_nova_prep_params is defined 
ansible.builtin.debug: var: make_nova_prep_params - name: Run nova_prep retries: "{{ make_nova_prep_retries | default(omit) }}" delay: "{{ make_nova_prep_delay | default(omit) }}" until: "{{ make_nova_prep_until | default(true) }}" register: "make_nova_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nova_prep" dry_run: "{{ make_nova_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nova_prep_env|default({})), **(make_nova_prep_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova.yml0000644000175000017500000000144515071030124033327 0ustar zuulzuul--- - name: Debug make_nova_env when: make_nova_env is defined ansible.builtin.debug: var: make_nova_env - name: Debug make_nova_params when: make_nova_params is defined ansible.builtin.debug: var: make_nova_params - name: Run nova retries: "{{ make_nova_retries | default(omit) }}" delay: "{{ make_nova_delay | default(omit) }}" until: "{{ make_nova_until | default(true) }}" register: "make_nova_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nova" dry_run: "{{ make_nova_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nova_env|default({})), **(make_nova_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_cle0000644000175000017500000000163515071030124033353 0ustar zuulzuul--- - name: Debug make_nova_cleanup_env when: make_nova_cleanup_env is defined ansible.builtin.debug: var: make_nova_cleanup_env - name: Debug make_nova_cleanup_params when: make_nova_cleanup_params is defined ansible.builtin.debug: var: make_nova_cleanup_params - name: Run nova_cleanup retries: "{{ make_nova_cleanup_retries | default(omit) }}" delay: "{{ make_nova_cleanup_delay | default(omit) }}" until: "{{ make_nova_cleanup_until | default(true) }}" register: "make_nova_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nova_cleanup" dry_run: "{{ make_nova_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nova_cleanup_env|default({})), **(make_nova_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_dep0000644000175000017500000000173115071030124033355 0ustar zuulzuul--- - name: Debug make_nova_deploy_prep_env when: make_nova_deploy_prep_env is defined ansible.builtin.debug: var: make_nova_deploy_prep_env - name: Debug make_nova_deploy_prep_params when: make_nova_deploy_prep_params is defined ansible.builtin.debug: var: make_nova_deploy_prep_params - name: Run nova_deploy_prep retries: "{{ make_nova_deploy_prep_retries | default(omit) }}" delay: "{{ 
make_nova_deploy_prep_delay | default(omit) }}" until: "{{ make_nova_deploy_prep_until | default(true) }}" register: "make_nova_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nova_deploy_prep" dry_run: "{{ make_nova_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nova_deploy_prep_env|default({})), **(make_nova_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_dep0000644000175000017500000000161615071030124033357 0ustar zuulzuul--- - name: Debug make_nova_deploy_env when: make_nova_deploy_env is defined ansible.builtin.debug: var: make_nova_deploy_env - name: Debug make_nova_deploy_params when: make_nova_deploy_params is defined ansible.builtin.debug: var: make_nova_deploy_params - name: Run nova_deploy retries: "{{ make_nova_deploy_retries | default(omit) }}" delay: "{{ make_nova_deploy_delay | default(omit) }}" until: "{{ make_nova_deploy_until | default(true) }}" register: "make_nova_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nova_deploy" dry_run: "{{ make_nova_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nova_deploy_env|default({})), **(make_nova_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_dep0000644000175000017500000000200615071030124033351 0ustar zuulzuul--- - name: Debug make_nova_deploy_cleanup_env when: make_nova_deploy_cleanup_env is defined ansible.builtin.debug: var: make_nova_deploy_cleanup_env - name: Debug make_nova_deploy_cleanup_params when: make_nova_deploy_cleanup_params is defined ansible.builtin.debug: var: make_nova_deploy_cleanup_params - name: Run nova_deploy_cleanup retries: "{{ make_nova_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_nova_deploy_cleanup_delay | default(omit) }}" until: "{{ make_nova_deploy_cleanup_until | default(true) }}" register: "make_nova_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nova_deploy_cleanup" dry_run: "{{ make_nova_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nova_deploy_cleanup_env|default({})), **(make_nova_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_0000644000175000017500000000175015071030124033321 0ustar zuulzuul--- - name: Debug make_mariadb_kuttl_run_env when: make_mariadb_kuttl_run_env is defined 
ansible.builtin.debug: var: make_mariadb_kuttl_run_env - name: Debug make_mariadb_kuttl_run_params when: make_mariadb_kuttl_run_params is defined ansible.builtin.debug: var: make_mariadb_kuttl_run_params - name: Run mariadb_kuttl_run retries: "{{ make_mariadb_kuttl_run_retries | default(omit) }}" delay: "{{ make_mariadb_kuttl_run_delay | default(omit) }}" until: "{{ make_mariadb_kuttl_run_until | default(true) }}" register: "make_mariadb_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make mariadb_kuttl_run" dry_run: "{{ make_mariadb_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_mariadb_kuttl_run_env|default({})), **(make_mariadb_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_0000644000175000017500000000165415071030124033324 0ustar zuulzuul--- - name: Debug make_mariadb_kuttl_env when: make_mariadb_kuttl_env is defined ansible.builtin.debug: var: make_mariadb_kuttl_env - name: Debug make_mariadb_kuttl_params when: make_mariadb_kuttl_params is defined ansible.builtin.debug: var: make_mariadb_kuttl_params - name: Run mariadb_kuttl retries: "{{ make_mariadb_kuttl_retries | default(omit) }}" delay: "{{ make_mariadb_kuttl_delay | default(omit) }}" until: "{{ make_mariadb_kuttl_until | default(true) }}" register: "make_mariadb_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make mariadb_kuttl" dry_run: "{{ make_mariadb_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_mariadb_kuttl_env|default({})), **(make_mariadb_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_db_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_db0000644000175000017500000000165415071030124033376 0ustar zuulzuul--- - name: Debug make_kuttl_db_prep_env when: make_kuttl_db_prep_env is defined ansible.builtin.debug: var: make_kuttl_db_prep_env - name: Debug make_kuttl_db_prep_params when: make_kuttl_db_prep_params is defined ansible.builtin.debug: var: make_kuttl_db_prep_params - name: Run kuttl_db_prep retries: "{{ make_kuttl_db_prep_retries | default(omit) }}" delay: "{{ make_kuttl_db_prep_delay | default(omit) }}" until: "{{ make_kuttl_db_prep_until | default(true) }}" register: "make_kuttl_db_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make kuttl_db_prep" dry_run: "{{ make_kuttl_db_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_kuttl_db_prep_env|default({})), **(make_kuttl_db_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_db_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_db0000644000175000017500000000173115071030124033372 0ustar zuulzuul--- - name: Debug make_kuttl_db_cleanup_env when: make_kuttl_db_cleanup_env is defined ansible.builtin.debug: var: make_kuttl_db_cleanup_env - name: Debug make_kuttl_db_cleanup_params when: make_kuttl_db_cleanup_params is defined ansible.builtin.debug: var: make_kuttl_db_cleanup_params - name: Run kuttl_db_cleanup retries: "{{ make_kuttl_db_cleanup_retries | default(omit) }}" delay: "{{ make_kuttl_db_cleanup_delay | default(omit) }}" until: "{{ make_kuttl_db_cleanup_until | default(true) }}" register: "make_kuttl_db_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make kuttl_db_cleanup" dry_run: "{{ make_kuttl_db_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_kuttl_db_cleanup_env|default({})), **(make_kuttl_db_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_common_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_co0000644000175000017500000000175015071030124033407 0ustar zuulzuul--- - name: Debug make_kuttl_common_prep_env when: make_kuttl_common_prep_env is defined ansible.builtin.debug: var: make_kuttl_common_prep_env - name: Debug make_kuttl_common_prep_params when: make_kuttl_common_prep_params is defined ansible.builtin.debug: var: make_kuttl_common_prep_params - name: Run kuttl_common_prep retries: "{{ make_kuttl_common_prep_retries | default(omit) }}" delay: "{{ make_kuttl_common_prep_delay | default(omit) }}" until: "{{ make_kuttl_common_prep_until | default(true) }}" register: "make_kuttl_common_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make kuttl_common_prep" dry_run: "{{ make_kuttl_common_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_kuttl_common_prep_env|default({})), **(make_kuttl_common_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_common_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_co0000644000175000017500000000202515071030124033403 0ustar zuulzuul--- - name: Debug make_kuttl_common_cleanup_env when: make_kuttl_common_cleanup_env is defined ansible.builtin.debug: var: make_kuttl_common_cleanup_env - name: Debug make_kuttl_common_cleanup_params when: make_kuttl_common_cleanup_params is defined ansible.builtin.debug: var: make_kuttl_common_cleanup_params - name: Run kuttl_common_cleanup retries: "{{ make_kuttl_common_cleanup_retries | default(omit) }}" delay: "{{ make_kuttl_common_cleanup_delay | default(omit) }}" until: "{{ make_kuttl_common_cleanup_until | default(true) }}" register: "make_kuttl_common_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir 
~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make kuttl_common_cleanup" dry_run: "{{ make_kuttl_common_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_kuttl_common_cleanup_env|default({})), **(make_kuttl_common_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000176715071030124033434 0ustar zuulzuul--- - name: Debug make_keystone_kuttl_run_env when: make_keystone_kuttl_run_env is defined ansible.builtin.debug: var: make_keystone_kuttl_run_env - name: Debug make_keystone_kuttl_run_params when: make_keystone_kuttl_run_params is defined ansible.builtin.debug: var: make_keystone_kuttl_run_params - name: Run keystone_kuttl_run retries: "{{ make_keystone_kuttl_run_retries | default(omit) }}" delay: "{{ make_keystone_kuttl_run_delay | default(omit) }}" until: "{{ make_keystone_kuttl_run_until | default(true) }}" register: "make_keystone_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone_kuttl_run" dry_run: "{{ make_keystone_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_kuttl_run_env|default({})), **(make_keystone_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000167315071030124033430 0ustar zuulzuul--- - name: Debug make_keystone_kuttl_env when: make_keystone_kuttl_env is defined ansible.builtin.debug: var: make_keystone_kuttl_env - name: Debug make_keystone_kuttl_params when: make_keystone_kuttl_params is defined ansible.builtin.debug: var: make_keystone_kuttl_params - name: Run keystone_kuttl retries: "{{ make_keystone_kuttl_retries | default(omit) }}" delay: "{{ make_keystone_kuttl_delay | default(omit) }}" until: "{{ make_keystone_kuttl_until | default(true) }}" register: "make_keystone_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone_kuttl" dry_run: "{{ make_keystone_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_kuttl_env|default({})), **(make_keystone_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican0000644000175000017500000000176715071030124033334 0ustar zuulzuul--- - name: Debug make_barbican_kuttl_run_env when: make_barbican_kuttl_run_env is defined ansible.builtin.debug: var: make_barbican_kuttl_run_env - name: Debug make_barbican_kuttl_run_params when: make_barbican_kuttl_run_params is defined ansible.builtin.debug: var: 
make_barbican_kuttl_run_params - name: Run barbican_kuttl_run retries: "{{ make_barbican_kuttl_run_retries | default(omit) }}" delay: "{{ make_barbican_kuttl_run_delay | default(omit) }}" until: "{{ make_barbican_kuttl_run_until | default(true) }}" register: "make_barbican_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make barbican_kuttl_run" dry_run: "{{ make_barbican_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_barbican_kuttl_run_env|default({})), **(make_barbican_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican0000644000175000017500000000167315071030124033330 0ustar zuulzuul--- - name: Debug make_barbican_kuttl_env when: make_barbican_kuttl_env is defined ansible.builtin.debug: var: make_barbican_kuttl_env - name: Debug make_barbican_kuttl_params when: make_barbican_kuttl_params is defined ansible.builtin.debug: var: make_barbican_kuttl_params - name: Run barbican_kuttl retries: "{{ make_barbican_kuttl_retries | default(omit) }}" delay: "{{ make_barbican_kuttl_delay | default(omit) }}" until: "{{ make_barbican_kuttl_until | default(true) }}" register: "make_barbican_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make barbican_kuttl" dry_run: "{{ make_barbican_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_barbican_kuttl_env|default({})), **(make_barbican_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placement_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placemen0000644000175000017500000000200615071030124033342 0ustar zuulzuul--- - name: Debug make_placement_kuttl_run_env when: make_placement_kuttl_run_env is defined ansible.builtin.debug: var: make_placement_kuttl_run_env - name: Debug make_placement_kuttl_run_params when: make_placement_kuttl_run_params is defined ansible.builtin.debug: var: make_placement_kuttl_run_params - name: Run placement_kuttl_run retries: "{{ make_placement_kuttl_run_retries | default(omit) }}" delay: "{{ make_placement_kuttl_run_delay | default(omit) }}" until: "{{ make_placement_kuttl_run_until | default(true) }}" register: "make_placement_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make placement_kuttl_run" dry_run: "{{ make_placement_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_placement_kuttl_run_env|default({})), **(make_placement_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar 
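The extra_args expression is the one non-obvious part of these tasks: `dict((<target>_env|default({})), **(<target>_params|default({})))` builds a single dictionary by laying the `*_params` mapping over the `*_env` mapping, so a key present in both is taken from `*_params`. A short sketch of that merge using the placement_kuttl_run variables just above (keys and values are hypothetical examples):
# Sketch of the extra_args merge; keys/values below are examples only.
- name: Show how *_env and *_params combine into extra_args
  vars:
    make_placement_kuttl_run_env:
      NAMESPACE: openstack
      TIMEOUT: 300s
    make_placement_kuttl_run_params:
      TIMEOUT: 600s                    # overrides the env value on merge
  ansible.builtin.debug:
    msg: "{{ dict((make_placement_kuttl_run_env|default({})), **(make_placement_kuttl_run_params|default({}))) }}"
    # prints {'NAMESPACE': 'openstack', 'TIMEOUT': '600s'}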
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placement_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_placemen0000644000175000017500000000171215071030124033345 0ustar zuulzuul--- - name: Debug make_placement_kuttl_env when: make_placement_kuttl_env is defined ansible.builtin.debug: var: make_placement_kuttl_env - name: Debug make_placement_kuttl_params when: make_placement_kuttl_params is defined ansible.builtin.debug: var: make_placement_kuttl_params - name: Run placement_kuttl retries: "{{ make_placement_kuttl_retries | default(omit) }}" delay: "{{ make_placement_kuttl_delay | default(omit) }}" until: "{{ make_placement_kuttl_until | default(true) }}" register: "make_placement_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make placement_kuttl" dry_run: "{{ make_placement_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_placement_kuttl_env|default({})), **(make_placement_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_kutt0000644000175000017500000000156015071030124033433 0ustar zuulzuul--- - name: Debug make_ovn_kuttl_env when: make_ovn_kuttl_env is defined ansible.builtin.debug: var: make_ovn_kuttl_env - name: Debug make_ovn_kuttl_params when: make_ovn_kuttl_params is defined ansible.builtin.debug: var: make_ovn_kuttl_params - name: Run ovn_kuttl retries: "{{ make_ovn_kuttl_retries | default(omit) }}" delay: "{{ make_ovn_kuttl_delay | default(omit) }}" until: "{{ make_ovn_kuttl_until | default(true) }}" register: "make_ovn_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ovn_kuttl" dry_run: "{{ make_ovn_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ovn_kuttl_env|default({})), **(make_ovn_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_k0000644000175000017500000000173115071030124033340 0ustar zuulzuul--- - name: Debug make_cinder_kuttl_run_env when: make_cinder_kuttl_run_env is defined ansible.builtin.debug: var: make_cinder_kuttl_run_env - name: Debug make_cinder_kuttl_run_params when: make_cinder_kuttl_run_params is defined ansible.builtin.debug: var: make_cinder_kuttl_run_params - name: Run cinder_kuttl_run retries: "{{ make_cinder_kuttl_run_retries | default(omit) }}" delay: "{{ make_cinder_kuttl_run_delay | default(omit) }}" until: "{{ make_cinder_kuttl_run_until | default(true) }}" register: "make_cinder_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make cinder_kuttl_run" dry_run: "{{ 
make_cinder_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cinder_kuttl_run_env|default({})), **(make_cinder_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cinder_k0000644000175000017500000000163515071030124033343 0ustar zuulzuul--- - name: Debug make_cinder_kuttl_env when: make_cinder_kuttl_env is defined ansible.builtin.debug: var: make_cinder_kuttl_env - name: Debug make_cinder_kuttl_params when: make_cinder_kuttl_params is defined ansible.builtin.debug: var: make_cinder_kuttl_params - name: Run cinder_kuttl retries: "{{ make_cinder_kuttl_retries | default(omit) }}" delay: "{{ make_cinder_kuttl_delay | default(omit) }}" until: "{{ make_cinder_kuttl_until | default(true) }}" register: "make_cinder_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make cinder_kuttl" dry_run: "{{ make_cinder_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cinder_kuttl_env|default({})), **(make_cinder_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_0000644000175000017500000000175015071030124033414 0ustar zuulzuul--- - name: Debug make_neutron_kuttl_run_env when: make_neutron_kuttl_run_env is defined ansible.builtin.debug: var: make_neutron_kuttl_run_env - name: Debug make_neutron_kuttl_run_params when: make_neutron_kuttl_run_params is defined ansible.builtin.debug: var: make_neutron_kuttl_run_params - name: Run neutron_kuttl_run retries: "{{ make_neutron_kuttl_run_retries | default(omit) }}" delay: "{{ make_neutron_kuttl_run_delay | default(omit) }}" until: "{{ make_neutron_kuttl_run_until | default(true) }}" register: "make_neutron_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make neutron_kuttl_run" dry_run: "{{ make_neutron_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_neutron_kuttl_run_env|default({})), **(make_neutron_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_neutron_0000644000175000017500000000165415071030124033417 0ustar zuulzuul--- - name: Debug make_neutron_kuttl_env when: make_neutron_kuttl_env is defined ansible.builtin.debug: var: make_neutron_kuttl_env - name: Debug make_neutron_kuttl_params when: make_neutron_kuttl_params is defined ansible.builtin.debug: var: make_neutron_kuttl_params - name: Run neutron_kuttl retries: "{{ make_neutron_kuttl_retries | default(omit) }}" delay: "{{ make_neutron_kuttl_delay | default(omit) }}" until: "{{ make_neutron_kuttl_until | default(true) }}" register: 
"make_neutron_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make neutron_kuttl" dry_run: "{{ make_neutron_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_neutron_kuttl_env|default({})), **(make_neutron_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_0000644000175000017500000000175015071030124033350 0ustar zuulzuul--- - name: Debug make_octavia_kuttl_run_env when: make_octavia_kuttl_run_env is defined ansible.builtin.debug: var: make_octavia_kuttl_run_env - name: Debug make_octavia_kuttl_run_params when: make_octavia_kuttl_run_params is defined ansible.builtin.debug: var: make_octavia_kuttl_run_params - name: Run octavia_kuttl_run retries: "{{ make_octavia_kuttl_run_retries | default(omit) }}" delay: "{{ make_octavia_kuttl_run_delay | default(omit) }}" until: "{{ make_octavia_kuttl_run_until | default(true) }}" register: "make_octavia_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make octavia_kuttl_run" dry_run: "{{ make_octavia_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_octavia_kuttl_run_env|default({})), **(make_octavia_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_0000644000175000017500000000165415071030124033353 0ustar zuulzuul--- - name: Debug make_octavia_kuttl_env when: make_octavia_kuttl_env is defined ansible.builtin.debug: var: make_octavia_kuttl_env - name: Debug make_octavia_kuttl_params when: make_octavia_kuttl_params is defined ansible.builtin.debug: var: make_octavia_kuttl_params - name: Run octavia_kuttl retries: "{{ make_octavia_kuttl_retries | default(omit) }}" delay: "{{ make_octavia_kuttl_delay | default(omit) }}" until: "{{ make_octavia_kuttl_until | default(true) }}" register: "make_octavia_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make octavia_kuttl" dry_run: "{{ make_octavia_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_octavia_kuttl_env|default({})), **(make_octavia_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designate_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designat0000644000175000017500000000171215071030124033357 0ustar zuulzuul--- - name: Debug make_designate_kuttl_env when: make_designate_kuttl_env is defined ansible.builtin.debug: var: make_designate_kuttl_env - name: Debug make_designate_kuttl_params when: make_designate_kuttl_params is defined 
ansible.builtin.debug: var: make_designate_kuttl_params - name: Run designate_kuttl retries: "{{ make_designate_kuttl_retries | default(omit) }}" delay: "{{ make_designate_kuttl_delay | default(omit) }}" until: "{{ make_designate_kuttl_until | default(true) }}" register: "make_designate_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make designate_kuttl" dry_run: "{{ make_designate_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_designate_kuttl_env|default({})), **(make_designate_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designate_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_designat0000644000175000017500000000200615071030124033354 0ustar zuulzuul--- - name: Debug make_designate_kuttl_run_env when: make_designate_kuttl_run_env is defined ansible.builtin.debug: var: make_designate_kuttl_run_env - name: Debug make_designate_kuttl_run_params when: make_designate_kuttl_run_params is defined ansible.builtin.debug: var: make_designate_kuttl_run_params - name: Run designate_kuttl_run retries: "{{ make_designate_kuttl_run_retries | default(omit) }}" delay: "{{ make_designate_kuttl_run_delay | default(omit) }}" until: "{{ make_designate_kuttl_run_until | default(true) }}" register: "make_designate_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make designate_kuttl_run" dry_run: "{{ make_designate_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_designate_kuttl_run_env|default({})), **(make_designate_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ovn_kutt0000644000175000017500000000165415071030124033437 0ustar zuulzuul--- - name: Debug make_ovn_kuttl_run_env when: make_ovn_kuttl_run_env is defined ansible.builtin.debug: var: make_ovn_kuttl_run_env - name: Debug make_ovn_kuttl_run_params when: make_ovn_kuttl_run_params is defined ansible.builtin.debug: var: make_ovn_kuttl_run_params - name: Run ovn_kuttl_run retries: "{{ make_ovn_kuttl_run_retries | default(omit) }}" delay: "{{ make_ovn_kuttl_run_delay | default(omit) }}" until: "{{ make_ovn_kuttl_run_until | default(true) }}" register: "make_ovn_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ovn_kuttl_run" dry_run: "{{ make_ovn_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ovn_kuttl_run_env|default({})), **(make_ovn_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra_ku0000644000175000017500000000171215071030124033357 0ustar zuulzuul--- - name: Debug make_infra_kuttl_run_env when: make_infra_kuttl_run_env is defined ansible.builtin.debug: var: make_infra_kuttl_run_env - name: Debug make_infra_kuttl_run_params when: make_infra_kuttl_run_params is defined ansible.builtin.debug: var: make_infra_kuttl_run_params - name: Run infra_kuttl_run retries: "{{ make_infra_kuttl_run_retries | default(omit) }}" delay: "{{ make_infra_kuttl_run_delay | default(omit) }}" until: "{{ make_infra_kuttl_run_until | default(true) }}" register: "make_infra_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make infra_kuttl_run" dry_run: "{{ make_infra_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_infra_kuttl_run_env|default({})), **(make_infra_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra_ku0000644000175000017500000000161615071030124033362 0ustar zuulzuul--- - name: Debug make_infra_kuttl_env when: make_infra_kuttl_env is defined ansible.builtin.debug: var: make_infra_kuttl_env - name: Debug make_infra_kuttl_params when: make_infra_kuttl_params is defined ansible.builtin.debug: var: make_infra_kuttl_params - name: Run infra_kuttl retries: "{{ make_infra_kuttl_retries | default(omit) }}" delay: "{{ make_infra_kuttl_delay | default(omit) }}" until: "{{ make_infra_kuttl_until | default(true) }}" register: "make_infra_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make infra_kuttl" dry_run: "{{ make_infra_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_infra_kuttl_env|default({})), **(make_infra_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_k0000644000175000017500000000173115071030124033357 0ustar zuulzuul--- - name: Debug make_ironic_kuttl_run_env when: make_ironic_kuttl_run_env is defined ansible.builtin.debug: var: make_ironic_kuttl_run_env - name: Debug make_ironic_kuttl_run_params when: make_ironic_kuttl_run_params is defined ansible.builtin.debug: var: make_ironic_kuttl_run_params - name: Run ironic_kuttl_run retries: "{{ make_ironic_kuttl_run_retries | default(omit) }}" delay: "{{ make_ironic_kuttl_run_delay | default(omit) }}" until: "{{ make_ironic_kuttl_run_until | default(true) }}" register: "make_ironic_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic_kuttl_run" dry_run: "{{ 
make_ironic_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_kuttl_run_env|default({})), **(make_ironic_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_k0000644000175000017500000000163515071030124033362 0ustar zuulzuul--- - name: Debug make_ironic_kuttl_env when: make_ironic_kuttl_env is defined ansible.builtin.debug: var: make_ironic_kuttl_env - name: Debug make_ironic_kuttl_params when: make_ironic_kuttl_params is defined ansible.builtin.debug: var: make_ironic_kuttl_params - name: Run ironic_kuttl retries: "{{ make_ironic_kuttl_retries | default(omit) }}" delay: "{{ make_ironic_kuttl_delay | default(omit) }}" until: "{{ make_ironic_kuttl_until | default(true) }}" register: "make_ironic_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic_kuttl" dry_run: "{{ make_ironic_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_kuttl_env|default({})), **(make_ironic_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_kuttl_crc.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_k0000644000175000017500000000173115071030124033357 0ustar zuulzuul--- - name: Debug make_ironic_kuttl_crc_env when: make_ironic_kuttl_crc_env is defined ansible.builtin.debug: var: make_ironic_kuttl_crc_env - name: Debug make_ironic_kuttl_crc_params when: make_ironic_kuttl_crc_params is defined ansible.builtin.debug: var: make_ironic_kuttl_crc_params - name: Run ironic_kuttl_crc retries: "{{ make_ironic_kuttl_crc_retries | default(omit) }}" delay: "{{ make_ironic_kuttl_crc_delay | default(omit) }}" until: "{{ make_ironic_kuttl_crc_until | default(true) }}" register: "make_ironic_kuttl_crc_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic_kuttl_crc" dry_run: "{{ make_ironic_kuttl_crc_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_kuttl_crc_env|default({})), **(make_ironic_kuttl_crc_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_kut0000644000175000017500000000167315071030124033373 0ustar zuulzuul--- - name: Debug make_heat_kuttl_run_env when: make_heat_kuttl_run_env is defined ansible.builtin.debug: var: make_heat_kuttl_run_env - name: Debug make_heat_kuttl_run_params when: make_heat_kuttl_run_params is defined ansible.builtin.debug: var: make_heat_kuttl_run_params - name: Run heat_kuttl_run retries: "{{ make_heat_kuttl_run_retries | default(omit) }}" delay: "{{ make_heat_kuttl_run_delay | default(omit) }}" until: "{{ make_heat_kuttl_run_until | default(true) }}" register: 
"make_heat_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make heat_kuttl_run" dry_run: "{{ make_heat_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_heat_kuttl_run_env|default({})), **(make_heat_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_kut0000644000175000017500000000157715071030124033376 0ustar zuulzuul--- - name: Debug make_heat_kuttl_env when: make_heat_kuttl_env is defined ansible.builtin.debug: var: make_heat_kuttl_env - name: Debug make_heat_kuttl_params when: make_heat_kuttl_params is defined ansible.builtin.debug: var: make_heat_kuttl_params - name: Run heat_kuttl retries: "{{ make_heat_kuttl_retries | default(omit) }}" delay: "{{ make_heat_kuttl_delay | default(omit) }}" until: "{{ make_heat_kuttl_until | default(true) }}" register: "make_heat_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make heat_kuttl" dry_run: "{{ make_heat_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_heat_kuttl_env|default({})), **(make_heat_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_kuttl_crc.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_kut0000644000175000017500000000167315071030124033373 0ustar zuulzuul--- - name: Debug make_heat_kuttl_crc_env when: make_heat_kuttl_crc_env is defined ansible.builtin.debug: var: make_heat_kuttl_crc_env - name: Debug make_heat_kuttl_crc_params when: make_heat_kuttl_crc_params is defined ansible.builtin.debug: var: make_heat_kuttl_crc_params - name: Run heat_kuttl_crc retries: "{{ make_heat_kuttl_crc_retries | default(omit) }}" delay: "{{ make_heat_kuttl_crc_delay | default(omit) }}" until: "{{ make_heat_kuttl_crc_until | default(true) }}" register: "make_heat_kuttl_crc_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make heat_kuttl_crc" dry_run: "{{ make_heat_kuttl_crc_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_heat_kuttl_crc_env|default({})), **(make_heat_kuttl_crc_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansibleee_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansiblee0000644000175000017500000000200615071030124033340 0ustar zuulzuul--- - name: Debug make_ansibleee_kuttl_run_env when: make_ansibleee_kuttl_run_env is defined ansible.builtin.debug: var: make_ansibleee_kuttl_run_env - name: Debug make_ansibleee_kuttl_run_params when: make_ansibleee_kuttl_run_params is defined ansible.builtin.debug: var: make_ansibleee_kuttl_run_params - name: 
Run ansibleee_kuttl_run retries: "{{ make_ansibleee_kuttl_run_retries | default(omit) }}" delay: "{{ make_ansibleee_kuttl_run_delay | default(omit) }}" until: "{{ make_ansibleee_kuttl_run_until | default(true) }}" register: "make_ansibleee_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ansibleee_kuttl_run" dry_run: "{{ make_ansibleee_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ansibleee_kuttl_run_env|default({})), **(make_ansibleee_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000017000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansibleee_kuttl_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansiblee0000644000175000017500000000210215071030124033335 0ustar zuulzuul--- - name: Debug make_ansibleee_kuttl_cleanup_env when: make_ansibleee_kuttl_cleanup_env is defined ansible.builtin.debug: var: make_ansibleee_kuttl_cleanup_env - name: Debug make_ansibleee_kuttl_cleanup_params when: make_ansibleee_kuttl_cleanup_params is defined ansible.builtin.debug: var: make_ansibleee_kuttl_cleanup_params - name: Run ansibleee_kuttl_cleanup retries: "{{ make_ansibleee_kuttl_cleanup_retries | default(omit) }}" delay: "{{ make_ansibleee_kuttl_cleanup_delay | default(omit) }}" until: "{{ make_ansibleee_kuttl_cleanup_until | default(true) }}" register: "make_ansibleee_kuttl_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ansibleee_kuttl_cleanup" dry_run: "{{ make_ansibleee_kuttl_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ansibleee_kuttl_cleanup_env|default({})), **(make_ansibleee_kuttl_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansibleee_kuttl_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansiblee0000644000175000017500000000202515071030124033341 0ustar zuulzuul--- - name: Debug make_ansibleee_kuttl_prep_env when: make_ansibleee_kuttl_prep_env is defined ansible.builtin.debug: var: make_ansibleee_kuttl_prep_env - name: Debug make_ansibleee_kuttl_prep_params when: make_ansibleee_kuttl_prep_params is defined ansible.builtin.debug: var: make_ansibleee_kuttl_prep_params - name: Run ansibleee_kuttl_prep retries: "{{ make_ansibleee_kuttl_prep_retries | default(omit) }}" delay: "{{ make_ansibleee_kuttl_prep_delay | default(omit) }}" until: "{{ make_ansibleee_kuttl_prep_until | default(true) }}" register: "make_ansibleee_kuttl_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ansibleee_kuttl_prep" dry_run: "{{ make_ansibleee_kuttl_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ansibleee_kuttl_prep_env|default({})), **(make_ansibleee_kuttl_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar 
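Each run task also registers its result as `make_<target>_status`, so a caller can inspect the outcome after the include returns. A small sketch using the make_ansibleee_kuttl_prep.yml file just above; the exact fields inside the registered value depend on cifmw.general.ci_script and are not assumed here:
# Sketch: the registered make_<target>_status variable survives the include
# and can be inspected afterwards.
- name: Include the generated ansibleee_kuttl_prep tasks
  ansible.builtin.include_role:
    name: install_yamls_makes
    tasks_from: make_ansibleee_kuttl_prep.yml

- name: Show the registered result of the prep run
  ansible.builtin.debug:
    var: make_ansibleee_kuttl_prep_status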
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansibleee_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansiblee0000644000175000017500000000171215071030124033343 0ustar zuulzuul--- - name: Debug make_ansibleee_kuttl_env when: make_ansibleee_kuttl_env is defined ansible.builtin.debug: var: make_ansibleee_kuttl_env - name: Debug make_ansibleee_kuttl_params when: make_ansibleee_kuttl_params is defined ansible.builtin.debug: var: make_ansibleee_kuttl_params - name: Run ansibleee_kuttl retries: "{{ make_ansibleee_kuttl_retries | default(omit) }}" delay: "{{ make_ansibleee_kuttl_delay | default(omit) }}" until: "{{ make_ansibleee_kuttl_until | default(true) }}" register: "make_ansibleee_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ansibleee_kuttl" dry_run: "{{ make_ansibleee_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ansibleee_kuttl_env|default({})), **(make_ansibleee_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_k0000644000175000017500000000173115071030124033325 0ustar zuulzuul--- - name: Debug make_glance_kuttl_run_env when: make_glance_kuttl_run_env is defined ansible.builtin.debug: var: make_glance_kuttl_run_env - name: Debug make_glance_kuttl_run_params when: make_glance_kuttl_run_params is defined ansible.builtin.debug: var: make_glance_kuttl_run_params - name: Run glance_kuttl_run retries: "{{ make_glance_kuttl_run_retries | default(omit) }}" delay: "{{ make_glance_kuttl_run_delay | default(omit) }}" until: "{{ make_glance_kuttl_run_until | default(true) }}" register: "make_glance_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make glance_kuttl_run" dry_run: "{{ make_glance_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_glance_kuttl_run_env|default({})), **(make_glance_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_glance_k0000644000175000017500000000163515071030124033330 0ustar zuulzuul--- - name: Debug make_glance_kuttl_env when: make_glance_kuttl_env is defined ansible.builtin.debug: var: make_glance_kuttl_env - name: Debug make_glance_kuttl_params when: make_glance_kuttl_params is defined ansible.builtin.debug: var: make_glance_kuttl_params - name: Run glance_kuttl retries: "{{ make_glance_kuttl_retries | default(omit) }}" delay: "{{ make_glance_kuttl_delay | default(omit) }}" until: "{{ make_glance_kuttl_until | default(true) }}" register: "make_glance_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make 
glance_kuttl" dry_run: "{{ make_glance_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_glance_kuttl_env|default({})), **(make_glance_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_k0000644000175000017500000000173115071030124033335 0ustar zuulzuul--- - name: Debug make_manila_kuttl_run_env when: make_manila_kuttl_run_env is defined ansible.builtin.debug: var: make_manila_kuttl_run_env - name: Debug make_manila_kuttl_run_params when: make_manila_kuttl_run_params is defined ansible.builtin.debug: var: make_manila_kuttl_run_params - name: Run manila_kuttl_run retries: "{{ make_manila_kuttl_run_retries | default(omit) }}" delay: "{{ make_manila_kuttl_run_delay | default(omit) }}" until: "{{ make_manila_kuttl_run_until | default(true) }}" register: "make_manila_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make manila_kuttl_run" dry_run: "{{ make_manila_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_manila_kuttl_run_env|default({})), **(make_manila_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_k0000644000175000017500000000163515071030124033340 0ustar zuulzuul--- - name: Debug make_manila_kuttl_env when: make_manila_kuttl_env is defined ansible.builtin.debug: var: make_manila_kuttl_env - name: Debug make_manila_kuttl_params when: make_manila_kuttl_params is defined ansible.builtin.debug: var: make_manila_kuttl_params - name: Run manila_kuttl retries: "{{ make_manila_kuttl_retries | default(omit) }}" delay: "{{ make_manila_kuttl_delay | default(omit) }}" until: "{{ make_manila_kuttl_until | default(true) }}" register: "make_manila_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make manila_kuttl" dry_run: "{{ make_manila_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_manila_kuttl_env|default({})), **(make_manila_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_ku0000644000175000017500000000171215071030124033414 0ustar zuulzuul--- - name: Debug make_swift_kuttl_run_env when: make_swift_kuttl_run_env is defined ansible.builtin.debug: var: make_swift_kuttl_run_env - name: Debug make_swift_kuttl_run_params when: make_swift_kuttl_run_params is defined ansible.builtin.debug: var: make_swift_kuttl_run_params - name: Run swift_kuttl_run retries: "{{ make_swift_kuttl_run_retries | default(omit) }}" delay: "{{ make_swift_kuttl_run_delay | default(omit) }}" until: "{{ make_swift_kuttl_run_until | default(true) }}" register: 
"make_swift_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make swift_kuttl_run" dry_run: "{{ make_swift_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_swift_kuttl_run_env|default({})), **(make_swift_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_ku0000644000175000017500000000161615071030124033417 0ustar zuulzuul--- - name: Debug make_swift_kuttl_env when: make_swift_kuttl_env is defined ansible.builtin.debug: var: make_swift_kuttl_env - name: Debug make_swift_kuttl_params when: make_swift_kuttl_params is defined ansible.builtin.debug: var: make_swift_kuttl_params - name: Run swift_kuttl retries: "{{ make_swift_kuttl_retries | default(omit) }}" delay: "{{ make_swift_kuttl_delay | default(omit) }}" until: "{{ make_swift_kuttl_until | default(true) }}" register: "make_swift_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make swift_kuttl" dry_run: "{{ make_swift_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_swift_kuttl_env|default({})), **(make_swift_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_0000644000175000017500000000175015071030124033412 0ustar zuulzuul--- - name: Debug make_horizon_kuttl_run_env when: make_horizon_kuttl_run_env is defined ansible.builtin.debug: var: make_horizon_kuttl_run_env - name: Debug make_horizon_kuttl_run_params when: make_horizon_kuttl_run_params is defined ansible.builtin.debug: var: make_horizon_kuttl_run_params - name: Run horizon_kuttl_run retries: "{{ make_horizon_kuttl_run_retries | default(omit) }}" delay: "{{ make_horizon_kuttl_run_delay | default(omit) }}" until: "{{ make_horizon_kuttl_run_until | default(true) }}" register: "make_horizon_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make horizon_kuttl_run" dry_run: "{{ make_horizon_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_horizon_kuttl_run_env|default({})), **(make_horizon_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_0000644000175000017500000000165415071030124033415 0ustar zuulzuul--- - name: Debug make_horizon_kuttl_env when: make_horizon_kuttl_env is defined ansible.builtin.debug: var: make_horizon_kuttl_env - name: Debug make_horizon_kuttl_params when: make_horizon_kuttl_params is defined ansible.builtin.debug: var: 
make_horizon_kuttl_params - name: Run horizon_kuttl retries: "{{ make_horizon_kuttl_retries | default(omit) }}" delay: "{{ make_horizon_kuttl_delay | default(omit) }}" until: "{{ make_horizon_kuttl_until | default(true) }}" register: "make_horizon_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make horizon_kuttl" dry_run: "{{ make_horizon_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_horizon_kuttl_env|default({})), **(make_horizon_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000200615071030124033372 0ustar zuulzuul--- - name: Debug make_openstack_kuttl_run_env when: make_openstack_kuttl_run_env is defined ansible.builtin.debug: var: make_openstack_kuttl_run_env - name: Debug make_openstack_kuttl_run_params when: make_openstack_kuttl_run_params is defined ansible.builtin.debug: var: make_openstack_kuttl_run_params - name: Run openstack_kuttl_run retries: "{{ make_openstack_kuttl_run_retries | default(omit) }}" delay: "{{ make_openstack_kuttl_run_delay | default(omit) }}" until: "{{ make_openstack_kuttl_run_until | default(true) }}" register: "make_openstack_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_kuttl_run" dry_run: "{{ make_openstack_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_kuttl_run_env|default({})), **(make_openstack_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000171215071030124033375 0ustar zuulzuul--- - name: Debug make_openstack_kuttl_env when: make_openstack_kuttl_env is defined ansible.builtin.debug: var: make_openstack_kuttl_env - name: Debug make_openstack_kuttl_params when: make_openstack_kuttl_params is defined ansible.builtin.debug: var: make_openstack_kuttl_params - name: Run openstack_kuttl retries: "{{ make_openstack_kuttl_retries | default(omit) }}" delay: "{{ make_openstack_kuttl_delay | default(omit) }}" until: "{{ make_openstack_kuttl_until | default(true) }}" register: "make_openstack_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_kuttl" dry_run: "{{ make_openstack_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_kuttl_env|default({})), **(make_openstack_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_chainsaw_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_0000644000175000017500000000202515071030124033315 0ustar zuulzuul--- - name: Debug make_mariadb_chainsaw_run_env when: make_mariadb_chainsaw_run_env is defined ansible.builtin.debug: var: make_mariadb_chainsaw_run_env - name: Debug make_mariadb_chainsaw_run_params when: make_mariadb_chainsaw_run_params is defined ansible.builtin.debug: var: make_mariadb_chainsaw_run_params - name: Run mariadb_chainsaw_run retries: "{{ make_mariadb_chainsaw_run_retries | default(omit) }}" delay: "{{ make_mariadb_chainsaw_run_delay | default(omit) }}" until: "{{ make_mariadb_chainsaw_run_until | default(true) }}" register: "make_mariadb_chainsaw_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make mariadb_chainsaw_run" dry_run: "{{ make_mariadb_chainsaw_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_mariadb_chainsaw_run_env|default({})), **(make_mariadb_chainsaw_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_chainsaw.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_0000644000175000017500000000173115071030124033320 0ustar zuulzuul--- - name: Debug make_mariadb_chainsaw_env when: make_mariadb_chainsaw_env is defined ansible.builtin.debug: var: make_mariadb_chainsaw_env - name: Debug make_mariadb_chainsaw_params when: make_mariadb_chainsaw_params is defined ansible.builtin.debug: var: make_mariadb_chainsaw_params - name: Run mariadb_chainsaw retries: "{{ make_mariadb_chainsaw_retries | default(omit) }}" delay: "{{ make_mariadb_chainsaw_delay | default(omit) }}" until: "{{ make_mariadb_chainsaw_until | default(true) }}" register: "make_mariadb_chainsaw_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make mariadb_chainsaw" dry_run: "{{ make_mariadb_chainsaw_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_mariadb_chainsaw_env|default({})), **(make_mariadb_chainsaw_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_0000644000175000017500000000163515071030124033414 0ustar zuulzuul--- - name: Debug make_horizon_prep_env when: make_horizon_prep_env is defined ansible.builtin.debug: var: make_horizon_prep_env - name: Debug make_horizon_prep_params when: make_horizon_prep_params is defined ansible.builtin.debug: var: make_horizon_prep_params - name: Run horizon_prep retries: "{{ make_horizon_prep_retries | default(omit) }}" delay: "{{ make_horizon_prep_delay | default(omit) }}" until: "{{ make_horizon_prep_until | default(true) }}" register: "make_horizon_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: 
"/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make horizon_prep" dry_run: "{{ make_horizon_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_horizon_prep_env|default({})), **(make_horizon_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015000000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon.0000644000175000017500000000152215071030124033326 0ustar zuulzuul--- - name: Debug make_horizon_env when: make_horizon_env is defined ansible.builtin.debug: var: make_horizon_env - name: Debug make_horizon_params when: make_horizon_params is defined ansible.builtin.debug: var: make_horizon_params - name: Run horizon retries: "{{ make_horizon_retries | default(omit) }}" delay: "{{ make_horizon_delay | default(omit) }}" until: "{{ make_horizon_until | default(true) }}" register: "make_horizon_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make horizon" dry_run: "{{ make_horizon_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_horizon_env|default({})), **(make_horizon_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_0000644000175000017500000000171215071030124033410 0ustar zuulzuul--- - name: Debug make_horizon_cleanup_env when: make_horizon_cleanup_env is defined ansible.builtin.debug: var: make_horizon_cleanup_env - name: Debug make_horizon_cleanup_params when: make_horizon_cleanup_params is defined ansible.builtin.debug: var: make_horizon_cleanup_params - name: Run horizon_cleanup retries: "{{ make_horizon_cleanup_retries | default(omit) }}" delay: "{{ make_horizon_cleanup_delay | default(omit) }}" until: "{{ make_horizon_cleanup_until | default(true) }}" register: "make_horizon_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make horizon_cleanup" dry_run: "{{ make_horizon_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_horizon_cleanup_env|default({})), **(make_horizon_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_0000644000175000017500000000200615071030124033405 0ustar zuulzuul--- - name: Debug make_horizon_deploy_prep_env when: make_horizon_deploy_prep_env is defined ansible.builtin.debug: var: make_horizon_deploy_prep_env - name: Debug make_horizon_deploy_prep_params when: make_horizon_deploy_prep_params is defined ansible.builtin.debug: var: make_horizon_deploy_prep_params - name: Run horizon_deploy_prep retries: "{{ make_horizon_deploy_prep_retries | default(omit) }}" delay: "{{ make_horizon_deploy_prep_delay | default(omit) }}" until: "{{ make_horizon_deploy_prep_until | 
default(true) }}" register: "make_horizon_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make horizon_deploy_prep" dry_run: "{{ make_horizon_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_horizon_deploy_prep_env|default({})), **(make_horizon_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_0000644000175000017500000000167315071030124033416 0ustar zuulzuul--- - name: Debug make_horizon_deploy_env when: make_horizon_deploy_env is defined ansible.builtin.debug: var: make_horizon_deploy_env - name: Debug make_horizon_deploy_params when: make_horizon_deploy_params is defined ansible.builtin.debug: var: make_horizon_deploy_params - name: Run horizon_deploy retries: "{{ make_horizon_deploy_retries | default(omit) }}" delay: "{{ make_horizon_deploy_delay | default(omit) }}" until: "{{ make_horizon_deploy_until | default(true) }}" register: "make_horizon_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make horizon_deploy" dry_run: "{{ make_horizon_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_horizon_deploy_env|default({})), **(make_horizon_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016700000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_horizon_0000644000175000017500000000206315071030124033410 0ustar zuulzuul--- - name: Debug make_horizon_deploy_cleanup_env when: make_horizon_deploy_cleanup_env is defined ansible.builtin.debug: var: make_horizon_deploy_cleanup_env - name: Debug make_horizon_deploy_cleanup_params when: make_horizon_deploy_cleanup_params is defined ansible.builtin.debug: var: make_horizon_deploy_cleanup_params - name: Run horizon_deploy_cleanup retries: "{{ make_horizon_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_horizon_deploy_cleanup_delay | default(omit) }}" until: "{{ make_horizon_deploy_cleanup_until | default(true) }}" register: "make_horizon_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make horizon_deploy_cleanup" dry_run: "{{ make_horizon_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_horizon_deploy_cleanup_env|default({})), **(make_horizon_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_pre0000644000175000017500000000156015071030124033351 0ustar zuulzuul--- - name: Debug make_heat_prep_env when: make_heat_prep_env is defined 
ansible.builtin.debug: var: make_heat_prep_env - name: Debug make_heat_prep_params when: make_heat_prep_params is defined ansible.builtin.debug: var: make_heat_prep_params - name: Run heat_prep retries: "{{ make_heat_prep_retries | default(omit) }}" delay: "{{ make_heat_prep_delay | default(omit) }}" until: "{{ make_heat_prep_until | default(true) }}" register: "make_heat_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make heat_prep" dry_run: "{{ make_heat_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_heat_prep_env|default({})), **(make_heat_prep_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat.yml0000644000175000017500000000144515071030124033305 0ustar zuulzuul--- - name: Debug make_heat_env when: make_heat_env is defined ansible.builtin.debug: var: make_heat_env - name: Debug make_heat_params when: make_heat_params is defined ansible.builtin.debug: var: make_heat_params - name: Run heat retries: "{{ make_heat_retries | default(omit) }}" delay: "{{ make_heat_delay | default(omit) }}" until: "{{ make_heat_until | default(true) }}" register: "make_heat_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make heat" dry_run: "{{ make_heat_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_heat_env|default({})), **(make_heat_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_cle0000644000175000017500000000163515071030124033331 0ustar zuulzuul--- - name: Debug make_heat_cleanup_env when: make_heat_cleanup_env is defined ansible.builtin.debug: var: make_heat_cleanup_env - name: Debug make_heat_cleanup_params when: make_heat_cleanup_params is defined ansible.builtin.debug: var: make_heat_cleanup_params - name: Run heat_cleanup retries: "{{ make_heat_cleanup_retries | default(omit) }}" delay: "{{ make_heat_cleanup_delay | default(omit) }}" until: "{{ make_heat_cleanup_until | default(true) }}" register: "make_heat_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make heat_cleanup" dry_run: "{{ make_heat_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_heat_cleanup_env|default({})), **(make_heat_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_dep0000644000175000017500000000173115071030124033333 0ustar zuulzuul--- - name: Debug make_heat_deploy_prep_env when: make_heat_deploy_prep_env is defined ansible.builtin.debug: var: make_heat_deploy_prep_env - name: Debug make_heat_deploy_prep_params when: make_heat_deploy_prep_params is defined ansible.builtin.debug: var: 
make_heat_deploy_prep_params - name: Run heat_deploy_prep retries: "{{ make_heat_deploy_prep_retries | default(omit) }}" delay: "{{ make_heat_deploy_prep_delay | default(omit) }}" until: "{{ make_heat_deploy_prep_until | default(true) }}" register: "make_heat_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make heat_deploy_prep" dry_run: "{{ make_heat_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_heat_deploy_prep_env|default({})), **(make_heat_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_dep0000644000175000017500000000161615071030124033335 0ustar zuulzuul--- - name: Debug make_heat_deploy_env when: make_heat_deploy_env is defined ansible.builtin.debug: var: make_heat_deploy_env - name: Debug make_heat_deploy_params when: make_heat_deploy_params is defined ansible.builtin.debug: var: make_heat_deploy_params - name: Run heat_deploy retries: "{{ make_heat_deploy_retries | default(omit) }}" delay: "{{ make_heat_deploy_delay | default(omit) }}" until: "{{ make_heat_deploy_until | default(true) }}" register: "make_heat_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make heat_deploy" dry_run: "{{ make_heat_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_heat_deploy_env|default({})), **(make_heat_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_heat_dep0000644000175000017500000000200615071030124033327 0ustar zuulzuul--- - name: Debug make_heat_deploy_cleanup_env when: make_heat_deploy_cleanup_env is defined ansible.builtin.debug: var: make_heat_deploy_cleanup_env - name: Debug make_heat_deploy_cleanup_params when: make_heat_deploy_cleanup_params is defined ansible.builtin.debug: var: make_heat_deploy_cleanup_params - name: Run heat_deploy_cleanup retries: "{{ make_heat_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_heat_deploy_cleanup_delay | default(omit) }}" until: "{{ make_heat_deploy_cleanup_until | default(true) }}" register: "make_heat_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make heat_deploy_cleanup" dry_run: "{{ make_heat_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_heat_deploy_cleanup_env|default({})), **(make_heat_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansibleee_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansiblee0000644000175000017500000000167315071030124033351 0ustar zuulzuul--- - name: Debug make_ansibleee_prep_env when: make_ansibleee_prep_env is defined ansible.builtin.debug: var: make_ansibleee_prep_env - name: Debug make_ansibleee_prep_params when: make_ansibleee_prep_params is defined ansible.builtin.debug: var: make_ansibleee_prep_params - name: Run ansibleee_prep retries: "{{ make_ansibleee_prep_retries | default(omit) }}" delay: "{{ make_ansibleee_prep_delay | default(omit) }}" until: "{{ make_ansibleee_prep_until | default(true) }}" register: "make_ansibleee_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ansibleee_prep" dry_run: "{{ make_ansibleee_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ansibleee_prep_env|default({})), **(make_ansibleee_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansibleee.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansiblee0000644000175000017500000000156015071030124033344 0ustar zuulzuul--- - name: Debug make_ansibleee_env when: make_ansibleee_env is defined ansible.builtin.debug: var: make_ansibleee_env - name: Debug make_ansibleee_params when: make_ansibleee_params is defined ansible.builtin.debug: var: make_ansibleee_params - name: Run ansibleee retries: "{{ make_ansibleee_retries | default(omit) }}" delay: "{{ make_ansibleee_delay | default(omit) }}" until: "{{ make_ansibleee_until | default(true) }}" register: "make_ansibleee_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ansibleee" dry_run: "{{ make_ansibleee_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ansibleee_env|default({})), **(make_ansibleee_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansibleee_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansiblee0000644000175000017500000000175015071030124033345 0ustar zuulzuul--- - name: Debug make_ansibleee_cleanup_env when: make_ansibleee_cleanup_env is defined ansible.builtin.debug: var: make_ansibleee_cleanup_env - name: Debug make_ansibleee_cleanup_params when: make_ansibleee_cleanup_params is defined ansible.builtin.debug: var: make_ansibleee_cleanup_params - name: Run ansibleee_cleanup retries: "{{ make_ansibleee_cleanup_retries | default(omit) }}" delay: "{{ make_ansibleee_cleanup_delay | default(omit) }}" until: "{{ make_ansibleee_cleanup_until | default(true) }}" register: "make_ansibleee_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ansibleee_cleanup" dry_run: "{{ 
make_ansibleee_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ansibleee_cleanup_env|default({})), **(make_ansibleee_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_baremetal_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_baremeta0000644000175000017500000000167315071030124033347 0ustar zuulzuul--- - name: Debug make_baremetal_prep_env when: make_baremetal_prep_env is defined ansible.builtin.debug: var: make_baremetal_prep_env - name: Debug make_baremetal_prep_params when: make_baremetal_prep_params is defined ansible.builtin.debug: var: make_baremetal_prep_params - name: Run baremetal_prep retries: "{{ make_baremetal_prep_retries | default(omit) }}" delay: "{{ make_baremetal_prep_delay | default(omit) }}" until: "{{ make_baremetal_prep_until | default(true) }}" register: "make_baremetal_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make baremetal_prep" dry_run: "{{ make_baremetal_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_baremetal_prep_env|default({})), **(make_baremetal_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_baremetal.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_baremeta0000644000175000017500000000156015071030124033342 0ustar zuulzuul--- - name: Debug make_baremetal_env when: make_baremetal_env is defined ansible.builtin.debug: var: make_baremetal_env - name: Debug make_baremetal_params when: make_baremetal_params is defined ansible.builtin.debug: var: make_baremetal_params - name: Run baremetal retries: "{{ make_baremetal_retries | default(omit) }}" delay: "{{ make_baremetal_delay | default(omit) }}" until: "{{ make_baremetal_until | default(true) }}" register: "make_baremetal_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make baremetal" dry_run: "{{ make_baremetal_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_baremetal_env|default({})), **(make_baremetal_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_baremetal_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_baremeta0000644000175000017500000000175015071030124033343 0ustar zuulzuul--- - name: Debug make_baremetal_cleanup_env when: make_baremetal_cleanup_env is defined ansible.builtin.debug: var: make_baremetal_cleanup_env - name: Debug make_baremetal_cleanup_params when: make_baremetal_cleanup_params is defined ansible.builtin.debug: var: make_baremetal_cleanup_params - name: Run baremetal_cleanup retries: "{{ make_baremetal_cleanup_retries | default(omit) }}" delay: "{{ make_baremetal_cleanup_delay | default(omit) }}" until: "{{ make_baremetal_cleanup_until | default(true) }}" register: "make_baremetal_cleanup_status" cifmw.general.ci_script: output_dir: 
"{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make baremetal_cleanup" dry_run: "{{ make_baremetal_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_baremetal_cleanup_env|default({})), **(make_baremetal_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ceph_help.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ceph_hel0000644000175000017500000000156015071030124033331 0ustar zuulzuul--- - name: Debug make_ceph_help_env when: make_ceph_help_env is defined ansible.builtin.debug: var: make_ceph_help_env - name: Debug make_ceph_help_params when: make_ceph_help_params is defined ansible.builtin.debug: var: make_ceph_help_params - name: Run ceph_help retries: "{{ make_ceph_help_retries | default(omit) }}" delay: "{{ make_ceph_help_delay | default(omit) }}" until: "{{ make_ceph_help_until | default(true) }}" register: "make_ceph_help_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ceph_help" dry_run: "{{ make_ceph_help_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ceph_help_env|default({})), **(make_ceph_help_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ceph.yml0000644000175000017500000000144515071030124033303 0ustar zuulzuul--- - name: Debug make_ceph_env when: make_ceph_env is defined ansible.builtin.debug: var: make_ceph_env - name: Debug make_ceph_params when: make_ceph_params is defined ansible.builtin.debug: var: make_ceph_params - name: Run ceph retries: "{{ make_ceph_retries | default(omit) }}" delay: "{{ make_ceph_delay | default(omit) }}" until: "{{ make_ceph_until | default(true) }}" register: "make_ceph_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ceph" dry_run: "{{ make_ceph_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ceph_env|default({})), **(make_ceph_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ceph_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ceph_cle0000644000175000017500000000163515071030124033327 0ustar zuulzuul--- - name: Debug make_ceph_cleanup_env when: make_ceph_cleanup_env is defined ansible.builtin.debug: var: make_ceph_cleanup_env - name: Debug make_ceph_cleanup_params when: make_ceph_cleanup_params is defined ansible.builtin.debug: var: make_ceph_cleanup_params - name: Run ceph_cleanup retries: "{{ make_ceph_cleanup_retries | default(omit) }}" delay: "{{ make_ceph_cleanup_delay | default(omit) }}" until: "{{ make_ceph_cleanup_until | default(true) }}" register: "make_ceph_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ceph_cleanup" dry_run: "{{ 
make_ceph_cleanup_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_ceph_cleanup_env|default({})), **(make_ceph_cleanup_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rook_prep.yml
---
- name: Debug make_rook_prep_env
  when: make_rook_prep_env is defined
  ansible.builtin.debug:
    var: make_rook_prep_env
- name: Debug make_rook_prep_params
  when: make_rook_prep_params is defined
  ansible.builtin.debug:
    var: make_rook_prep_params
- name: Run rook_prep
  retries: "{{ make_rook_prep_retries | default(omit) }}"
  delay: "{{ make_rook_prep_delay | default(omit) }}"
  until: "{{ make_rook_prep_until | default(true) }}"
  register: "make_rook_prep_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make rook_prep"
    dry_run: "{{ make_rook_prep_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_rook_prep_env|default({})), **(make_rook_prep_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rook.yml
---
- name: Debug make_rook_env
  when: make_rook_env is defined
  ansible.builtin.debug:
    var: make_rook_env
- name: Debug make_rook_params
  when: make_rook_params is defined
  ansible.builtin.debug:
    var: make_rook_params
- name: Run rook
  retries: "{{ make_rook_retries | default(omit) }}"
  delay: "{{ make_rook_delay | default(omit) }}"
  until: "{{ make_rook_until | default(true) }}"
  register: "make_rook_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make rook"
    dry_run: "{{ make_rook_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_rook_env|default({})), **(make_rook_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rook_deploy_prep.yml
---
- name: Debug make_rook_deploy_prep_env
  when: make_rook_deploy_prep_env is defined
  ansible.builtin.debug:
    var: make_rook_deploy_prep_env
- name: Debug make_rook_deploy_prep_params
  when: make_rook_deploy_prep_params is defined
  ansible.builtin.debug:
    var: make_rook_deploy_prep_params
- name: Run rook_deploy_prep
  retries: "{{ make_rook_deploy_prep_retries | default(omit) }}"
  delay: "{{ make_rook_deploy_prep_delay | default(omit) }}"
  until: "{{ make_rook_deploy_prep_until | default(true) }}"
  register: "make_rook_deploy_prep_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make rook_deploy_prep"
    dry_run: "{{ make_rook_deploy_prep_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_rook_deploy_prep_env|default({})), **(make_rook_deploy_prep_params|default({}))) }}"
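All of the generated wrapper files above share one pattern: optionally debug a make_<target>_env / make_<target>_params pair, then call cifmw.general.ci_script to run the matching make target from the install_yamls checkout, building extra_args as dict(env, **params) so that params take precedence on duplicate keys. The following minimal sketch is hypothetical and not part of the collected artifacts; the play, include path and example values are assumptions, and only the variable names and the module invocation come from the files above.

- name: Drive the generated rook_deploy_prep wrapper (illustrative only)
  hosts: localhost
  gather_facts: true          # ansible_user_dir feeds the default output_dir
  vars:
    make_rook_deploy_prep_env:        # environment handed to "make rook_deploy_prep"
      NAMESPACE: openstack
    make_rook_deploy_prep_params:     # wins over env on duplicate keys via dict(env, **params)
      NAMESPACE: openstack-storage
    make_rook_deploy_prep_retries: 3  # optional; default(omit) drops these when unset
    make_rook_deploy_prep_delay: 30
  tasks:
    - name: Include the generated wrapper task file (path is an assumption)
      ansible.builtin.include_tasks: roles/install_yamls_makes/tasks/make_rook_deploy_prep.yml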
././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rook_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rook_dep0000644000175000017500000000161615071030124033366 0ustar zuulzuul--- - name: Debug make_rook_deploy_env when: make_rook_deploy_env is defined ansible.builtin.debug: var: make_rook_deploy_env - name: Debug make_rook_deploy_params when: make_rook_deploy_params is defined ansible.builtin.debug: var: make_rook_deploy_params - name: Run rook_deploy retries: "{{ make_rook_deploy_retries | default(omit) }}" delay: "{{ make_rook_deploy_delay | default(omit) }}" until: "{{ make_rook_deploy_until | default(true) }}" register: "make_rook_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make rook_deploy" dry_run: "{{ make_rook_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_rook_deploy_env|default({})), **(make_rook_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rook_crc_disk.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rook_crc0000644000175000017500000000165415071030124033367 0ustar zuulzuul--- - name: Debug make_rook_crc_disk_env when: make_rook_crc_disk_env is defined ansible.builtin.debug: var: make_rook_crc_disk_env - name: Debug make_rook_crc_disk_params when: make_rook_crc_disk_params is defined ansible.builtin.debug: var: make_rook_crc_disk_params - name: Run rook_crc_disk retries: "{{ make_rook_crc_disk_retries | default(omit) }}" delay: "{{ make_rook_crc_disk_delay | default(omit) }}" until: "{{ make_rook_crc_disk_until | default(true) }}" register: "make_rook_crc_disk_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make rook_crc_disk" dry_run: "{{ make_rook_crc_disk_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_rook_crc_disk_env|default({})), **(make_rook_crc_disk_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rook_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_rook_cle0000644000175000017500000000163515071030124033362 0ustar zuulzuul--- - name: Debug make_rook_cleanup_env when: make_rook_cleanup_env is defined ansible.builtin.debug: var: make_rook_cleanup_env - name: Debug make_rook_cleanup_params when: make_rook_cleanup_params is defined ansible.builtin.debug: var: make_rook_cleanup_params - name: Run rook_cleanup retries: "{{ make_rook_cleanup_retries | default(omit) }}" delay: "{{ make_rook_cleanup_delay | default(omit) }}" until: "{{ make_rook_cleanup_until | default(true) }}" register: "make_rook_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make rook_cleanup" dry_run: "{{ 
make_rook_cleanup_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_rook_cleanup_env|default({})), **(make_rook_cleanup_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_lvms.yml
---
- name: Debug make_lvms_env
  when: make_lvms_env is defined
  ansible.builtin.debug:
    var: make_lvms_env
- name: Debug make_lvms_params
  when: make_lvms_params is defined
  ansible.builtin.debug:
    var: make_lvms_params
- name: Run lvms
  retries: "{{ make_lvms_retries | default(omit) }}"
  delay: "{{ make_lvms_delay | default(omit) }}"
  until: "{{ make_lvms_until | default(true) }}"
  register: "make_lvms_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make lvms"
    dry_run: "{{ make_lvms_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_lvms_env|default({})), **(make_lvms_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nmstate.yml
---
- name: Debug make_nmstate_env
  when: make_nmstate_env is defined
  ansible.builtin.debug:
    var: make_nmstate_env
- name: Debug make_nmstate_params
  when: make_nmstate_params is defined
  ansible.builtin.debug:
    var: make_nmstate_params
- name: Run nmstate
  retries: "{{ make_nmstate_retries | default(omit) }}"
  delay: "{{ make_nmstate_delay | default(omit) }}"
  until: "{{ make_nmstate_until | default(true) }}"
  register: "make_nmstate_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make nmstate"
    dry_run: "{{ make_nmstate_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_nmstate_env|default({})), **(make_nmstate_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nncp.yml
---
- name: Debug make_nncp_env
  when: make_nncp_env is defined
  ansible.builtin.debug:
    var: make_nncp_env
- name: Debug make_nncp_params
  when: make_nncp_params is defined
  ansible.builtin.debug:
    var: make_nncp_params
- name: Run nncp
  retries: "{{ make_nncp_retries | default(omit) }}"
  delay: "{{ make_nncp_delay | default(omit) }}"
  until: "{{ make_nncp_until | default(true) }}"
  register: "make_nncp_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make nncp"
    dry_run: "{{ make_nncp_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_nncp_env|default({})), **(make_nncp_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nncp_cleanup.yml
---
- name: Debug make_nncp_cleanup_env
  when:
make_nncp_cleanup_env is defined ansible.builtin.debug: var: make_nncp_cleanup_env - name: Debug make_nncp_cleanup_params when: make_nncp_cleanup_params is defined ansible.builtin.debug: var: make_nncp_cleanup_params - name: Run nncp_cleanup retries: "{{ make_nncp_cleanup_retries | default(omit) }}" delay: "{{ make_nncp_cleanup_delay | default(omit) }}" until: "{{ make_nncp_cleanup_until | default(true) }}" register: "make_nncp_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nncp_cleanup" dry_run: "{{ make_nncp_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nncp_cleanup_env|default({})), **(make_nncp_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netattach.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netattac0000644000175000017500000000156015071030124033365 0ustar zuulzuul--- - name: Debug make_netattach_env when: make_netattach_env is defined ansible.builtin.debug: var: make_netattach_env - name: Debug make_netattach_params when: make_netattach_params is defined ansible.builtin.debug: var: make_netattach_params - name: Run netattach retries: "{{ make_netattach_retries | default(omit) }}" delay: "{{ make_netattach_delay | default(omit) }}" until: "{{ make_netattach_until | default(true) }}" register: "make_netattach_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netattach" dry_run: "{{ make_netattach_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netattach_env|default({})), **(make_netattach_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netattach_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netattac0000644000175000017500000000175015071030124033366 0ustar zuulzuul--- - name: Debug make_netattach_cleanup_env when: make_netattach_cleanup_env is defined ansible.builtin.debug: var: make_netattach_cleanup_env - name: Debug make_netattach_cleanup_params when: make_netattach_cleanup_params is defined ansible.builtin.debug: var: make_netattach_cleanup_params - name: Run netattach_cleanup retries: "{{ make_netattach_cleanup_retries | default(omit) }}" delay: "{{ make_netattach_cleanup_delay | default(omit) }}" until: "{{ make_netattach_cleanup_until | default(true) }}" register: "make_netattach_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netattach_cleanup" dry_run: "{{ make_netattach_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netattach_cleanup_env|default({})), **(make_netattach_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015000000000000011577 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_metallb.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_metallb.0000644000175000017500000000152215071030124033256 0ustar zuulzuul--- - name: Debug make_metallb_env when: make_metallb_env is defined ansible.builtin.debug: var: make_metallb_env - name: Debug make_metallb_params when: make_metallb_params is defined ansible.builtin.debug: var: make_metallb_params - name: Run metallb retries: "{{ make_metallb_retries | default(omit) }}" delay: "{{ make_metallb_delay | default(omit) }}" until: "{{ make_metallb_until | default(true) }}" register: "make_metallb_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make metallb" dry_run: "{{ make_metallb_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_metallb_env|default({})), **(make_metallb_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_metallb_config.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_metallb_0000644000175000017500000000167315071030124033346 0ustar zuulzuul--- - name: Debug make_metallb_config_env when: make_metallb_config_env is defined ansible.builtin.debug: var: make_metallb_config_env - name: Debug make_metallb_config_params when: make_metallb_config_params is defined ansible.builtin.debug: var: make_metallb_config_params - name: Run metallb_config retries: "{{ make_metallb_config_retries | default(omit) }}" delay: "{{ make_metallb_config_delay | default(omit) }}" until: "{{ make_metallb_config_until | default(true) }}" register: "make_metallb_config_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make metallb_config" dry_run: "{{ make_metallb_config_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_metallb_config_env|default({})), **(make_metallb_config_params|default({}))) }}" ././@LongLink0000644000000000000000000000016700000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_metallb_config_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_metallb_0000644000175000017500000000206315071030124033340 0ustar zuulzuul--- - name: Debug make_metallb_config_cleanup_env when: make_metallb_config_cleanup_env is defined ansible.builtin.debug: var: make_metallb_config_cleanup_env - name: Debug make_metallb_config_cleanup_params when: make_metallb_config_cleanup_params is defined ansible.builtin.debug: var: make_metallb_config_cleanup_params - name: Run metallb_config_cleanup retries: "{{ make_metallb_config_cleanup_retries | default(omit) }}" delay: "{{ make_metallb_config_cleanup_delay | default(omit) }}" until: "{{ make_metallb_config_cleanup_until | default(true) }}" register: "make_metallb_config_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make metallb_config_cleanup" dry_run: "{{ 
make_metallb_config_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_metallb_config_cleanup_env|default({})), **(make_metallb_config_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_metallb_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_metallb_0000644000175000017500000000171215071030124033340 0ustar zuulzuul--- - name: Debug make_metallb_cleanup_env when: make_metallb_cleanup_env is defined ansible.builtin.debug: var: make_metallb_cleanup_env - name: Debug make_metallb_cleanup_params when: make_metallb_cleanup_params is defined ansible.builtin.debug: var: make_metallb_cleanup_params - name: Run metallb_cleanup retries: "{{ make_metallb_cleanup_retries | default(omit) }}" delay: "{{ make_metallb_cleanup_delay | default(omit) }}" until: "{{ make_metallb_cleanup_until | default(true) }}" register: "make_metallb_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make metallb_cleanup" dry_run: "{{ make_metallb_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_metallb_cleanup_env|default({})), **(make_metallb_cleanup_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki.yml0000644000175000017500000000144515071030124033322 0ustar zuulzuul--- - name: Debug make_loki_env when: make_loki_env is defined ansible.builtin.debug: var: make_loki_env - name: Debug make_loki_params when: make_loki_params is defined ansible.builtin.debug: var: make_loki_params - name: Run loki retries: "{{ make_loki_retries | default(omit) }}" delay: "{{ make_loki_delay | default(omit) }}" until: "{{ make_loki_until | default(true) }}" register: "make_loki_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make loki" dry_run: "{{ make_loki_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_loki_env|default({})), **(make_loki_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki_cle0000644000175000017500000000163515071030124033346 0ustar zuulzuul--- - name: Debug make_loki_cleanup_env when: make_loki_cleanup_env is defined ansible.builtin.debug: var: make_loki_cleanup_env - name: Debug make_loki_cleanup_params when: make_loki_cleanup_params is defined ansible.builtin.debug: var: make_loki_cleanup_params - name: Run loki_cleanup retries: "{{ make_loki_cleanup_retries | default(omit) }}" delay: "{{ make_loki_cleanup_delay | default(omit) }}" until: "{{ make_loki_cleanup_until | default(true) }}" register: "make_loki_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make loki_cleanup" dry_run: "{{ make_loki_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ 
dict((make_loki_cleanup_env|default({})), **(make_loki_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki_dep0000644000175000017500000000161615071030124033352 0ustar zuulzuul--- - name: Debug make_loki_deploy_env when: make_loki_deploy_env is defined ansible.builtin.debug: var: make_loki_deploy_env - name: Debug make_loki_deploy_params when: make_loki_deploy_params is defined ansible.builtin.debug: var: make_loki_deploy_params - name: Run loki_deploy retries: "{{ make_loki_deploy_retries | default(omit) }}" delay: "{{ make_loki_deploy_delay | default(omit) }}" until: "{{ make_loki_deploy_until | default(true) }}" register: "make_loki_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make loki_deploy" dry_run: "{{ make_loki_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_loki_deploy_env|default({})), **(make_loki_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki_dep0000644000175000017500000000200615071030124033344 0ustar zuulzuul--- - name: Debug make_loki_deploy_cleanup_env when: make_loki_deploy_cleanup_env is defined ansible.builtin.debug: var: make_loki_deploy_cleanup_env - name: Debug make_loki_deploy_cleanup_params when: make_loki_deploy_cleanup_params is defined ansible.builtin.debug: var: make_loki_deploy_cleanup_params - name: Run loki_deploy_cleanup retries: "{{ make_loki_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_loki_deploy_cleanup_delay | default(omit) }}" until: "{{ make_loki_deploy_cleanup_until | default(true) }}" register: "make_loki_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make loki_deploy_cleanup" dry_run: "{{ make_loki_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_loki_deploy_cleanup_env|default({})), **(make_loki_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netobserv.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netobser0000644000175000017500000000156015071030124033403 0ustar zuulzuul--- - name: Debug make_netobserv_env when: make_netobserv_env is defined ansible.builtin.debug: var: make_netobserv_env - name: Debug make_netobserv_params when: make_netobserv_params is defined ansible.builtin.debug: var: make_netobserv_params - name: Run netobserv retries: "{{ make_netobserv_retries | default(omit) }}" delay: "{{ make_netobserv_delay | default(omit) }}" until: "{{ make_netobserv_until | default(true) }}" register: "make_netobserv_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ 
'/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netobserv" dry_run: "{{ make_netobserv_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netobserv_env|default({})), **(make_netobserv_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netobserv_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netobser0000644000175000017500000000175015071030124033404 0ustar zuulzuul--- - name: Debug make_netobserv_cleanup_env when: make_netobserv_cleanup_env is defined ansible.builtin.debug: var: make_netobserv_cleanup_env - name: Debug make_netobserv_cleanup_params when: make_netobserv_cleanup_params is defined ansible.builtin.debug: var: make_netobserv_cleanup_params - name: Run netobserv_cleanup retries: "{{ make_netobserv_cleanup_retries | default(omit) }}" delay: "{{ make_netobserv_cleanup_delay | default(omit) }}" until: "{{ make_netobserv_cleanup_until | default(true) }}" register: "make_netobserv_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netobserv_cleanup" dry_run: "{{ make_netobserv_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netobserv_cleanup_env|default({})), **(make_netobserv_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netobserv_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netobser0000644000175000017500000000173115071030124033403 0ustar zuulzuul--- - name: Debug make_netobserv_deploy_env when: make_netobserv_deploy_env is defined ansible.builtin.debug: var: make_netobserv_deploy_env - name: Debug make_netobserv_deploy_params when: make_netobserv_deploy_params is defined ansible.builtin.debug: var: make_netobserv_deploy_params - name: Run netobserv_deploy retries: "{{ make_netobserv_deploy_retries | default(omit) }}" delay: "{{ make_netobserv_deploy_delay | default(omit) }}" until: "{{ make_netobserv_deploy_until | default(true) }}" register: "make_netobserv_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netobserv_deploy" dry_run: "{{ make_netobserv_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netobserv_deploy_env|default({})), **(make_netobserv_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netobserv_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netobser0000644000175000017500000000212115071030124033375 0ustar zuulzuul--- - name: Debug make_netobserv_deploy_cleanup_env when: make_netobserv_deploy_cleanup_env is defined ansible.builtin.debug: var: make_netobserv_deploy_cleanup_env - name: Debug make_netobserv_deploy_cleanup_params when: make_netobserv_deploy_cleanup_params is defined ansible.builtin.debug: 
var: make_netobserv_deploy_cleanup_params - name: Run netobserv_deploy_cleanup retries: "{{ make_netobserv_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_netobserv_deploy_cleanup_delay | default(omit) }}" until: "{{ make_netobserv_deploy_cleanup_until | default(true) }}" register: "make_netobserv_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netobserv_deploy_cleanup" dry_run: "{{ make_netobserv_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netobserv_deploy_cleanup_env|default({})), **(make_netobserv_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_p0000644000175000017500000000161615071030124033344 0ustar zuulzuul--- - name: Debug make_manila_prep_env when: make_manila_prep_env is defined ansible.builtin.debug: var: make_manila_prep_env - name: Debug make_manila_prep_params when: make_manila_prep_params is defined ansible.builtin.debug: var: make_manila_prep_params - name: Run manila_prep retries: "{{ make_manila_prep_retries | default(omit) }}" delay: "{{ make_manila_prep_delay | default(omit) }}" until: "{{ make_manila_prep_until | default(true) }}" register: "make_manila_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make manila_prep" dry_run: "{{ make_manila_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_manila_prep_env|default({})), **(make_manila_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000014700000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila.y0000644000175000017500000000150315071030124033267 0ustar zuulzuul--- - name: Debug make_manila_env when: make_manila_env is defined ansible.builtin.debug: var: make_manila_env - name: Debug make_manila_params when: make_manila_params is defined ansible.builtin.debug: var: make_manila_params - name: Run manila retries: "{{ make_manila_retries | default(omit) }}" delay: "{{ make_manila_delay | default(omit) }}" until: "{{ make_manila_until | default(true) }}" register: "make_manila_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make manila" dry_run: "{{ make_manila_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_manila_env|default({})), **(make_manila_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_c0000644000175000017500000000167315071030124033332 0ustar zuulzuul--- - name: Debug make_manila_cleanup_env when: make_manila_cleanup_env is defined 
ansible.builtin.debug: var: make_manila_cleanup_env - name: Debug make_manila_cleanup_params when: make_manila_cleanup_params is defined ansible.builtin.debug: var: make_manila_cleanup_params - name: Run manila_cleanup retries: "{{ make_manila_cleanup_retries | default(omit) }}" delay: "{{ make_manila_cleanup_delay | default(omit) }}" until: "{{ make_manila_cleanup_until | default(true) }}" register: "make_manila_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make manila_cleanup" dry_run: "{{ make_manila_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_manila_cleanup_env|default({})), **(make_manila_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_d0000644000175000017500000000176715071030124033337 0ustar zuulzuul--- - name: Debug make_manila_deploy_prep_env when: make_manila_deploy_prep_env is defined ansible.builtin.debug: var: make_manila_deploy_prep_env - name: Debug make_manila_deploy_prep_params when: make_manila_deploy_prep_params is defined ansible.builtin.debug: var: make_manila_deploy_prep_params - name: Run manila_deploy_prep retries: "{{ make_manila_deploy_prep_retries | default(omit) }}" delay: "{{ make_manila_deploy_prep_delay | default(omit) }}" until: "{{ make_manila_deploy_prep_until | default(true) }}" register: "make_manila_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make manila_deploy_prep" dry_run: "{{ make_manila_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_manila_deploy_prep_env|default({})), **(make_manila_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_d0000644000175000017500000000165415071030124033332 0ustar zuulzuul--- - name: Debug make_manila_deploy_env when: make_manila_deploy_env is defined ansible.builtin.debug: var: make_manila_deploy_env - name: Debug make_manila_deploy_params when: make_manila_deploy_params is defined ansible.builtin.debug: var: make_manila_deploy_params - name: Run manila_deploy retries: "{{ make_manila_deploy_retries | default(omit) }}" delay: "{{ make_manila_deploy_delay | default(omit) }}" until: "{{ make_manila_deploy_until | default(true) }}" register: "make_manila_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make manila_deploy" dry_run: "{{ make_manila_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_manila_deploy_env|default({})), **(make_manila_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_manila_d0000644000175000017500000000204415071030124033324 0ustar zuulzuul--- - name: Debug make_manila_deploy_cleanup_env when: make_manila_deploy_cleanup_env is defined ansible.builtin.debug: var: make_manila_deploy_cleanup_env - name: Debug make_manila_deploy_cleanup_params when: make_manila_deploy_cleanup_params is defined ansible.builtin.debug: var: make_manila_deploy_cleanup_params - name: Run manila_deploy_cleanup retries: "{{ make_manila_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_manila_deploy_cleanup_delay | default(omit) }}" until: "{{ make_manila_deploy_cleanup_until | default(true) }}" register: "make_manila_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make manila_deploy_cleanup" dry_run: "{{ make_manila_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_manila_deploy_cleanup_env|default({})), **(make_manila_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetry_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetr0000644000175000017500000000167315071030124033410 0ustar zuulzuul--- - name: Debug make_telemetry_prep_env when: make_telemetry_prep_env is defined ansible.builtin.debug: var: make_telemetry_prep_env - name: Debug make_telemetry_prep_params when: make_telemetry_prep_params is defined ansible.builtin.debug: var: make_telemetry_prep_params - name: Run telemetry_prep retries: "{{ make_telemetry_prep_retries | default(omit) }}" delay: "{{ make_telemetry_prep_delay | default(omit) }}" until: "{{ make_telemetry_prep_until | default(true) }}" register: "make_telemetry_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make telemetry_prep" dry_run: "{{ make_telemetry_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_telemetry_prep_env|default({})), **(make_telemetry_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetry.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetr0000644000175000017500000000156015071030124033403 0ustar zuulzuul--- - name: Debug make_telemetry_env when: make_telemetry_env is defined ansible.builtin.debug: var: make_telemetry_env - name: Debug make_telemetry_params when: make_telemetry_params is defined ansible.builtin.debug: var: make_telemetry_params - name: Run telemetry retries: "{{ make_telemetry_retries | default(omit) }}" delay: "{{ make_telemetry_delay | default(omit) }}" until: "{{ make_telemetry_until | default(true) }}" register: "make_telemetry_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: 
"/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make telemetry" dry_run: "{{ make_telemetry_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_telemetry_env|default({})), **(make_telemetry_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetry_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetr0000644000175000017500000000175015071030124033404 0ustar zuulzuul--- - name: Debug make_telemetry_cleanup_env when: make_telemetry_cleanup_env is defined ansible.builtin.debug: var: make_telemetry_cleanup_env - name: Debug make_telemetry_cleanup_params when: make_telemetry_cleanup_params is defined ansible.builtin.debug: var: make_telemetry_cleanup_params - name: Run telemetry_cleanup retries: "{{ make_telemetry_cleanup_retries | default(omit) }}" delay: "{{ make_telemetry_cleanup_delay | default(omit) }}" until: "{{ make_telemetry_cleanup_until | default(true) }}" register: "make_telemetry_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make telemetry_cleanup" dry_run: "{{ make_telemetry_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_telemetry_cleanup_env|default({})), **(make_telemetry_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetry_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetr0000644000175000017500000000204415071030124033401 0ustar zuulzuul--- - name: Debug make_telemetry_deploy_prep_env when: make_telemetry_deploy_prep_env is defined ansible.builtin.debug: var: make_telemetry_deploy_prep_env - name: Debug make_telemetry_deploy_prep_params when: make_telemetry_deploy_prep_params is defined ansible.builtin.debug: var: make_telemetry_deploy_prep_params - name: Run telemetry_deploy_prep retries: "{{ make_telemetry_deploy_prep_retries | default(omit) }}" delay: "{{ make_telemetry_deploy_prep_delay | default(omit) }}" until: "{{ make_telemetry_deploy_prep_until | default(true) }}" register: "make_telemetry_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make telemetry_deploy_prep" dry_run: "{{ make_telemetry_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_telemetry_deploy_prep_env|default({})), **(make_telemetry_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetry_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetr0000644000175000017500000000173115071030124033403 0ustar zuulzuul--- - name: Debug make_telemetry_deploy_env when: make_telemetry_deploy_env is defined ansible.builtin.debug: var: make_telemetry_deploy_env - name: Debug make_telemetry_deploy_params when: make_telemetry_deploy_params is defined ansible.builtin.debug: var: 
make_telemetry_deploy_params - name: Run telemetry_deploy retries: "{{ make_telemetry_deploy_retries | default(omit) }}" delay: "{{ make_telemetry_deploy_delay | default(omit) }}" until: "{{ make_telemetry_deploy_until | default(true) }}" register: "make_telemetry_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make telemetry_deploy" dry_run: "{{ make_telemetry_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_telemetry_deploy_env|default({})), **(make_telemetry_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetry_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetr0000644000175000017500000000212115071030124033375 0ustar zuulzuul--- - name: Debug make_telemetry_deploy_cleanup_env when: make_telemetry_deploy_cleanup_env is defined ansible.builtin.debug: var: make_telemetry_deploy_cleanup_env - name: Debug make_telemetry_deploy_cleanup_params when: make_telemetry_deploy_cleanup_params is defined ansible.builtin.debug: var: make_telemetry_deploy_cleanup_params - name: Run telemetry_deploy_cleanup retries: "{{ make_telemetry_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_telemetry_deploy_cleanup_delay | default(omit) }}" until: "{{ make_telemetry_deploy_cleanup_until | default(true) }}" register: "make_telemetry_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make telemetry_deploy_cleanup" dry_run: "{{ make_telemetry_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_telemetry_deploy_cleanup_env|default({})), **(make_telemetry_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetry_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetr0000644000175000017500000000200615071030124033377 0ustar zuulzuul--- - name: Debug make_telemetry_kuttl_run_env when: make_telemetry_kuttl_run_env is defined ansible.builtin.debug: var: make_telemetry_kuttl_run_env - name: Debug make_telemetry_kuttl_run_params when: make_telemetry_kuttl_run_params is defined ansible.builtin.debug: var: make_telemetry_kuttl_run_params - name: Run telemetry_kuttl_run retries: "{{ make_telemetry_kuttl_run_retries | default(omit) }}" delay: "{{ make_telemetry_kuttl_run_delay | default(omit) }}" until: "{{ make_telemetry_kuttl_run_until | default(true) }}" register: "make_telemetry_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make telemetry_kuttl_run" dry_run: "{{ make_telemetry_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_telemetry_kuttl_run_env|default({})), **(make_telemetry_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetry_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_telemetr0000644000175000017500000000171215071030124033402 0ustar zuulzuul--- - name: Debug make_telemetry_kuttl_env when: make_telemetry_kuttl_env is defined ansible.builtin.debug: var: make_telemetry_kuttl_env - name: Debug make_telemetry_kuttl_params when: make_telemetry_kuttl_params is defined ansible.builtin.debug: var: make_telemetry_kuttl_params - name: Run telemetry_kuttl retries: "{{ make_telemetry_kuttl_retries | default(omit) }}" delay: "{{ make_telemetry_kuttl_delay | default(omit) }}" until: "{{ make_telemetry_kuttl_until | default(true) }}" register: "make_telemetry_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make telemetry_kuttl" dry_run: "{{ make_telemetry_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_telemetry_kuttl_env|default({})), **(make_telemetry_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000015300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_pr0000644000175000017500000000157715071030124033427 0ustar zuulzuul--- - name: Debug make_swift_prep_env when: make_swift_prep_env is defined ansible.builtin.debug: var: make_swift_prep_env - name: Debug make_swift_prep_params when: make_swift_prep_params is defined ansible.builtin.debug: var: make_swift_prep_params - name: Run swift_prep retries: "{{ make_swift_prep_retries | default(omit) }}" delay: "{{ make_swift_prep_delay | default(omit) }}" until: "{{ make_swift_prep_until | default(true) }}" register: "make_swift_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make swift_prep" dry_run: "{{ make_swift_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_swift_prep_env|default({})), **(make_swift_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000014600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift.ym0000644000175000017500000000146415071030124033345 0ustar zuulzuul--- - name: Debug make_swift_env when: make_swift_env is defined ansible.builtin.debug: var: make_swift_env - name: Debug make_swift_params when: make_swift_params is defined ansible.builtin.debug: var: make_swift_params - name: Run swift retries: "{{ make_swift_retries | default(omit) }}" delay: "{{ make_swift_delay | default(omit) }}" until: "{{ make_swift_until | default(true) }}" register: "make_swift_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make swift" dry_run: "{{ make_swift_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_swift_env|default({})), **(make_swift_params|default({}))) }}" 
././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_cl0000644000175000017500000000165415071030124033400 0ustar zuulzuul--- - name: Debug make_swift_cleanup_env when: make_swift_cleanup_env is defined ansible.builtin.debug: var: make_swift_cleanup_env - name: Debug make_swift_cleanup_params when: make_swift_cleanup_params is defined ansible.builtin.debug: var: make_swift_cleanup_params - name: Run swift_cleanup retries: "{{ make_swift_cleanup_retries | default(omit) }}" delay: "{{ make_swift_cleanup_delay | default(omit) }}" until: "{{ make_swift_cleanup_until | default(true) }}" register: "make_swift_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make swift_cleanup" dry_run: "{{ make_swift_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_swift_cleanup_env|default({})), **(make_swift_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_de0000644000175000017500000000175015071030124033367 0ustar zuulzuul--- - name: Debug make_swift_deploy_prep_env when: make_swift_deploy_prep_env is defined ansible.builtin.debug: var: make_swift_deploy_prep_env - name: Debug make_swift_deploy_prep_params when: make_swift_deploy_prep_params is defined ansible.builtin.debug: var: make_swift_deploy_prep_params - name: Run swift_deploy_prep retries: "{{ make_swift_deploy_prep_retries | default(omit) }}" delay: "{{ make_swift_deploy_prep_delay | default(omit) }}" until: "{{ make_swift_deploy_prep_until | default(true) }}" register: "make_swift_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make swift_deploy_prep" dry_run: "{{ make_swift_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_swift_deploy_prep_env|default({})), **(make_swift_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_de0000644000175000017500000000163515071030124033371 0ustar zuulzuul--- - name: Debug make_swift_deploy_env when: make_swift_deploy_env is defined ansible.builtin.debug: var: make_swift_deploy_env - name: Debug make_swift_deploy_params when: make_swift_deploy_params is defined ansible.builtin.debug: var: make_swift_deploy_params - name: Run swift_deploy retries: "{{ make_swift_deploy_retries | default(omit) }}" delay: "{{ make_swift_deploy_delay | default(omit) }}" until: "{{ make_swift_deploy_until | default(true) }}" register: "make_swift_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: 
"/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make swift_deploy" dry_run: "{{ make_swift_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_swift_deploy_env|default({})), **(make_swift_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_swift_de0000644000175000017500000000202515071030124033363 0ustar zuulzuul--- - name: Debug make_swift_deploy_cleanup_env when: make_swift_deploy_cleanup_env is defined ansible.builtin.debug: var: make_swift_deploy_cleanup_env - name: Debug make_swift_deploy_cleanup_params when: make_swift_deploy_cleanup_params is defined ansible.builtin.debug: var: make_swift_deploy_cleanup_params - name: Run swift_deploy_cleanup retries: "{{ make_swift_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_swift_deploy_cleanup_delay | default(omit) }}" until: "{{ make_swift_deploy_cleanup_until | default(true) }}" register: "make_swift_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make swift_deploy_cleanup" dry_run: "{{ make_swift_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_swift_deploy_cleanup_env|default({})), **(make_swift_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_certmanager.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_certmana0000644000175000017500000000161615071030124033356 0ustar zuulzuul--- - name: Debug make_certmanager_env when: make_certmanager_env is defined ansible.builtin.debug: var: make_certmanager_env - name: Debug make_certmanager_params when: make_certmanager_params is defined ansible.builtin.debug: var: make_certmanager_params - name: Run certmanager retries: "{{ make_certmanager_retries | default(omit) }}" delay: "{{ make_certmanager_delay | default(omit) }}" until: "{{ make_certmanager_until | default(true) }}" register: "make_certmanager_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make certmanager" dry_run: "{{ make_certmanager_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_certmanager_env|default({})), **(make_certmanager_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_certmanager_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_certmana0000644000175000017500000000200615071030124033350 0ustar zuulzuul--- - name: Debug make_certmanager_cleanup_env when: make_certmanager_cleanup_env is defined ansible.builtin.debug: var: make_certmanager_cleanup_env - name: Debug make_certmanager_cleanup_params when: make_certmanager_cleanup_params is defined ansible.builtin.debug: var: make_certmanager_cleanup_params - name: Run certmanager_cleanup retries: "{{ 
make_certmanager_cleanup_retries | default(omit) }}" delay: "{{ make_certmanager_cleanup_delay | default(omit) }}" until: "{{ make_certmanager_cleanup_until | default(true) }}" register: "make_certmanager_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make certmanager_cleanup" dry_run: "{{ make_certmanager_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_certmanager_cleanup_env|default({})), **(make_certmanager_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_validate_marketplace.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_validate0000644000175000017500000000202515071030124033350 0ustar zuulzuul--- - name: Debug make_validate_marketplace_env when: make_validate_marketplace_env is defined ansible.builtin.debug: var: make_validate_marketplace_env - name: Debug make_validate_marketplace_params when: make_validate_marketplace_params is defined ansible.builtin.debug: var: make_validate_marketplace_params - name: Run validate_marketplace retries: "{{ make_validate_marketplace_retries | default(omit) }}" delay: "{{ make_validate_marketplace_delay | default(omit) }}" until: "{{ make_validate_marketplace_until | default(true) }}" register: "make_validate_marketplace_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make validate_marketplace" dry_run: "{{ make_validate_marketplace_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_validate_marketplace_env|default({})), **(make_validate_marketplace_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_redis_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_redis_de0000644000175000017500000000175015071030124033341 0ustar zuulzuul--- - name: Debug make_redis_deploy_prep_env when: make_redis_deploy_prep_env is defined ansible.builtin.debug: var: make_redis_deploy_prep_env - name: Debug make_redis_deploy_prep_params when: make_redis_deploy_prep_params is defined ansible.builtin.debug: var: make_redis_deploy_prep_params - name: Run redis_deploy_prep retries: "{{ make_redis_deploy_prep_retries | default(omit) }}" delay: "{{ make_redis_deploy_prep_delay | default(omit) }}" until: "{{ make_redis_deploy_prep_until | default(true) }}" register: "make_redis_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make redis_deploy_prep" dry_run: "{{ make_redis_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_redis_deploy_prep_env|default({})), **(make_redis_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_redis_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_redis_de0000644000175000017500000000163515071030124033343 0ustar zuulzuul--- - name: Debug make_redis_deploy_env when: make_redis_deploy_env is defined ansible.builtin.debug: var: make_redis_deploy_env - name: Debug make_redis_deploy_params when: make_redis_deploy_params is defined ansible.builtin.debug: var: make_redis_deploy_params - name: Run redis_deploy retries: "{{ make_redis_deploy_retries | default(omit) }}" delay: "{{ make_redis_deploy_delay | default(omit) }}" until: "{{ make_redis_deploy_until | default(true) }}" register: "make_redis_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make redis_deploy" dry_run: "{{ make_redis_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_redis_deploy_env|default({})), **(make_redis_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_redis_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_redis_de0000644000175000017500000000202515071030124033335 0ustar zuulzuul--- - name: Debug make_redis_deploy_cleanup_env when: make_redis_deploy_cleanup_env is defined ansible.builtin.debug: var: make_redis_deploy_cleanup_env - name: Debug make_redis_deploy_cleanup_params when: make_redis_deploy_cleanup_params is defined ansible.builtin.debug: var: make_redis_deploy_cleanup_params - name: Run redis_deploy_cleanup retries: "{{ make_redis_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_redis_deploy_cleanup_delay | default(omit) }}" until: "{{ make_redis_deploy_cleanup_until | default(true) }}" register: "make_redis_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make redis_deploy_cleanup" dry_run: "{{ make_redis_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_redis_deploy_cleanup_env|default({})), **(make_redis_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000017000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_set_slower_etcd_profile.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_set_slow0000644000175000017500000000210215071030124033412 0ustar zuulzuul--- - name: Debug make_set_slower_etcd_profile_env when: make_set_slower_etcd_profile_env is defined ansible.builtin.debug: var: make_set_slower_etcd_profile_env - name: Debug make_set_slower_etcd_profile_params when: make_set_slower_etcd_profile_params is defined ansible.builtin.debug: var: make_set_slower_etcd_profile_params - name: Run set_slower_etcd_profile retries: "{{ make_set_slower_etcd_profile_retries | default(omit) }}" delay: "{{ make_set_slower_etcd_profile_delay | default(omit) }}" until: "{{ make_set_slower_etcd_profile_until | default(true) }}" register: "make_set_slower_etcd_profile_status" cifmw.general.ci_script: output_dir: "{{ 
cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make set_slower_etcd_profile" dry_run: "{{ make_set_slower_etcd_profile_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_set_slower_etcd_profile_env|default({})), **(make_set_slower_etcd_profile_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_download_tools.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_download0000644000175000017500000000170415071030124033371 0ustar zuulzuul--- - name: Debug make_download_tools_env when: make_download_tools_env is defined ansible.builtin.debug: var: make_download_tools_env - name: Debug make_download_tools_params when: make_download_tools_params is defined ansible.builtin.debug: var: make_download_tools_params - name: Run download_tools retries: "{{ make_download_tools_retries | default(omit) }}" delay: "{{ make_download_tools_delay | default(omit) }}" until: "{{ make_download_tools_until | default(true) }}" register: "make_download_tools_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make download_tools" dry_run: "{{ make_download_tools_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_download_tools_env|default({})), **(make_download_tools_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nfs.yml0000644000175000017500000000143715071030124033153 0ustar zuulzuul--- - name: Debug make_nfs_env when: make_nfs_env is defined ansible.builtin.debug: var: make_nfs_env - name: Debug make_nfs_params when: make_nfs_params is defined ansible.builtin.debug: var: make_nfs_params - name: Run nfs retries: "{{ make_nfs_retries | default(omit) }}" delay: "{{ make_nfs_delay | default(omit) }}" until: "{{ make_nfs_until | default(true) }}" register: "make_nfs_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make nfs" dry_run: "{{ make_nfs_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nfs_env|default({})), **(make_nfs_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nfs_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nfs_clea0000644000175000017500000000162715071030124033340 0ustar zuulzuul--- - name: Debug make_nfs_cleanup_env when: make_nfs_cleanup_env is defined ansible.builtin.debug: var: make_nfs_cleanup_env - name: Debug make_nfs_cleanup_params when: make_nfs_cleanup_params is defined ansible.builtin.debug: var: make_nfs_cleanup_params - name: Run nfs_cleanup retries: "{{ make_nfs_cleanup_retries | default(omit) }}" delay: "{{ make_nfs_cleanup_delay | default(omit) }}" until: "{{ make_nfs_cleanup_until | default(true) }}" register: "make_nfs_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: 
"/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make nfs_cleanup" dry_run: "{{ make_nfs_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nfs_cleanup_env|default({})), **(make_nfs_cleanup_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc.yml0000644000175000017500000000143715071030124033134 0ustar zuulzuul--- - name: Debug make_crc_env when: make_crc_env is defined ansible.builtin.debug: var: make_crc_env - name: Debug make_crc_params when: make_crc_params is defined ansible.builtin.debug: var: make_crc_params - name: Run crc retries: "{{ make_crc_retries | default(omit) }}" delay: "{{ make_crc_delay | default(omit) }}" until: "{{ make_crc_until | default(true) }}" register: "make_crc_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make crc" dry_run: "{{ make_crc_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_env|default({})), **(make_crc_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_clea0000644000175000017500000000162715071030124033321 0ustar zuulzuul--- - name: Debug make_crc_cleanup_env when: make_crc_cleanup_env is defined ansible.builtin.debug: var: make_crc_cleanup_env - name: Debug make_crc_cleanup_params when: make_crc_cleanup_params is defined ansible.builtin.debug: var: make_crc_cleanup_params - name: Run crc_cleanup retries: "{{ make_crc_cleanup_retries | default(omit) }}" delay: "{{ make_crc_cleanup_delay | default(omit) }}" until: "{{ make_crc_cleanup_until | default(true) }}" register: "make_crc_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make crc_cleanup" dry_run: "{{ make_crc_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_cleanup_env|default({})), **(make_crc_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_scrub.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_scru0000644000175000017500000000157115071030124033367 0ustar zuulzuul--- - name: Debug make_crc_scrub_env when: make_crc_scrub_env is defined ansible.builtin.debug: var: make_crc_scrub_env - name: Debug make_crc_scrub_params when: make_crc_scrub_params is defined ansible.builtin.debug: var: make_crc_scrub_params - name: Run crc_scrub retries: "{{ make_crc_scrub_retries | default(omit) }}" delay: "{{ make_crc_scrub_delay | default(omit) }}" until: "{{ make_crc_scrub_until | default(true) }}" register: "make_crc_scrub_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make crc_scrub" dry_run: "{{ make_crc_scrub_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_scrub_env|default({})), 
**(make_crc_scrub_params|default({}))) }}" ././@LongLink0000644000000000000000000000017500000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_attach_default_interface.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_atta0000644000175000017500000000222615071030124033342 0ustar zuulzuul--- - name: Debug make_crc_attach_default_interface_env when: make_crc_attach_default_interface_env is defined ansible.builtin.debug: var: make_crc_attach_default_interface_env - name: Debug make_crc_attach_default_interface_params when: make_crc_attach_default_interface_params is defined ansible.builtin.debug: var: make_crc_attach_default_interface_params - name: Run crc_attach_default_interface retries: "{{ make_crc_attach_default_interface_retries | default(omit) }}" delay: "{{ make_crc_attach_default_interface_delay | default(omit) }}" until: "{{ make_crc_attach_default_interface_until | default(true) }}" register: "make_crc_attach_default_interface_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make crc_attach_default_interface" dry_run: "{{ make_crc_attach_default_interface_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_attach_default_interface_env|default({})), **(make_crc_attach_default_interface_params|default({}))) }}" ././@LongLink0000644000000000000000000000020500000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_attach_default_interface_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_atta0000644000175000017500000000241615071030124033343 0ustar zuulzuul--- - name: Debug make_crc_attach_default_interface_cleanup_env when: make_crc_attach_default_interface_cleanup_env is defined ansible.builtin.debug: var: make_crc_attach_default_interface_cleanup_env - name: Debug make_crc_attach_default_interface_cleanup_params when: make_crc_attach_default_interface_cleanup_params is defined ansible.builtin.debug: var: make_crc_attach_default_interface_cleanup_params - name: Run crc_attach_default_interface_cleanup retries: "{{ make_crc_attach_default_interface_cleanup_retries | default(omit) }}" delay: "{{ make_crc_attach_default_interface_cleanup_delay | default(omit) }}" until: "{{ make_crc_attach_default_interface_cleanup_until | default(true) }}" register: "make_crc_attach_default_interface_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make crc_attach_default_interface_cleanup" dry_run: "{{ make_crc_attach_default_interface_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_attach_default_interface_cleanup_env|default({})), **(make_crc_attach_default_interface_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_network.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000174215071030124033266 0ustar zuulzuul--- - name: Debug 
make_ipv6_lab_network_env when: make_ipv6_lab_network_env is defined ansible.builtin.debug: var: make_ipv6_lab_network_env - name: Debug make_ipv6_lab_network_params when: make_ipv6_lab_network_params is defined ansible.builtin.debug: var: make_ipv6_lab_network_params - name: Run ipv6_lab_network retries: "{{ make_ipv6_lab_network_retries | default(omit) }}" delay: "{{ make_ipv6_lab_network_delay | default(omit) }}" until: "{{ make_ipv6_lab_network_until | default(true) }}" register: "make_ipv6_lab_network_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_network" dry_run: "{{ make_ipv6_lab_network_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_network_env|default({})), **(make_ipv6_lab_network_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_network_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000213215071030124033260 0ustar zuulzuul--- - name: Debug make_ipv6_lab_network_cleanup_env when: make_ipv6_lab_network_cleanup_env is defined ansible.builtin.debug: var: make_ipv6_lab_network_cleanup_env - name: Debug make_ipv6_lab_network_cleanup_params when: make_ipv6_lab_network_cleanup_params is defined ansible.builtin.debug: var: make_ipv6_lab_network_cleanup_params - name: Run ipv6_lab_network_cleanup retries: "{{ make_ipv6_lab_network_cleanup_retries | default(omit) }}" delay: "{{ make_ipv6_lab_network_cleanup_delay | default(omit) }}" until: "{{ make_ipv6_lab_network_cleanup_until | default(true) }}" register: "make_ipv6_lab_network_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_network_cleanup" dry_run: "{{ make_ipv6_lab_network_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_network_cleanup_env|default({})), **(make_ipv6_lab_network_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_nat64_router.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000205515071030124033264 0ustar zuulzuul--- - name: Debug make_ipv6_lab_nat64_router_env when: make_ipv6_lab_nat64_router_env is defined ansible.builtin.debug: var: make_ipv6_lab_nat64_router_env - name: Debug make_ipv6_lab_nat64_router_params when: make_ipv6_lab_nat64_router_params is defined ansible.builtin.debug: var: make_ipv6_lab_nat64_router_params - name: Run ipv6_lab_nat64_router retries: "{{ make_ipv6_lab_nat64_router_retries | default(omit) }}" delay: "{{ make_ipv6_lab_nat64_router_delay | default(omit) }}" until: "{{ make_ipv6_lab_nat64_router_until | default(true) }}" register: "make_ipv6_lab_nat64_router_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_nat64_router" 
dry_run: "{{ make_ipv6_lab_nat64_router_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_nat64_router_env|default({})), **(make_ipv6_lab_nat64_router_params|default({}))) }}" ././@LongLink0000644000000000000000000000017600000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_nat64_router_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000224515071030124033265 0ustar zuulzuul--- - name: Debug make_ipv6_lab_nat64_router_cleanup_env when: make_ipv6_lab_nat64_router_cleanup_env is defined ansible.builtin.debug: var: make_ipv6_lab_nat64_router_cleanup_env - name: Debug make_ipv6_lab_nat64_router_cleanup_params when: make_ipv6_lab_nat64_router_cleanup_params is defined ansible.builtin.debug: var: make_ipv6_lab_nat64_router_cleanup_params - name: Run ipv6_lab_nat64_router_cleanup retries: "{{ make_ipv6_lab_nat64_router_cleanup_retries | default(omit) }}" delay: "{{ make_ipv6_lab_nat64_router_cleanup_delay | default(omit) }}" until: "{{ make_ipv6_lab_nat64_router_cleanup_until | default(true) }}" register: "make_ipv6_lab_nat64_router_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_nat64_router_cleanup" dry_run: "{{ make_ipv6_lab_nat64_router_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_nat64_router_cleanup_env|default({})), **(make_ipv6_lab_nat64_router_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_sno.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000164615071030124033271 0ustar zuulzuul--- - name: Debug make_ipv6_lab_sno_env when: make_ipv6_lab_sno_env is defined ansible.builtin.debug: var: make_ipv6_lab_sno_env - name: Debug make_ipv6_lab_sno_params when: make_ipv6_lab_sno_params is defined ansible.builtin.debug: var: make_ipv6_lab_sno_params - name: Run ipv6_lab_sno retries: "{{ make_ipv6_lab_sno_retries | default(omit) }}" delay: "{{ make_ipv6_lab_sno_delay | default(omit) }}" until: "{{ make_ipv6_lab_sno_until | default(true) }}" register: "make_ipv6_lab_sno_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_sno" dry_run: "{{ make_ipv6_lab_sno_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_sno_env|default({})), **(make_ipv6_lab_sno_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_sno_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000203615071030124033263 0ustar zuulzuul--- - name: Debug make_ipv6_lab_sno_cleanup_env when: make_ipv6_lab_sno_cleanup_env is defined ansible.builtin.debug: var: make_ipv6_lab_sno_cleanup_env - name: Debug make_ipv6_lab_sno_cleanup_params when: make_ipv6_lab_sno_cleanup_params is defined 
ansible.builtin.debug: var: make_ipv6_lab_sno_cleanup_params - name: Run ipv6_lab_sno_cleanup retries: "{{ make_ipv6_lab_sno_cleanup_retries | default(omit) }}" delay: "{{ make_ipv6_lab_sno_cleanup_delay | default(omit) }}" until: "{{ make_ipv6_lab_sno_cleanup_until | default(true) }}" register: "make_ipv6_lab_sno_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_sno_cleanup" dry_run: "{{ make_ipv6_lab_sno_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_sno_cleanup_env|default({})), **(make_ipv6_lab_sno_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000155215071030124033265 0ustar zuulzuul--- - name: Debug make_ipv6_lab_env when: make_ipv6_lab_env is defined ansible.builtin.debug: var: make_ipv6_lab_env - name: Debug make_ipv6_lab_params when: make_ipv6_lab_params is defined ansible.builtin.debug: var: make_ipv6_lab_params - name: Run ipv6_lab retries: "{{ make_ipv6_lab_retries | default(omit) }}" delay: "{{ make_ipv6_lab_delay | default(omit) }}" until: "{{ make_ipv6_lab_until | default(true) }}" register: "make_ipv6_lab_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab" dry_run: "{{ make_ipv6_lab_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_env|default({})), **(make_ipv6_lab_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000174215071030124033266 0ustar zuulzuul--- - name: Debug make_ipv6_lab_cleanup_env when: make_ipv6_lab_cleanup_env is defined ansible.builtin.debug: var: make_ipv6_lab_cleanup_env - name: Debug make_ipv6_lab_cleanup_params when: make_ipv6_lab_cleanup_params is defined ansible.builtin.debug: var: make_ipv6_lab_cleanup_params - name: Run ipv6_lab_cleanup retries: "{{ make_ipv6_lab_cleanup_retries | default(omit) }}" delay: "{{ make_ipv6_lab_cleanup_delay | default(omit) }}" until: "{{ make_ipv6_lab_cleanup_until | default(true) }}" register: "make_ipv6_lab_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_cleanup" dry_run: "{{ make_ipv6_lab_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_cleanup_env|default({})), **(make_ipv6_lab_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_attach_default_interface.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_attach_d0000644000175000017500000000213215071030124033325 0ustar zuulzuul--- - name: Debug make_attach_default_interface_env when: make_attach_default_interface_env is defined ansible.builtin.debug: var: make_attach_default_interface_env - name: Debug make_attach_default_interface_params when: make_attach_default_interface_params is defined ansible.builtin.debug: var: make_attach_default_interface_params - name: Run attach_default_interface retries: "{{ make_attach_default_interface_retries | default(omit) }}" delay: "{{ make_attach_default_interface_delay | default(omit) }}" until: "{{ make_attach_default_interface_until | default(true) }}" register: "make_attach_default_interface_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make attach_default_interface" dry_run: "{{ make_attach_default_interface_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_attach_default_interface_env|default({})), **(make_attach_default_interface_params|default({}))) }}" ././@LongLink0000644000000000000000000000020100000000000011574 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_attach_default_interface_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_attach_d0000644000175000017500000000232215071030124033326 0ustar zuulzuul--- - name: Debug make_attach_default_interface_cleanup_env when: make_attach_default_interface_cleanup_env is defined ansible.builtin.debug: var: make_attach_default_interface_cleanup_env - name: Debug make_attach_default_interface_cleanup_params when: make_attach_default_interface_cleanup_params is defined ansible.builtin.debug: var: make_attach_default_interface_cleanup_params - name: Run attach_default_interface_cleanup retries: "{{ make_attach_default_interface_cleanup_retries | default(omit) }}" delay: "{{ make_attach_default_interface_cleanup_delay | default(omit) }}" until: "{{ make_attach_default_interface_cleanup_until | default(true) }}" register: "make_attach_default_interface_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make attach_default_interface_cleanup" dry_run: "{{ make_attach_default_interface_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_attach_default_interface_cleanup_env|default({})), **(make_attach_default_interface_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_network_isolation_bridge.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_network_0000644000175000017500000000213215071030124033406 0ustar zuulzuul--- - name: Debug make_network_isolation_bridge_env when: make_network_isolation_bridge_env is defined ansible.builtin.debug: var: make_network_isolation_bridge_env - name: Debug make_network_isolation_bridge_params when: make_network_isolation_bridge_params is defined 
ansible.builtin.debug: var: make_network_isolation_bridge_params - name: Run network_isolation_bridge retries: "{{ make_network_isolation_bridge_retries | default(omit) }}" delay: "{{ make_network_isolation_bridge_delay | default(omit) }}" until: "{{ make_network_isolation_bridge_until | default(true) }}" register: "make_network_isolation_bridge_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make network_isolation_bridge" dry_run: "{{ make_network_isolation_bridge_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_network_isolation_bridge_env|default({})), **(make_network_isolation_bridge_params|default({}))) }}" ././@LongLink0000644000000000000000000000020100000000000011574 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_network_isolation_bridge_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_network_0000644000175000017500000000232215071030124033407 0ustar zuulzuul--- - name: Debug make_network_isolation_bridge_cleanup_env when: make_network_isolation_bridge_cleanup_env is defined ansible.builtin.debug: var: make_network_isolation_bridge_cleanup_env - name: Debug make_network_isolation_bridge_cleanup_params when: make_network_isolation_bridge_cleanup_params is defined ansible.builtin.debug: var: make_network_isolation_bridge_cleanup_params - name: Run network_isolation_bridge_cleanup retries: "{{ make_network_isolation_bridge_cleanup_retries | default(omit) }}" delay: "{{ make_network_isolation_bridge_cleanup_delay | default(omit) }}" until: "{{ make_network_isolation_bridge_cleanup_until | default(true) }}" register: "make_network_isolation_bridge_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make network_isolation_bridge_cleanup" dry_run: "{{ make_network_isolation_bridge_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_network_isolation_bridge_cleanup_env|default({})), **(make_network_isolation_bridge_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016700000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_baremetal_compute.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_bar0000644000175000017500000000207415071030124033334 0ustar zuulzuul--- - name: Debug make_edpm_baremetal_compute_env when: make_edpm_baremetal_compute_env is defined ansible.builtin.debug: var: make_edpm_baremetal_compute_env - name: Debug make_edpm_baremetal_compute_params when: make_edpm_baremetal_compute_params is defined ansible.builtin.debug: var: make_edpm_baremetal_compute_params - name: Run edpm_baremetal_compute retries: "{{ make_edpm_baremetal_compute_retries | default(omit) }}" delay: "{{ make_edpm_baremetal_compute_delay | default(omit) }}" until: "{{ make_edpm_baremetal_compute_until | default(true) }}" register: "make_edpm_baremetal_compute_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make 
edpm_baremetal_compute" dry_run: "{{ make_edpm_baremetal_compute_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_baremetal_compute_env|default({})), **(make_edpm_baremetal_compute_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_compute.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_com0000644000175000017500000000164615071030124033352 0ustar zuulzuul--- - name: Debug make_edpm_compute_env when: make_edpm_compute_env is defined ansible.builtin.debug: var: make_edpm_compute_env - name: Debug make_edpm_compute_params when: make_edpm_compute_params is defined ansible.builtin.debug: var: make_edpm_compute_params - name: Run edpm_compute retries: "{{ make_edpm_compute_retries | default(omit) }}" delay: "{{ make_edpm_compute_delay | default(omit) }}" until: "{{ make_edpm_compute_until | default(true) }}" register: "make_edpm_compute_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_compute" dry_run: "{{ make_edpm_compute_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_compute_env|default({})), **(make_edpm_compute_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_compute_bootc.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_com0000644000175000017500000000200015071030124033333 0ustar zuulzuul--- - name: Debug make_edpm_compute_bootc_env when: make_edpm_compute_bootc_env is defined ansible.builtin.debug: var: make_edpm_compute_bootc_env - name: Debug make_edpm_compute_bootc_params when: make_edpm_compute_bootc_params is defined ansible.builtin.debug: var: make_edpm_compute_bootc_params - name: Run edpm_compute_bootc retries: "{{ make_edpm_compute_bootc_retries | default(omit) }}" delay: "{{ make_edpm_compute_bootc_delay | default(omit) }}" until: "{{ make_edpm_compute_bootc_until | default(true) }}" register: "make_edpm_compute_bootc_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_compute_bootc" dry_run: "{{ make_edpm_compute_bootc_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_compute_bootc_env|default({})), **(make_edpm_compute_bootc_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_ansible_runner.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_ans0000644000175000017500000000201715071030124033346 0ustar zuulzuul--- - name: Debug make_edpm_ansible_runner_env when: make_edpm_ansible_runner_env is defined ansible.builtin.debug: var: make_edpm_ansible_runner_env - name: Debug make_edpm_ansible_runner_params when: make_edpm_ansible_runner_params is defined ansible.builtin.debug: var: make_edpm_ansible_runner_params - name: Run edpm_ansible_runner retries: "{{ make_edpm_ansible_runner_retries | default(omit) }}" 
delay: "{{ make_edpm_ansible_runner_delay | default(omit) }}" until: "{{ make_edpm_ansible_runner_until | default(true) }}" register: "make_edpm_ansible_runner_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_ansible_runner" dry_run: "{{ make_edpm_ansible_runner_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_ansible_runner_env|default({})), **(make_edpm_ansible_runner_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_computes_bgp.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_com0000644000175000017500000000176115071030124033350 0ustar zuulzuul--- - name: Debug make_edpm_computes_bgp_env when: make_edpm_computes_bgp_env is defined ansible.builtin.debug: var: make_edpm_computes_bgp_env - name: Debug make_edpm_computes_bgp_params when: make_edpm_computes_bgp_params is defined ansible.builtin.debug: var: make_edpm_computes_bgp_params - name: Run edpm_computes_bgp retries: "{{ make_edpm_computes_bgp_retries | default(omit) }}" delay: "{{ make_edpm_computes_bgp_delay | default(omit) }}" until: "{{ make_edpm_computes_bgp_until | default(true) }}" register: "make_edpm_computes_bgp_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_computes_bgp" dry_run: "{{ make_edpm_computes_bgp_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_computes_bgp_env|default({})), **(make_edpm_computes_bgp_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_compute_repos.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_com0000644000175000017500000000200015071030124033333 0ustar zuulzuul--- - name: Debug make_edpm_compute_repos_env when: make_edpm_compute_repos_env is defined ansible.builtin.debug: var: make_edpm_compute_repos_env - name: Debug make_edpm_compute_repos_params when: make_edpm_compute_repos_params is defined ansible.builtin.debug: var: make_edpm_compute_repos_params - name: Run edpm_compute_repos retries: "{{ make_edpm_compute_repos_retries | default(omit) }}" delay: "{{ make_edpm_compute_repos_delay | default(omit) }}" until: "{{ make_edpm_compute_repos_until | default(true) }}" register: "make_edpm_compute_repos_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_compute_repos" dry_run: "{{ make_edpm_compute_repos_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_compute_repos_env|default({})), **(make_edpm_compute_repos_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_compute_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_com0000644000175000017500000000203615071030124033344 0ustar zuulzuul--- - name: Debug make_edpm_compute_cleanup_env when: make_edpm_compute_cleanup_env is defined ansible.builtin.debug: var: make_edpm_compute_cleanup_env - name: Debug make_edpm_compute_cleanup_params when: make_edpm_compute_cleanup_params is defined ansible.builtin.debug: var: make_edpm_compute_cleanup_params - name: Run edpm_compute_cleanup retries: "{{ make_edpm_compute_cleanup_retries | default(omit) }}" delay: "{{ make_edpm_compute_cleanup_delay | default(omit) }}" until: "{{ make_edpm_compute_cleanup_until | default(true) }}" register: "make_edpm_compute_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_compute_cleanup" dry_run: "{{ make_edpm_compute_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_compute_cleanup_env|default({})), **(make_edpm_compute_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_networker.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_net0000644000175000017500000000170415071030124033355 0ustar zuulzuul--- - name: Debug make_edpm_networker_env when: make_edpm_networker_env is defined ansible.builtin.debug: var: make_edpm_networker_env - name: Debug make_edpm_networker_params when: make_edpm_networker_params is defined ansible.builtin.debug: var: make_edpm_networker_params - name: Run edpm_networker retries: "{{ make_edpm_networker_retries | default(omit) }}" delay: "{{ make_edpm_networker_delay | default(omit) }}" until: "{{ make_edpm_networker_until | default(true) }}" register: "make_edpm_networker_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_networker" dry_run: "{{ make_edpm_networker_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_networker_env|default({})), **(make_edpm_networker_params|default({}))) }}" ././@LongLink0000644000000000000000000000016700000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_networker_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_net0000644000175000017500000000207415071030124033356 0ustar zuulzuul--- - name: Debug make_edpm_networker_cleanup_env when: make_edpm_networker_cleanup_env is defined ansible.builtin.debug: var: make_edpm_networker_cleanup_env - name: Debug make_edpm_networker_cleanup_params when: make_edpm_networker_cleanup_params is defined ansible.builtin.debug: var: make_edpm_networker_cleanup_params - name: Run edpm_networker_cleanup retries: "{{ make_edpm_networker_cleanup_retries | default(omit) }}" delay: "{{ make_edpm_networker_cleanup_delay | default(omit) }}" until: "{{ make_edpm_networker_cleanup_until | default(true) }}" register: "make_edpm_networker_cleanup_status" 
cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_networker_cleanup" dry_run: "{{ make_edpm_networker_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_networker_cleanup_env|default({})), **(make_edpm_networker_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_instance.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_dep0000644000175000017500000000203615071030124033336 0ustar zuulzuul--- - name: Debug make_edpm_deploy_instance_env when: make_edpm_deploy_instance_env is defined ansible.builtin.debug: var: make_edpm_deploy_instance_env - name: Debug make_edpm_deploy_instance_params when: make_edpm_deploy_instance_params is defined ansible.builtin.debug: var: make_edpm_deploy_instance_params - name: Run edpm_deploy_instance retries: "{{ make_edpm_deploy_instance_retries | default(omit) }}" delay: "{{ make_edpm_deploy_instance_delay | default(omit) }}" until: "{{ make_edpm_deploy_instance_until | default(true) }}" register: "make_edpm_deploy_instance_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_deploy_instance" dry_run: "{{ make_edpm_deploy_instance_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_deploy_instance_env|default({})), **(make_edpm_deploy_instance_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_tripleo_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_tripleo_0000644000175000017500000000170415071030124033377 0ustar zuulzuul--- - name: Debug make_tripleo_deploy_env when: make_tripleo_deploy_env is defined ansible.builtin.debug: var: make_tripleo_deploy_env - name: Debug make_tripleo_deploy_params when: make_tripleo_deploy_params is defined ansible.builtin.debug: var: make_tripleo_deploy_params - name: Run tripleo_deploy retries: "{{ make_tripleo_deploy_retries | default(omit) }}" delay: "{{ make_tripleo_deploy_delay | default(omit) }}" until: "{{ make_tripleo_deploy_until | default(true) }}" register: "make_tripleo_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make tripleo_deploy" dry_run: "{{ make_tripleo_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_tripleo_deploy_env|default({})), **(make_tripleo_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalone_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalo0000644000175000017500000000176115071030124033372 0ustar zuulzuul--- - name: Debug make_standalone_deploy_env when: make_standalone_deploy_env is defined ansible.builtin.debug: var: 
make_standalone_deploy_env - name: Debug make_standalone_deploy_params when: make_standalone_deploy_params is defined ansible.builtin.debug: var: make_standalone_deploy_params - name: Run standalone_deploy retries: "{{ make_standalone_deploy_retries | default(omit) }}" delay: "{{ make_standalone_deploy_delay | default(omit) }}" until: "{{ make_standalone_deploy_until | default(true) }}" register: "make_standalone_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make standalone_deploy" dry_run: "{{ make_standalone_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_standalone_deploy_env|default({})), **(make_standalone_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalone_sync.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalo0000644000175000017500000000172315071030124033370 0ustar zuulzuul--- - name: Debug make_standalone_sync_env when: make_standalone_sync_env is defined ansible.builtin.debug: var: make_standalone_sync_env - name: Debug make_standalone_sync_params when: make_standalone_sync_params is defined ansible.builtin.debug: var: make_standalone_sync_params - name: Run standalone_sync retries: "{{ make_standalone_sync_retries | default(omit) }}" delay: "{{ make_standalone_sync_delay | default(omit) }}" until: "{{ make_standalone_sync_until | default(true) }}" register: "make_standalone_sync_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make standalone_sync" dry_run: "{{ make_standalone_sync_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_standalone_sync_env|default({})), **(make_standalone_sync_params|default({}))) }}" ././@LongLink0000644000000000000000000000015300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalone.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalo0000644000175000017500000000161015071030124033363 0ustar zuulzuul--- - name: Debug make_standalone_env when: make_standalone_env is defined ansible.builtin.debug: var: make_standalone_env - name: Debug make_standalone_params when: make_standalone_params is defined ansible.builtin.debug: var: make_standalone_params - name: Run standalone retries: "{{ make_standalone_retries | default(omit) }}" delay: "{{ make_standalone_delay | default(omit) }}" until: "{{ make_standalone_until | default(true) }}" register: "make_standalone_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make standalone" dry_run: "{{ make_standalone_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_standalone_env|default({})), **(make_standalone_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalone_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalo0000644000175000017500000000200015071030124033355 0ustar zuulzuul--- - name: Debug make_standalone_cleanup_env when: make_standalone_cleanup_env is defined ansible.builtin.debug: var: make_standalone_cleanup_env - name: Debug make_standalone_cleanup_params when: make_standalone_cleanup_params is defined ansible.builtin.debug: var: make_standalone_cleanup_params - name: Run standalone_cleanup retries: "{{ make_standalone_cleanup_retries | default(omit) }}" delay: "{{ make_standalone_cleanup_delay | default(omit) }}" until: "{{ make_standalone_cleanup_until | default(true) }}" register: "make_standalone_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make standalone_cleanup" dry_run: "{{ make_standalone_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_standalone_cleanup_env|default({})), **(make_standalone_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalone_snapshot.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalo0000644000175000017500000000201715071030124033365 0ustar zuulzuul--- - name: Debug make_standalone_snapshot_env when: make_standalone_snapshot_env is defined ansible.builtin.debug: var: make_standalone_snapshot_env - name: Debug make_standalone_snapshot_params when: make_standalone_snapshot_params is defined ansible.builtin.debug: var: make_standalone_snapshot_params - name: Run standalone_snapshot retries: "{{ make_standalone_snapshot_retries | default(omit) }}" delay: "{{ make_standalone_snapshot_delay | default(omit) }}" until: "{{ make_standalone_snapshot_until | default(true) }}" register: "make_standalone_snapshot_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make standalone_snapshot" dry_run: "{{ make_standalone_snapshot_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_standalone_snapshot_env|default({})), **(make_standalone_snapshot_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalone_revert.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalo0000644000175000017500000000176115071030124033372 0ustar zuulzuul--- - name: Debug make_standalone_revert_env when: make_standalone_revert_env is defined ansible.builtin.debug: var: make_standalone_revert_env - name: Debug make_standalone_revert_params when: make_standalone_revert_params is defined ansible.builtin.debug: var: make_standalone_revert_params - name: Run standalone_revert retries: "{{ make_standalone_revert_retries | default(omit) }}" delay: "{{ make_standalone_revert_delay | default(omit) }}" until: "{{ make_standalone_revert_until | default(true) }}" register: "make_standalone_revert_status" cifmw.general.ci_script: 
output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make standalone_revert" dry_run: "{{ make_standalone_revert_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_standalone_revert_env|default({})), **(make_standalone_revert_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cifmw_prepare.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cifmw_pr0000644000175000017500000000166515071030124033376 0ustar zuulzuul--- - name: Debug make_cifmw_prepare_env when: make_cifmw_prepare_env is defined ansible.builtin.debug: var: make_cifmw_prepare_env - name: Debug make_cifmw_prepare_params when: make_cifmw_prepare_params is defined ansible.builtin.debug: var: make_cifmw_prepare_params - name: Run cifmw_prepare retries: "{{ make_cifmw_prepare_retries | default(omit) }}" delay: "{{ make_cifmw_prepare_delay | default(omit) }}" until: "{{ make_cifmw_prepare_until | default(true) }}" register: "make_cifmw_prepare_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make cifmw_prepare" dry_run: "{{ make_cifmw_prepare_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cifmw_prepare_env|default({})), **(make_cifmw_prepare_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cifmw_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cifmw_cl0000644000175000017500000000166515071030124033353 0ustar zuulzuul--- - name: Debug make_cifmw_cleanup_env when: make_cifmw_cleanup_env is defined ansible.builtin.debug: var: make_cifmw_cleanup_env - name: Debug make_cifmw_cleanup_params when: make_cifmw_cleanup_params is defined ansible.builtin.debug: var: make_cifmw_cleanup_params - name: Run cifmw_cleanup retries: "{{ make_cifmw_cleanup_retries | default(omit) }}" delay: "{{ make_cifmw_cleanup_delay | default(omit) }}" until: "{{ make_cifmw_cleanup_until | default(true) }}" register: "make_cifmw_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make cifmw_cleanup" dry_run: "{{ make_cifmw_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cifmw_cleanup_env|default({})), **(make_cifmw_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_network.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_ne0000644000175000017500000000166515071030124033335 0ustar zuulzuul--- - name: Debug make_bmaas_network_env when: make_bmaas_network_env is defined ansible.builtin.debug: var: make_bmaas_network_env - name: Debug make_bmaas_network_params when: make_bmaas_network_params is defined ansible.builtin.debug: var: make_bmaas_network_params - name: Run bmaas_network retries: 
"{{ make_bmaas_network_retries | default(omit) }}" delay: "{{ make_bmaas_network_delay | default(omit) }}" until: "{{ make_bmaas_network_until | default(true) }}" register: "make_bmaas_network_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_network" dry_run: "{{ make_bmaas_network_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_network_env|default({})), **(make_bmaas_network_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_network_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_ne0000644000175000017500000000205515071030124033327 0ustar zuulzuul--- - name: Debug make_bmaas_network_cleanup_env when: make_bmaas_network_cleanup_env is defined ansible.builtin.debug: var: make_bmaas_network_cleanup_env - name: Debug make_bmaas_network_cleanup_params when: make_bmaas_network_cleanup_params is defined ansible.builtin.debug: var: make_bmaas_network_cleanup_params - name: Run bmaas_network_cleanup retries: "{{ make_bmaas_network_cleanup_retries | default(omit) }}" delay: "{{ make_bmaas_network_cleanup_delay | default(omit) }}" until: "{{ make_bmaas_network_cleanup_until | default(true) }}" register: "make_bmaas_network_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_network_cleanup" dry_run: "{{ make_bmaas_network_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_network_cleanup_env|default({})), **(make_bmaas_network_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000020700000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_route_crc_and_crc_bmaas_networks.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_ro0000644000175000017500000000245415071030124033350 0ustar zuulzuul--- - name: Debug make_bmaas_route_crc_and_crc_bmaas_networks_env when: make_bmaas_route_crc_and_crc_bmaas_networks_env is defined ansible.builtin.debug: var: make_bmaas_route_crc_and_crc_bmaas_networks_env - name: Debug make_bmaas_route_crc_and_crc_bmaas_networks_params when: make_bmaas_route_crc_and_crc_bmaas_networks_params is defined ansible.builtin.debug: var: make_bmaas_route_crc_and_crc_bmaas_networks_params - name: Run bmaas_route_crc_and_crc_bmaas_networks retries: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_retries | default(omit) }}" delay: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_delay | default(omit) }}" until: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_until | default(true) }}" register: "make_bmaas_route_crc_and_crc_bmaas_networks_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_route_crc_and_crc_bmaas_networks" dry_run: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_dryrun|default(false)|bool }}" extra_args: "{{ 
dict((make_bmaas_route_crc_and_crc_bmaas_networks_env|default({})), **(make_bmaas_route_crc_and_crc_bmaas_networks_params|default({}))) }}" ././@LongLink0000644000000000000000000000021700000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_route_crc_and_crc_bmaas_networks_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_ro0000644000175000017500000000264415071030124033351 0ustar zuulzuul--- - name: Debug make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_env when: make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_env is defined ansible.builtin.debug: var: make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_env - name: Debug make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_params when: make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_params is defined ansible.builtin.debug: var: make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_params - name: Run bmaas_route_crc_and_crc_bmaas_networks_cleanup retries: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_retries | default(omit) }}" delay: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_delay | default(omit) }}" until: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_until | default(true) }}" register: "make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_route_crc_and_crc_bmaas_networks_cleanup" dry_run: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_env|default({})), **(make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_crc_attach_network.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_cr0000644000175000017500000000213215071030124033325 0ustar zuulzuul--- - name: Debug make_bmaas_crc_attach_network_env when: make_bmaas_crc_attach_network_env is defined ansible.builtin.debug: var: make_bmaas_crc_attach_network_env - name: Debug make_bmaas_crc_attach_network_params when: make_bmaas_crc_attach_network_params is defined ansible.builtin.debug: var: make_bmaas_crc_attach_network_params - name: Run bmaas_crc_attach_network retries: "{{ make_bmaas_crc_attach_network_retries | default(omit) }}" delay: "{{ make_bmaas_crc_attach_network_delay | default(omit) }}" until: "{{ make_bmaas_crc_attach_network_until | default(true) }}" register: "make_bmaas_crc_attach_network_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_crc_attach_network" dry_run: "{{ make_bmaas_crc_attach_network_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_crc_attach_network_env|default({})), **(make_bmaas_crc_attach_network_params|default({}))) }}" ././@LongLink0000644000000000000000000000020100000000000011574 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_crc_attach_network_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_cr0000644000175000017500000000232215071030124033326 0ustar zuulzuul--- - name: Debug make_bmaas_crc_attach_network_cleanup_env when: make_bmaas_crc_attach_network_cleanup_env is defined ansible.builtin.debug: var: make_bmaas_crc_attach_network_cleanup_env - name: Debug make_bmaas_crc_attach_network_cleanup_params when: make_bmaas_crc_attach_network_cleanup_params is defined ansible.builtin.debug: var: make_bmaas_crc_attach_network_cleanup_params - name: Run bmaas_crc_attach_network_cleanup retries: "{{ make_bmaas_crc_attach_network_cleanup_retries | default(omit) }}" delay: "{{ make_bmaas_crc_attach_network_cleanup_delay | default(omit) }}" until: "{{ make_bmaas_crc_attach_network_cleanup_until | default(true) }}" register: "make_bmaas_crc_attach_network_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_crc_attach_network_cleanup" dry_run: "{{ make_bmaas_crc_attach_network_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_crc_attach_network_cleanup_env|default({})), **(make_bmaas_crc_attach_network_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000017300000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_crc_baremetal_bridge.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_cr0000644000175000017500000000217015071030124033327 0ustar zuulzuul--- - name: Debug make_bmaas_crc_baremetal_bridge_env when: make_bmaas_crc_baremetal_bridge_env is defined ansible.builtin.debug: var: make_bmaas_crc_baremetal_bridge_env - name: Debug make_bmaas_crc_baremetal_bridge_params when: make_bmaas_crc_baremetal_bridge_params is defined ansible.builtin.debug: var: make_bmaas_crc_baremetal_bridge_params - name: Run bmaas_crc_baremetal_bridge retries: "{{ make_bmaas_crc_baremetal_bridge_retries | default(omit) }}" delay: "{{ make_bmaas_crc_baremetal_bridge_delay | default(omit) }}" until: "{{ make_bmaas_crc_baremetal_bridge_until | default(true) }}" register: "make_bmaas_crc_baremetal_bridge_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_crc_baremetal_bridge" dry_run: "{{ make_bmaas_crc_baremetal_bridge_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_crc_baremetal_bridge_env|default({})), **(make_bmaas_crc_baremetal_bridge_params|default({}))) }}" ././@LongLink0000644000000000000000000000020300000000000011576 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_crc_baremetal_bridge_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_cr0000644000175000017500000000236015071030124033330 0ustar zuulzuul--- - name: Debug make_bmaas_crc_baremetal_bridge_cleanup_env when: make_bmaas_crc_baremetal_bridge_cleanup_env is defined ansible.builtin.debug: var: make_bmaas_crc_baremetal_bridge_cleanup_env - name: Debug 
make_bmaas_crc_baremetal_bridge_cleanup_params when: make_bmaas_crc_baremetal_bridge_cleanup_params is defined ansible.builtin.debug: var: make_bmaas_crc_baremetal_bridge_cleanup_params - name: Run bmaas_crc_baremetal_bridge_cleanup retries: "{{ make_bmaas_crc_baremetal_bridge_cleanup_retries | default(omit) }}" delay: "{{ make_bmaas_crc_baremetal_bridge_cleanup_delay | default(omit) }}" until: "{{ make_bmaas_crc_baremetal_bridge_cleanup_until | default(true) }}" register: "make_bmaas_crc_baremetal_bridge_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_crc_baremetal_bridge_cleanup" dry_run: "{{ make_bmaas_crc_baremetal_bridge_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_crc_baremetal_bridge_cleanup_env|default({})), **(make_bmaas_crc_baremetal_bridge_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000017000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_baremetal_net_nad.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_ba0000644000175000017500000000211315071030124033302 0ustar zuulzuul--- - name: Debug make_bmaas_baremetal_net_nad_env when: make_bmaas_baremetal_net_nad_env is defined ansible.builtin.debug: var: make_bmaas_baremetal_net_nad_env - name: Debug make_bmaas_baremetal_net_nad_params when: make_bmaas_baremetal_net_nad_params is defined ansible.builtin.debug: var: make_bmaas_baremetal_net_nad_params - name: Run bmaas_baremetal_net_nad retries: "{{ make_bmaas_baremetal_net_nad_retries | default(omit) }}" delay: "{{ make_bmaas_baremetal_net_nad_delay | default(omit) }}" until: "{{ make_bmaas_baremetal_net_nad_until | default(true) }}" register: "make_bmaas_baremetal_net_nad_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_baremetal_net_nad" dry_run: "{{ make_bmaas_baremetal_net_nad_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_baremetal_net_nad_env|default({})), **(make_bmaas_baremetal_net_nad_params|default({}))) }}" ././@LongLink0000644000000000000000000000020000000000000011573 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_baremetal_net_nad_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_ba0000644000175000017500000000230315071030124033303 0ustar zuulzuul--- - name: Debug make_bmaas_baremetal_net_nad_cleanup_env when: make_bmaas_baremetal_net_nad_cleanup_env is defined ansible.builtin.debug: var: make_bmaas_baremetal_net_nad_cleanup_env - name: Debug make_bmaas_baremetal_net_nad_cleanup_params when: make_bmaas_baremetal_net_nad_cleanup_params is defined ansible.builtin.debug: var: make_bmaas_baremetal_net_nad_cleanup_params - name: Run bmaas_baremetal_net_nad_cleanup retries: "{{ make_bmaas_baremetal_net_nad_cleanup_retries | default(omit) }}" delay: "{{ make_bmaas_baremetal_net_nad_cleanup_delay | default(omit) }}" until: "{{ make_bmaas_baremetal_net_nad_cleanup_until | default(true) }}" register: "make_bmaas_baremetal_net_nad_cleanup_status" cifmw.general.ci_script: output_dir: "{{ 
cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup"
    script: "make bmaas_baremetal_net_nad_cleanup"
    dry_run: "{{ make_bmaas_baremetal_net_nad_cleanup_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_bmaas_baremetal_net_nad_cleanup_env|default({})), **(make_bmaas_baremetal_net_nad_cleanup_params|default({}))) }}"

==> roles/install_yamls_makes/tasks/make_bmaas_metallb.yml <==
---
- name: Debug make_bmaas_metallb_env
  when: make_bmaas_metallb_env is defined
  ansible.builtin.debug:
    var: make_bmaas_metallb_env
- name: Debug make_bmaas_metallb_params
  when: make_bmaas_metallb_params is defined
  ansible.builtin.debug:
    var: make_bmaas_metallb_params
- name: Run bmaas_metallb
  retries: "{{ make_bmaas_metallb_retries | default(omit) }}"
  delay: "{{ make_bmaas_metallb_delay | default(omit) }}"
  until: "{{ make_bmaas_metallb_until | default(true) }}"
  register: "make_bmaas_metallb_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup"
    script: "make bmaas_metallb"
    dry_run: "{{ make_bmaas_metallb_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_bmaas_metallb_env|default({})), **(make_bmaas_metallb_params|default({}))) }}"

==> roles/install_yamls_makes/tasks/make_bmaas_metallb_cleanup.yml <==
---
- name: Debug make_bmaas_metallb_cleanup_env
  when: make_bmaas_metallb_cleanup_env is defined
  ansible.builtin.debug:
    var: make_bmaas_metallb_cleanup_env
- name: Debug make_bmaas_metallb_cleanup_params
  when: make_bmaas_metallb_cleanup_params is defined
  ansible.builtin.debug:
    var: make_bmaas_metallb_cleanup_params
- name: Run bmaas_metallb_cleanup
  retries: "{{ make_bmaas_metallb_cleanup_retries | default(omit) }}"
  delay: "{{ make_bmaas_metallb_cleanup_delay | default(omit) }}"
  until: "{{ make_bmaas_metallb_cleanup_until | default(true) }}"
  register: "make_bmaas_metallb_cleanup_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup"
    script: "make bmaas_metallb_cleanup"
    dry_run: "{{ make_bmaas_metallb_cleanup_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_bmaas_metallb_cleanup_env|default({})), **(make_bmaas_metallb_cleanup_params|default({}))) }}"

==> roles/install_yamls_makes/tasks/make_bmaas_virtual_bms.yml <==
---
- name: Debug make_bmaas_virtual_bms_env
  when: make_bmaas_virtual_bms_env is defined
  ansible.builtin.debug:
    var: make_bmaas_virtual_bms_env
- name: Debug make_bmaas_virtual_bms_params
  when: make_bmaas_virtual_bms_params is defined
  ansible.builtin.debug:
    var: make_bmaas_virtual_bms_params
- name: Run bmaas_virtual_bms
  retries: "{{ make_bmaas_virtual_bms_retries | default(omit) }}"
  delay: "{{ make_bmaas_virtual_bms_delay | default(omit) }}"
  until: "{{ make_bmaas_virtual_bms_until | default(true) }}"
  register: "make_bmaas_virtual_bms_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup"
    script: "make bmaas_virtual_bms"
    dry_run: "{{ make_bmaas_virtual_bms_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_bmaas_virtual_bms_env|default({})), **(make_bmaas_virtual_bms_params|default({}))) }}"

==> roles/install_yamls_makes/tasks/make_bmaas_virtual_bms_cleanup.yml <==
---
- name: Debug make_bmaas_virtual_bms_cleanup_env
  when: make_bmaas_virtual_bms_cleanup_env is defined
  ansible.builtin.debug:
    var: make_bmaas_virtual_bms_cleanup_env
- name: Debug make_bmaas_virtual_bms_cleanup_params
  when: make_bmaas_virtual_bms_cleanup_params is defined
  ansible.builtin.debug:
    var: make_bmaas_virtual_bms_cleanup_params
- name: Run bmaas_virtual_bms_cleanup
  retries: "{{ make_bmaas_virtual_bms_cleanup_retries | default(omit) }}"
  delay: "{{ make_bmaas_virtual_bms_cleanup_delay | default(omit) }}"
  until: "{{ make_bmaas_virtual_bms_cleanup_until | default(true) }}"
  register: "make_bmaas_virtual_bms_cleanup_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup"
    script: "make bmaas_virtual_bms_cleanup"
    dry_run: "{{ make_bmaas_virtual_bms_cleanup_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_bmaas_virtual_bms_cleanup_env|default({})), **(make_bmaas_virtual_bms_cleanup_params|default({}))) }}"

==> roles/install_yamls_makes/tasks/make_bmaas_sushy_emulator.yml <==
---
- name: Debug make_bmaas_sushy_emulator_env
  when: make_bmaas_sushy_emulator_env is defined
  ansible.builtin.debug:
    var: make_bmaas_sushy_emulator_env
- name: Debug make_bmaas_sushy_emulator_params
  when: make_bmaas_sushy_emulator_params is defined
  ansible.builtin.debug:
    var: make_bmaas_sushy_emulator_params
- name: Run bmaas_sushy_emulator
  retries: "{{ make_bmaas_sushy_emulator_retries | default(omit) }}"
  delay: "{{ make_bmaas_sushy_emulator_delay | default(omit) }}"
  until: "{{ make_bmaas_sushy_emulator_until | default(true) }}"
  register: "make_bmaas_sushy_emulator_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup"
    script: "make bmaas_sushy_emulator"
    dry_run: "{{ make_bmaas_sushy_emulator_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_bmaas_sushy_emulator_env|default({})), **(make_bmaas_sushy_emulator_params|default({}))) }}"

==> roles/install_yamls_makes/tasks/make_bmaas_sushy_emulator_cleanup.yml <==
---
- name: Debug make_bmaas_sushy_emulator_cleanup_env
  when: make_bmaas_sushy_emulator_cleanup_env is defined
  ansible.builtin.debug:
    var: make_bmaas_sushy_emulator_cleanup_env
- name: Debug make_bmaas_sushy_emulator_cleanup_params
  when: make_bmaas_sushy_emulator_cleanup_params is defined
  ansible.builtin.debug:
    var: make_bmaas_sushy_emulator_cleanup_params
- name: Run bmaas_sushy_emulator_cleanup
  retries: "{{ make_bmaas_sushy_emulator_cleanup_retries | default(omit) }}"
  delay: "{{ make_bmaas_sushy_emulator_cleanup_delay | default(omit) }}"
  until: "{{ make_bmaas_sushy_emulator_cleanup_until | default(true) }}"
  register: "make_bmaas_sushy_emulator_cleanup_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup"
    script: "make bmaas_sushy_emulator_cleanup"
    dry_run: "{{ make_bmaas_sushy_emulator_cleanup_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_bmaas_sushy_emulator_cleanup_env|default({})), **(make_bmaas_sushy_emulator_cleanup_params|default({}))) }}"

==> roles/install_yamls_makes/tasks/make_bmaas_sushy_emulator_wait.yml <==
---
- name: Debug make_bmaas_sushy_emulator_wait_env
  when: make_bmaas_sushy_emulator_wait_env is defined
  ansible.builtin.debug:
    var: make_bmaas_sushy_emulator_wait_env
- name: Debug make_bmaas_sushy_emulator_wait_params
  when: make_bmaas_sushy_emulator_wait_params is defined
  ansible.builtin.debug:
    var: make_bmaas_sushy_emulator_wait_params
- name: Run bmaas_sushy_emulator_wait
  retries: "{{ make_bmaas_sushy_emulator_wait_retries | default(omit) }}"
  delay: "{{ make_bmaas_sushy_emulator_wait_delay | default(omit) }}"
  until: "{{ make_bmaas_sushy_emulator_wait_until | default(true) }}"
  register: "make_bmaas_sushy_emulator_wait_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup"
    script: "make bmaas_sushy_emulator_wait"
    dry_run: "{{ make_bmaas_sushy_emulator_wait_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_bmaas_sushy_emulator_wait_env|default({})), **(make_bmaas_sushy_emulator_wait_params|default({}))) }}"

==> roles/install_yamls_makes/tasks/make_bmaas_generate_nodes_yaml.yml <==
---
- name: Debug make_bmaas_generate_nodes_yaml_env
  when: make_bmaas_generate_nodes_yaml_env is defined
  ansible.builtin.debug:
    var: make_bmaas_generate_nodes_yaml_env
- name: Debug make_bmaas_generate_nodes_yaml_params
  when: make_bmaas_generate_nodes_yaml_params is defined
  ansible.builtin.debug:
    var: make_bmaas_generate_nodes_yaml_params
- name: Run bmaas_generate_nodes_yaml
  retries: "{{ make_bmaas_generate_nodes_yaml_retries | default(omit) }}"
  delay: "{{ make_bmaas_generate_nodes_yaml_delay | default(omit) }}"
  until: "{{ make_bmaas_generate_nodes_yaml_until | default(true) }}"
  register: "make_bmaas_generate_nodes_yaml_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup"
    script: "make bmaas_generate_nodes_yaml"
    dry_run: "{{ make_bmaas_generate_nodes_yaml_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_bmaas_generate_nodes_yaml_env|default({})), **(make_bmaas_generate_nodes_yaml_params|default({}))) }}"

==> roles/install_yamls_makes/tasks/make_bmaas.yml <==
---
- name: Debug make_bmaas_env
  when: make_bmaas_env is defined
  ansible.builtin.debug:
    var: make_bmaas_env
- name: Debug make_bmaas_params
  when: make_bmaas_params is defined
  ansible.builtin.debug:
    var: make_bmaas_params
- name: Run bmaas
  retries: "{{ make_bmaas_retries | default(omit) }}"
  delay: "{{ make_bmaas_delay | default(omit) }}"
  until: "{{ make_bmaas_until | default(true) }}"
  register: "make_bmaas_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup"
    script: "make bmaas"
    dry_run: "{{ make_bmaas_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_bmaas_env|default({})), **(make_bmaas_params|default({}))) }}"

==> roles/install_yamls_makes/tasks/make_bmaas_cleanup.yml <==
---
- name: Debug make_bmaas_cleanup_env
  when: make_bmaas_cleanup_env is defined
  ansible.builtin.debug:
    var: make_bmaas_cleanup_env
- name: Debug make_bmaas_cleanup_params
  when: make_bmaas_cleanup_params is defined
  ansible.builtin.debug:
    var: make_bmaas_cleanup_params
- name: Run bmaas_cleanup
  retries: "{{ make_bmaas_cleanup_retries | default(omit) }}"
  delay: "{{ make_bmaas_cleanup_delay | default(omit) }}"
  until: "{{ make_bmaas_cleanup_until | default(true) }}"
  register: "make_bmaas_cleanup_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup"
    script: "make bmaas_cleanup"
    dry_run: "{{ make_bmaas_cleanup_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_bmaas_cleanup_env|default({})), **(make_bmaas_cleanup_params|default({}))) }}"
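NOTE (editor's sketch, not part of the collected artifacts): every task file in this role follows the same generated pattern — two optional debug tasks, then a cifmw.general.ci_script call that runs the matching "make" target from install_yamls/devsetup, with per-target variables for retries, dry-run and extra make arguments. The sketch below shows one hypothetical way to drive such a wrapper from a playbook; the play, host, assumed role name and the NETWORK_ISOLATION/TIMEOUT values are illustrative assumptions, while the make_bmaas_metallb_* variable names come from make_bmaas_metallb.yml above.

# Illustrative usage sketch (assumed playbook, not taken from the artifacts)
- hosts: localhost
  gather_facts: true                      # ansible_user_dir feeds the default output_dir
  tasks:
    - name: Run "make bmaas_metallb" via the generated wrapper
      vars:
        make_bmaas_metallb_env:
          NETWORK_ISOLATION: "false"      # hypothetical make variable
        make_bmaas_metallb_params:
          TIMEOUT: "600s"                 # hypothetical make variable; params win on key clashes
        make_bmaas_metallb_dryrun: false  # true would only simulate the run (behaviour is module-dependent)
      ansible.builtin.include_role:
        name: install_yamls_makes          # assumed role name, matching the artifact path
        tasks_from: make_bmaas_metallb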
==> roles/install_yamls_makes/tasks/make_update_system.yml <==
---
- name: Debug make_update_system_env
  when: make_update_system_env is defined
  ansible.builtin.debug:
    var: make_update_system_env
- name: Debug make_update_system_params
  when: make_update_system_params is defined
  ansible.builtin.debug:
    var: make_update_system_params
- name: Run update_system
  retries: "{{ make_update_system_retries | default(omit) }}"
  delay: "{{ make_update_system_delay | default(omit) }}"
  until: "{{ make_update_system_until | default(true) }}"
  register: "make_update_system_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make update_system"
    dry_run: "{{ make_update_system_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_update_system_env|default({})), **(make_update_system_params|default({}))) }}"

==> roles/install_yamls_makes/tasks/make_openstack_patch_version.yml <==
---
- name: Debug make_openstack_patch_version_env
  when: make_openstack_patch_version_env is defined
  ansible.builtin.debug:
    var: make_openstack_patch_version_env
- name: Debug make_openstack_patch_version_params
  when: make_openstack_patch_version_params is defined
  ansible.builtin.debug:
    var: make_openstack_patch_version_params
- name: Run openstack_patch_version
  retries: "{{ make_openstack_patch_version_retries | default(omit) }}"
  delay: "{{ make_openstack_patch_version_delay | default(omit) }}"
  until: "{{ make_openstack_patch_version_until | default(true) }}"
  register: "make_openstack_patch_version_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make openstack_patch_version"
    dry_run: "{{ make_openstack_patch_version_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_openstack_patch_version_env|default({})), **(make_openstack_patch_version_params|default({}))) }}"

==> roles/install_yamls_makes/tasks/make_edpm_deploy_generate_keys.yml <==
---
- name: Debug make_edpm_deploy_generate_keys_env
  when: make_edpm_deploy_generate_keys_env is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_generate_keys_env
- name: Debug make_edpm_deploy_generate_keys_params
  when: make_edpm_deploy_generate_keys_params is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_generate_keys_params
- name: Run edpm_deploy_generate_keys
  retries: "{{ make_edpm_deploy_generate_keys_retries | default(omit) }}"
  delay: "{{ make_edpm_deploy_generate_keys_delay | default(omit) }}"
  until: "{{ make_edpm_deploy_generate_keys_until | default(true) }}"
  register: "make_edpm_deploy_generate_keys_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make edpm_deploy_generate_keys"
    dry_run: "{{ make_edpm_deploy_generate_keys_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_edpm_deploy_generate_keys_env|default({})), **(make_edpm_deploy_generate_keys_params|default({}))) }}"

==> roles/install_yamls_makes/tasks/make_edpm_patch_ansible_runner_image.yml <==
---
- name: Debug make_edpm_patch_ansible_runner_image_env
  when: make_edpm_patch_ansible_runner_image_env is defined
  ansible.builtin.debug:
    var: make_edpm_patch_ansible_runner_image_env
- name: Debug make_edpm_patch_ansible_runner_image_params
  when: make_edpm_patch_ansible_runner_image_params is defined
  ansible.builtin.debug:
    var: make_edpm_patch_ansible_runner_image_params
- name: Run edpm_patch_ansible_runner_image
  retries: "{{ make_edpm_patch_ansible_runner_image_retries | default(omit) }}"
  delay: "{{ make_edpm_patch_ansible_runner_image_delay | default(omit) }}"
  until: "{{ make_edpm_patch_ansible_runner_image_until | default(true) }}"
  register: "make_edpm_patch_ansible_runner_image_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make edpm_patch_ansible_runner_image"
    dry_run: "{{ make_edpm_patch_ansible_runner_image_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_edpm_patch_ansible_runner_image_env|default({})), **(make_edpm_patch_ansible_runner_image_params|default({}))) }}"

==> roles/install_yamls_makes/tasks/make_edpm_deploy_prep.yml <==
---
- name: Debug make_edpm_deploy_prep_env
  when: make_edpm_deploy_prep_env is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_prep_env
- name: Debug make_edpm_deploy_prep_params
  when: make_edpm_deploy_prep_params is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_prep_params
- name: Run edpm_deploy_prep
  retries: "{{ make_edpm_deploy_prep_retries | default(omit) }}"
  delay: "{{ make_edpm_deploy_prep_delay | default(omit) }}"
  until: "{{ make_edpm_deploy_prep_until | default(true) }}"
  register: "make_edpm_deploy_prep_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make edpm_deploy_prep"
    dry_run: "{{ make_edpm_deploy_prep_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_edpm_deploy_prep_env|default({})), **(make_edpm_deploy_prep_params|default({}))) }}"
==> roles/install_yamls_makes/tasks/make_edpm_deploy_cleanup.yml <==
---
- name: Debug make_edpm_deploy_cleanup_env
  when: make_edpm_deploy_cleanup_env is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_cleanup_env
- name: Debug make_edpm_deploy_cleanup_params
  when: make_edpm_deploy_cleanup_params is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_cleanup_params
- name: Run edpm_deploy_cleanup
  retries: "{{ make_edpm_deploy_cleanup_retries | default(omit) }}"
  delay: "{{ make_edpm_deploy_cleanup_delay | default(omit) }}"
  until: "{{ make_edpm_deploy_cleanup_until | default(true) }}"
  register: "make_edpm_deploy_cleanup_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make edpm_deploy_cleanup"
    dry_run: "{{ make_edpm_deploy_cleanup_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_edpm_deploy_cleanup_env|default({})), **(make_edpm_deploy_cleanup_params|default({}))) }}"

==> roles/install_yamls_makes/tasks/make_edpm_deploy.yml <==
---
- name: Debug make_edpm_deploy_env
  when: make_edpm_deploy_env is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_env
- name: Debug make_edpm_deploy_params
  when: make_edpm_deploy_params is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_params
- name: Run edpm_deploy
  retries: "{{ make_edpm_deploy_retries | default(omit) }}"
  delay: "{{ make_edpm_deploy_delay | default(omit) }}"
  until: "{{ make_edpm_deploy_until | default(true) }}"
  register: "make_edpm_deploy_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make edpm_deploy"
    dry_run: "{{ make_edpm_deploy_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_edpm_deploy_env|default({})), **(make_edpm_deploy_params|default({}))) }}"

==> roles/install_yamls_makes/tasks/make_edpm_deploy_baremetal_prep.yml <==
---
- name: Debug make_edpm_deploy_baremetal_prep_env
  when: make_edpm_deploy_baremetal_prep_env is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_baremetal_prep_env
- name: Debug make_edpm_deploy_baremetal_prep_params
  when: make_edpm_deploy_baremetal_prep_params is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_baremetal_prep_params
- name: Run edpm_deploy_baremetal_prep
  retries: "{{ make_edpm_deploy_baremetal_prep_retries | default(omit) }}"
  delay: "{{ make_edpm_deploy_baremetal_prep_delay | default(omit) }}"
  until: "{{ make_edpm_deploy_baremetal_prep_until | default(true) }}"
  register: "make_edpm_deploy_baremetal_prep_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make edpm_deploy_baremetal_prep"
    dry_run: "{{ make_edpm_deploy_baremetal_prep_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_edpm_deploy_baremetal_prep_env|default({})), **(make_edpm_deploy_baremetal_prep_params|default({}))) }}"

==> roles/install_yamls_makes/tasks/make_edpm_deploy_baremetal.yml <==
---
- name: Debug make_edpm_deploy_baremetal_env
  when: make_edpm_deploy_baremetal_env is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_baremetal_env
- name: Debug make_edpm_deploy_baremetal_params
  when: make_edpm_deploy_baremetal_params is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_baremetal_params
- name: Run edpm_deploy_baremetal
  retries: "{{ make_edpm_deploy_baremetal_retries | default(omit) }}"
  delay: "{{ make_edpm_deploy_baremetal_delay | default(omit) }}"
  until: "{{ make_edpm_deploy_baremetal_until | default(true) }}"
  register: "make_edpm_deploy_baremetal_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make edpm_deploy_baremetal"
    dry_run: "{{ make_edpm_deploy_baremetal_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_edpm_deploy_baremetal_env|default({})), **(make_edpm_deploy_baremetal_params|default({}))) }}"

==> roles/install_yamls_makes/tasks/make_edpm_wait_deploy_baremetal.yml <==
---
- name: Debug make_edpm_wait_deploy_baremetal_env
  when: make_edpm_wait_deploy_baremetal_env is defined
  ansible.builtin.debug:
    var: make_edpm_wait_deploy_baremetal_env
- name: Debug make_edpm_wait_deploy_baremetal_params
  when: make_edpm_wait_deploy_baremetal_params is defined
  ansible.builtin.debug:
    var: make_edpm_wait_deploy_baremetal_params
- name: Run edpm_wait_deploy_baremetal
  retries: "{{ make_edpm_wait_deploy_baremetal_retries | default(omit) }}"
  delay: "{{ make_edpm_wait_deploy_baremetal_delay | default(omit) }}"
  until: "{{ make_edpm_wait_deploy_baremetal_until | default(true) }}"
  register: "make_edpm_wait_deploy_baremetal_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make edpm_wait_deploy_baremetal"
    dry_run: "{{ make_edpm_wait_deploy_baremetal_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_edpm_wait_deploy_baremetal_env|default({})), **(make_edpm_wait_deploy_baremetal_params|default({}))) }}"
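NOTE (editor's sketch, not part of the collected artifacts): in every wrapper, extra_args is built with dict(env, **params), so the *_params mapping is unpacked on top of the *_env mapping and wins whenever the same key appears in both; retries/delay/until amount to a single attempt by default because until defaults to true. The small task below only demonstrates that merge with made-up keys and values; it reuses the exact expression from make_edpm_deploy_baremetal.yml above.

# Illustrative merge demo (hypothetical keys/values; the expression is the one used above)
- name: Show how dict(env, **params) merges the two mappings
  vars:
    make_edpm_deploy_baremetal_env:
      NAMESPACE: openstack                # hypothetical make variable
      TIMEOUT: "300s"                     # hypothetical make variable
    make_edpm_deploy_baremetal_params:
      TIMEOUT: "600s"                     # overrides the env value above
  ansible.builtin.debug:
    msg: "{{ dict((make_edpm_deploy_baremetal_env|default({})), **(make_edpm_deploy_baremetal_params|default({}))) }}"
  # prints {"NAMESPACE": "openstack", "TIMEOUT": "600s"}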
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_wait_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_wai0000644000175000017500000000173115071030124033347 0ustar zuulzuul--- - name: Debug make_edpm_wait_deploy_env when: make_edpm_wait_deploy_env is defined ansible.builtin.debug: var: make_edpm_wait_deploy_env - name: Debug make_edpm_wait_deploy_params when: make_edpm_wait_deploy_params is defined ansible.builtin.debug: var: make_edpm_wait_deploy_params - name: Run edpm_wait_deploy retries: "{{ make_edpm_wait_deploy_retries | default(omit) }}" delay: "{{ make_edpm_wait_deploy_delay | default(omit) }}" until: "{{ make_edpm_wait_deploy_until | default(true) }}" register: "make_edpm_wait_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_wait_deploy" dry_run: "{{ make_edpm_wait_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_wait_deploy_env|default({})), **(make_edpm_wait_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_register_dns.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_reg0000644000175000017500000000175015071030124033345 0ustar zuulzuul--- - name: Debug make_edpm_register_dns_env when: make_edpm_register_dns_env is defined ansible.builtin.debug: var: make_edpm_register_dns_env - name: Debug make_edpm_register_dns_params when: make_edpm_register_dns_params is defined ansible.builtin.debug: var: make_edpm_register_dns_params - name: Run edpm_register_dns retries: "{{ make_edpm_register_dns_retries | default(omit) }}" delay: "{{ make_edpm_register_dns_delay | default(omit) }}" until: "{{ make_edpm_register_dns_until | default(true) }}" register: "make_edpm_register_dns_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_register_dns" dry_run: "{{ make_edpm_register_dns_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_register_dns_env|default({})), **(make_edpm_register_dns_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_nova_discover_hosts.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_nov0000644000175000017500000000212115071030124033363 0ustar zuulzuul--- - name: Debug make_edpm_nova_discover_hosts_env when: make_edpm_nova_discover_hosts_env is defined ansible.builtin.debug: var: make_edpm_nova_discover_hosts_env - name: Debug make_edpm_nova_discover_hosts_params when: make_edpm_nova_discover_hosts_params is defined ansible.builtin.debug: var: make_edpm_nova_discover_hosts_params - name: Run edpm_nova_discover_hosts retries: "{{ make_edpm_nova_discover_hosts_retries | default(omit) }}" delay: "{{ make_edpm_nova_discover_hosts_delay | default(omit) }}" until: "{{ make_edpm_nova_discover_hosts_until | default(true) }}" register: "make_edpm_nova_discover_hosts_status" cifmw.general.ci_script: 
output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_nova_discover_hosts" dry_run: "{{ make_edpm_nova_discover_hosts_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_nova_discover_hosts_env|default({})), **(make_edpm_nova_discover_hosts_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_crds.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000167315071030124033403 0ustar zuulzuul--- - name: Debug make_openstack_crds_env when: make_openstack_crds_env is defined ansible.builtin.debug: var: make_openstack_crds_env - name: Debug make_openstack_crds_params when: make_openstack_crds_params is defined ansible.builtin.debug: var: make_openstack_crds_params - name: Run openstack_crds retries: "{{ make_openstack_crds_retries | default(omit) }}" delay: "{{ make_openstack_crds_delay | default(omit) }}" until: "{{ make_openstack_crds_until | default(true) }}" register: "make_openstack_crds_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_crds" dry_run: "{{ make_openstack_crds_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_crds_env|default({})), **(make_openstack_crds_params|default({}))) }}" ././@LongLink0000644000000000000000000000016700000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_crds_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000206315071030124033375 0ustar zuulzuul--- - name: Debug make_openstack_crds_cleanup_env when: make_openstack_crds_cleanup_env is defined ansible.builtin.debug: var: make_openstack_crds_cleanup_env - name: Debug make_openstack_crds_cleanup_params when: make_openstack_crds_cleanup_params is defined ansible.builtin.debug: var: make_openstack_crds_cleanup_params - name: Run openstack_crds_cleanup retries: "{{ make_openstack_crds_cleanup_retries | default(omit) }}" delay: "{{ make_openstack_crds_cleanup_delay | default(omit) }}" until: "{{ make_openstack_crds_cleanup_until | default(true) }}" register: "make_openstack_crds_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_crds_cleanup" dry_run: "{{ make_openstack_crds_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_crds_cleanup_env|default({})), **(make_openstack_crds_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000017300000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_networker_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_dep0000644000175000017500000000215715071030124033342 0ustar zuulzuul--- - name: Debug make_edpm_deploy_networker_prep_env when: make_edpm_deploy_networker_prep_env is defined ansible.builtin.debug: 
var: make_edpm_deploy_networker_prep_env - name: Debug make_edpm_deploy_networker_prep_params when: make_edpm_deploy_networker_prep_params is defined ansible.builtin.debug: var: make_edpm_deploy_networker_prep_params - name: Run edpm_deploy_networker_prep retries: "{{ make_edpm_deploy_networker_prep_retries | default(omit) }}" delay: "{{ make_edpm_deploy_networker_prep_delay | default(omit) }}" until: "{{ make_edpm_deploy_networker_prep_until | default(true) }}" register: "make_edpm_deploy_networker_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_deploy_networker_prep" dry_run: "{{ make_edpm_deploy_networker_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_deploy_networker_prep_env|default({})), **(make_edpm_deploy_networker_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000017600000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_networker_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_dep0000644000175000017500000000223415071030124033336 0ustar zuulzuul--- - name: Debug make_edpm_deploy_networker_cleanup_env when: make_edpm_deploy_networker_cleanup_env is defined ansible.builtin.debug: var: make_edpm_deploy_networker_cleanup_env - name: Debug make_edpm_deploy_networker_cleanup_params when: make_edpm_deploy_networker_cleanup_params is defined ansible.builtin.debug: var: make_edpm_deploy_networker_cleanup_params - name: Run edpm_deploy_networker_cleanup retries: "{{ make_edpm_deploy_networker_cleanup_retries | default(omit) }}" delay: "{{ make_edpm_deploy_networker_cleanup_delay | default(omit) }}" until: "{{ make_edpm_deploy_networker_cleanup_until | default(true) }}" register: "make_edpm_deploy_networker_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_deploy_networker_cleanup" dry_run: "{{ make_edpm_deploy_networker_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_deploy_networker_cleanup_env|default({})), **(make_edpm_deploy_networker_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_networker.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_dep0000644000175000017500000000204415071030124033335 0ustar zuulzuul--- - name: Debug make_edpm_deploy_networker_env when: make_edpm_deploy_networker_env is defined ansible.builtin.debug: var: make_edpm_deploy_networker_env - name: Debug make_edpm_deploy_networker_params when: make_edpm_deploy_networker_params is defined ansible.builtin.debug: var: make_edpm_deploy_networker_params - name: Run edpm_deploy_networker retries: "{{ make_edpm_deploy_networker_retries | default(omit) }}" delay: "{{ make_edpm_deploy_networker_delay | default(omit) }}" until: "{{ make_edpm_deploy_networker_until | default(true) }}" register: "make_edpm_deploy_networker_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: 
"/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_deploy_networker" dry_run: "{{ make_edpm_deploy_networker_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_deploy_networker_env|default({})), **(make_edpm_deploy_networker_params|default({}))) }}" ././@LongLink0000644000000000000000000000015300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra_pr0000644000175000017500000000157715071030124033372 0ustar zuulzuul--- - name: Debug make_infra_prep_env when: make_infra_prep_env is defined ansible.builtin.debug: var: make_infra_prep_env - name: Debug make_infra_prep_params when: make_infra_prep_params is defined ansible.builtin.debug: var: make_infra_prep_params - name: Run infra_prep retries: "{{ make_infra_prep_retries | default(omit) }}" delay: "{{ make_infra_prep_delay | default(omit) }}" until: "{{ make_infra_prep_until | default(true) }}" register: "make_infra_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make infra_prep" dry_run: "{{ make_infra_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_infra_prep_env|default({})), **(make_infra_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000014600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra.ym0000644000175000017500000000146415071030124033310 0ustar zuulzuul--- - name: Debug make_infra_env when: make_infra_env is defined ansible.builtin.debug: var: make_infra_env - name: Debug make_infra_params when: make_infra_params is defined ansible.builtin.debug: var: make_infra_params - name: Run infra retries: "{{ make_infra_retries | default(omit) }}" delay: "{{ make_infra_delay | default(omit) }}" until: "{{ make_infra_until | default(true) }}" register: "make_infra_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make infra" dry_run: "{{ make_infra_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_infra_env|default({})), **(make_infra_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra_cl0000644000175000017500000000165415071030124033343 0ustar zuulzuul--- - name: Debug make_infra_cleanup_env when: make_infra_cleanup_env is defined ansible.builtin.debug: var: make_infra_cleanup_env - name: Debug make_infra_cleanup_params when: make_infra_cleanup_params is defined ansible.builtin.debug: var: make_infra_cleanup_params - name: Run infra_cleanup retries: "{{ make_infra_cleanup_retries | default(omit) }}" delay: "{{ make_infra_cleanup_delay | default(omit) }}" until: "{{ make_infra_cleanup_until | default(true) }}" register: "make_infra_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ 
'/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make infra_cleanup" dry_run: "{{ make_infra_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_infra_cleanup_env|default({})), **(make_infra_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_dns_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_dns_depl0000644000175000017500000000171215071030124033351 0ustar zuulzuul--- - name: Debug make_dns_deploy_prep_env when: make_dns_deploy_prep_env is defined ansible.builtin.debug: var: make_dns_deploy_prep_env - name: Debug make_dns_deploy_prep_params when: make_dns_deploy_prep_params is defined ansible.builtin.debug: var: make_dns_deploy_prep_params - name: Run dns_deploy_prep retries: "{{ make_dns_deploy_prep_retries | default(omit) }}" delay: "{{ make_dns_deploy_prep_delay | default(omit) }}" until: "{{ make_dns_deploy_prep_until | default(true) }}" register: "make_dns_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make dns_deploy_prep" dry_run: "{{ make_dns_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_dns_deploy_prep_env|default({})), **(make_dns_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_dns_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_dns_depl0000644000175000017500000000157715071030124033362 0ustar zuulzuul--- - name: Debug make_dns_deploy_env when: make_dns_deploy_env is defined ansible.builtin.debug: var: make_dns_deploy_env - name: Debug make_dns_deploy_params when: make_dns_deploy_params is defined ansible.builtin.debug: var: make_dns_deploy_params - name: Run dns_deploy retries: "{{ make_dns_deploy_retries | default(omit) }}" delay: "{{ make_dns_deploy_delay | default(omit) }}" until: "{{ make_dns_deploy_until | default(true) }}" register: "make_dns_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make dns_deploy" dry_run: "{{ make_dns_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_dns_deploy_env|default({})), **(make_dns_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_dns_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_dns_depl0000644000175000017500000000176715071030124033363 0ustar zuulzuul--- - name: Debug make_dns_deploy_cleanup_env when: make_dns_deploy_cleanup_env is defined ansible.builtin.debug: var: make_dns_deploy_cleanup_env - name: Debug make_dns_deploy_cleanup_params when: make_dns_deploy_cleanup_params is defined ansible.builtin.debug: var: make_dns_deploy_cleanup_params - name: Run dns_deploy_cleanup retries: "{{ make_dns_deploy_cleanup_retries | default(omit) }}" delay: "{{ 
make_dns_deploy_cleanup_delay | default(omit) }}" until: "{{ make_dns_deploy_cleanup_until | default(true) }}" register: "make_dns_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make dns_deploy_cleanup" dry_run: "{{ make_dns_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_dns_deploy_cleanup_env|default({})), **(make_dns_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netconfig_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netconfi0000644000175000017500000000204415071030124033365 0ustar zuulzuul--- - name: Debug make_netconfig_deploy_prep_env when: make_netconfig_deploy_prep_env is defined ansible.builtin.debug: var: make_netconfig_deploy_prep_env - name: Debug make_netconfig_deploy_prep_params when: make_netconfig_deploy_prep_params is defined ansible.builtin.debug: var: make_netconfig_deploy_prep_params - name: Run netconfig_deploy_prep retries: "{{ make_netconfig_deploy_prep_retries | default(omit) }}" delay: "{{ make_netconfig_deploy_prep_delay | default(omit) }}" until: "{{ make_netconfig_deploy_prep_until | default(true) }}" register: "make_netconfig_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netconfig_deploy_prep" dry_run: "{{ make_netconfig_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netconfig_deploy_prep_env|default({})), **(make_netconfig_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netconfig_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netconfi0000644000175000017500000000173115071030124033367 0ustar zuulzuul--- - name: Debug make_netconfig_deploy_env when: make_netconfig_deploy_env is defined ansible.builtin.debug: var: make_netconfig_deploy_env - name: Debug make_netconfig_deploy_params when: make_netconfig_deploy_params is defined ansible.builtin.debug: var: make_netconfig_deploy_params - name: Run netconfig_deploy retries: "{{ make_netconfig_deploy_retries | default(omit) }}" delay: "{{ make_netconfig_deploy_delay | default(omit) }}" until: "{{ make_netconfig_deploy_until | default(true) }}" register: "make_netconfig_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netconfig_deploy" dry_run: "{{ make_netconfig_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netconfig_deploy_env|default({})), **(make_netconfig_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netconfig_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netconfi0000644000175000017500000000212115071030124033361 0ustar zuulzuul--- - name: Debug make_netconfig_deploy_cleanup_env when: make_netconfig_deploy_cleanup_env is defined ansible.builtin.debug: var: make_netconfig_deploy_cleanup_env - name: Debug make_netconfig_deploy_cleanup_params when: make_netconfig_deploy_cleanup_params is defined ansible.builtin.debug: var: make_netconfig_deploy_cleanup_params - name: Run netconfig_deploy_cleanup retries: "{{ make_netconfig_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_netconfig_deploy_cleanup_delay | default(omit) }}" until: "{{ make_netconfig_deploy_cleanup_until | default(true) }}" register: "make_netconfig_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netconfig_deploy_cleanup" dry_run: "{{ make_netconfig_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netconfig_deploy_cleanup_env|default({})), **(make_netconfig_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_memcached_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_memcache0000644000175000017500000000204415071030124033322 0ustar zuulzuul--- - name: Debug make_memcached_deploy_prep_env when: make_memcached_deploy_prep_env is defined ansible.builtin.debug: var: make_memcached_deploy_prep_env - name: Debug make_memcached_deploy_prep_params when: make_memcached_deploy_prep_params is defined ansible.builtin.debug: var: make_memcached_deploy_prep_params - name: Run memcached_deploy_prep retries: "{{ make_memcached_deploy_prep_retries | default(omit) }}" delay: "{{ make_memcached_deploy_prep_delay | default(omit) }}" until: "{{ make_memcached_deploy_prep_until | default(true) }}" register: "make_memcached_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make memcached_deploy_prep" dry_run: "{{ make_memcached_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_memcached_deploy_prep_env|default({})), **(make_memcached_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_memcached_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_memcache0000644000175000017500000000173115071030124033324 0ustar zuulzuul--- - name: Debug make_memcached_deploy_env when: make_memcached_deploy_env is defined ansible.builtin.debug: var: make_memcached_deploy_env - name: Debug make_memcached_deploy_params when: make_memcached_deploy_params is defined ansible.builtin.debug: var: make_memcached_deploy_params - name: Run memcached_deploy retries: "{{ make_memcached_deploy_retries | default(omit) }}" delay: "{{ make_memcached_deploy_delay | default(omit) }}" until: "{{ 
make_memcached_deploy_until | default(true) }}" register: "make_memcached_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make memcached_deploy" dry_run: "{{ make_memcached_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_memcached_deploy_env|default({})), **(make_memcached_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_memcached_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_memcache0000644000175000017500000000212115071030124033316 0ustar zuulzuul--- - name: Debug make_memcached_deploy_cleanup_env when: make_memcached_deploy_cleanup_env is defined ansible.builtin.debug: var: make_memcached_deploy_cleanup_env - name: Debug make_memcached_deploy_cleanup_params when: make_memcached_deploy_cleanup_params is defined ansible.builtin.debug: var: make_memcached_deploy_cleanup_params - name: Run memcached_deploy_cleanup retries: "{{ make_memcached_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_memcached_deploy_cleanup_delay | default(omit) }}" until: "{{ make_memcached_deploy_cleanup_until | default(true) }}" register: "make_memcached_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make memcached_deploy_cleanup" dry_run: "{{ make_memcached_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_memcached_deploy_cleanup_env|default({})), **(make_memcached_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000165415071030124033427 0ustar zuulzuul--- - name: Debug make_keystone_prep_env when: make_keystone_prep_env is defined ansible.builtin.debug: var: make_keystone_prep_env - name: Debug make_keystone_prep_params when: make_keystone_prep_params is defined ansible.builtin.debug: var: make_keystone_prep_params - name: Run keystone_prep retries: "{{ make_keystone_prep_retries | default(omit) }}" delay: "{{ make_keystone_prep_delay | default(omit) }}" until: "{{ make_keystone_prep_until | default(true) }}" register: "make_keystone_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone_prep" dry_run: "{{ make_keystone_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_prep_env|default({})), **(make_keystone_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000154115071030124033422 0ustar zuulzuul--- - name: Debug make_keystone_env when: 
make_keystone_env is defined ansible.builtin.debug: var: make_keystone_env - name: Debug make_keystone_params when: make_keystone_params is defined ansible.builtin.debug: var: make_keystone_params - name: Run keystone retries: "{{ make_keystone_retries | default(omit) }}" delay: "{{ make_keystone_delay | default(omit) }}" until: "{{ make_keystone_until | default(true) }}" register: "make_keystone_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone" dry_run: "{{ make_keystone_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_env|default({})), **(make_keystone_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000173115071030124033423 0ustar zuulzuul--- - name: Debug make_keystone_cleanup_env when: make_keystone_cleanup_env is defined ansible.builtin.debug: var: make_keystone_cleanup_env - name: Debug make_keystone_cleanup_params when: make_keystone_cleanup_params is defined ansible.builtin.debug: var: make_keystone_cleanup_params - name: Run keystone_cleanup retries: "{{ make_keystone_cleanup_retries | default(omit) }}" delay: "{{ make_keystone_cleanup_delay | default(omit) }}" until: "{{ make_keystone_cleanup_until | default(true) }}" register: "make_keystone_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone_cleanup" dry_run: "{{ make_keystone_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_cleanup_env|default({})), **(make_keystone_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000202515071030124033420 0ustar zuulzuul--- - name: Debug make_keystone_deploy_prep_env when: make_keystone_deploy_prep_env is defined ansible.builtin.debug: var: make_keystone_deploy_prep_env - name: Debug make_keystone_deploy_prep_params when: make_keystone_deploy_prep_params is defined ansible.builtin.debug: var: make_keystone_deploy_prep_params - name: Run keystone_deploy_prep retries: "{{ make_keystone_deploy_prep_retries | default(omit) }}" delay: "{{ make_keystone_deploy_prep_delay | default(omit) }}" until: "{{ make_keystone_deploy_prep_until | default(true) }}" register: "make_keystone_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone_deploy_prep" dry_run: "{{ make_keystone_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_deploy_prep_env|default({})), **(make_keystone_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar 
home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone_deploy.yml
---
- name: Debug make_keystone_deploy_env
  when: make_keystone_deploy_env is defined
  ansible.builtin.debug:
    var: make_keystone_deploy_env
- name: Debug make_keystone_deploy_params
  when: make_keystone_deploy_params is defined
  ansible.builtin.debug:
    var: make_keystone_deploy_params
- name: Run keystone_deploy
  retries: "{{ make_keystone_deploy_retries | default(omit) }}"
  delay: "{{ make_keystone_deploy_delay | default(omit) }}"
  until: "{{ make_keystone_deploy_until | default(true) }}"
  register: "make_keystone_deploy_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make keystone_deploy"
    dry_run: "{{ make_keystone_deploy_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_keystone_deploy_env|default({})), **(make_keystone_deploy_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone_deploy_cleanup.yml
---
- name: Debug make_keystone_deploy_cleanup_env
  when: make_keystone_deploy_cleanup_env is defined
  ansible.builtin.debug:
    var: make_keystone_deploy_cleanup_env
- name: Debug make_keystone_deploy_cleanup_params
  when: make_keystone_deploy_cleanup_params is defined
  ansible.builtin.debug:
    var: make_keystone_deploy_cleanup_params
- name: Run keystone_deploy_cleanup
  retries: "{{ make_keystone_deploy_cleanup_retries | default(omit) }}"
  delay: "{{ make_keystone_deploy_cleanup_delay | default(omit) }}"
  until: "{{ make_keystone_deploy_cleanup_until | default(true) }}"
  register: "make_keystone_deploy_cleanup_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make keystone_deploy_cleanup"
    dry_run: "{{ make_keystone_deploy_cleanup_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_keystone_deploy_cleanup_env|default({})), **(make_keystone_deploy_cleanup_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican_prep.yml
---
- name: Debug make_barbican_prep_env
  when: make_barbican_prep_env is defined
  ansible.builtin.debug:
    var: make_barbican_prep_env
- name: Debug make_barbican_prep_params
  when: make_barbican_prep_params is defined
  ansible.builtin.debug:
    var: make_barbican_prep_params
- name: Run barbican_prep
  retries: "{{ make_barbican_prep_retries | default(omit) }}"
  delay: "{{ make_barbican_prep_delay | default(omit) }}"
  until: "{{ make_barbican_prep_until | default(true) }}"
  register: "make_barbican_prep_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make barbican_prep"
    dry_run: "{{ make_barbican_prep_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_barbican_prep_env|default({})), **(make_barbican_prep_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican.yml
---
- name: Debug make_barbican_env
  when: make_barbican_env is defined
  ansible.builtin.debug:
    var: make_barbican_env
- name: Debug make_barbican_params
  when: make_barbican_params is defined
  ansible.builtin.debug:
    var: make_barbican_params
- name: Run barbican
  retries: "{{ make_barbican_retries | default(omit) }}"
  delay: "{{ make_barbican_delay | default(omit) }}"
  until: "{{ make_barbican_until | default(true) }}"
  register: "make_barbican_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make barbican"
    dry_run: "{{ make_barbican_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_barbican_env|default({})), **(make_barbican_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican_cleanup.yml
---
- name: Debug make_barbican_cleanup_env
  when: make_barbican_cleanup_env is defined
  ansible.builtin.debug:
    var: make_barbican_cleanup_env
- name: Debug make_barbican_cleanup_params
  when: make_barbican_cleanup_params is defined
  ansible.builtin.debug:
    var: make_barbican_cleanup_params
- name: Run barbican_cleanup
  retries: "{{ make_barbican_cleanup_retries | default(omit) }}"
  delay: "{{ make_barbican_cleanup_delay | default(omit) }}"
  until: "{{ make_barbican_cleanup_until | default(true) }}"
  register: "make_barbican_cleanup_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make barbican_cleanup"
    dry_run: "{{ make_barbican_cleanup_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_barbican_cleanup_env|default({})), **(make_barbican_cleanup_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican_deploy_prep.yml
---
- name: Debug make_barbican_deploy_prep_env
  when: make_barbican_deploy_prep_env is defined
  ansible.builtin.debug:
    var: make_barbican_deploy_prep_env
- name: Debug make_barbican_deploy_prep_params
  when: make_barbican_deploy_prep_params is defined
  ansible.builtin.debug:
    var: make_barbican_deploy_prep_params
- name: Run barbican_deploy_prep
  retries: "{{ make_barbican_deploy_prep_retries | default(omit) }}"
  delay: "{{ make_barbican_deploy_prep_delay | default(omit) }}"
  until: "{{ make_barbican_deploy_prep_until | default(true) }}"
  register: "make_barbican_deploy_prep_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make barbican_deploy_prep"
    dry_run: "{{ make_barbican_deploy_prep_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_barbican_deploy_prep_env|default({})), **(make_barbican_deploy_prep_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican_deploy.yml
---
- name: Debug make_barbican_deploy_env
  when: make_barbican_deploy_env is defined
  ansible.builtin.debug:
    var: make_barbican_deploy_env
- name: Debug make_barbican_deploy_params
  when: make_barbican_deploy_params is defined
  ansible.builtin.debug:
    var: make_barbican_deploy_params
- name: Run barbican_deploy
  retries: "{{ make_barbican_deploy_retries | default(omit) }}"
  delay: "{{ make_barbican_deploy_delay | default(omit) }}"
  until: "{{ make_barbican_deploy_until | default(true) }}"
  register: "make_barbican_deploy_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make barbican_deploy"
    dry_run: "{{ make_barbican_deploy_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_barbican_deploy_env|default({})), **(make_barbican_deploy_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican_deploy_validate.yml
---
- name: Debug make_barbican_deploy_validate_env
  when: make_barbican_deploy_validate_env is defined
  ansible.builtin.debug:
    var: make_barbican_deploy_validate_env
- name: Debug make_barbican_deploy_validate_params
  when: make_barbican_deploy_validate_params is defined
  ansible.builtin.debug:
    var: make_barbican_deploy_validate_params
- name: Run barbican_deploy_validate
  retries: "{{ make_barbican_deploy_validate_retries | default(omit) }}"
  delay: "{{ make_barbican_deploy_validate_delay | default(omit) }}"
  until: "{{ make_barbican_deploy_validate_until | default(true) }}"
  register: "make_barbican_deploy_validate_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make barbican_deploy_validate"
    dry_run: "{{ make_barbican_deploy_validate_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_barbican_deploy_validate_env|default({})), **(make_barbican_deploy_validate_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican_deploy_cleanup.yml
---
- name: Debug make_barbican_deploy_cleanup_env
  when: make_barbican_deploy_cleanup_env is defined
  ansible.builtin.debug:
    var: make_barbican_deploy_cleanup_env
- name: Debug make_barbican_deploy_cleanup_params
  when: make_barbican_deploy_cleanup_params is defined
  ansible.builtin.debug:
    var: make_barbican_deploy_cleanup_params
- name: Run barbican_deploy_cleanup
  retries: "{{ make_barbican_deploy_cleanup_retries | default(omit) }}"
  delay: "{{ make_barbican_deploy_cleanup_delay | default(omit) }}"
  until: "{{ make_barbican_deploy_cleanup_until | default(true) }}"
  register: "make_barbican_deploy_cleanup_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make barbican_deploy_cleanup"
    dry_run: "{{ make_barbican_deploy_cleanup_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_barbican_deploy_cleanup_env|default({})), **(make_barbican_deploy_cleanup_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb.yml
---
- name: Debug make_mariadb_env
  when: make_mariadb_env is defined
  ansible.builtin.debug:
    var: make_mariadb_env
- name: Debug make_mariadb_params
  when: make_mariadb_params is defined
  ansible.builtin.debug:
    var: make_mariadb_params
- name: Run mariadb
  retries: "{{ make_mariadb_retries | default(omit) }}"
  delay: "{{ make_mariadb_delay | default(omit) }}"
  until: "{{ make_mariadb_until | default(true) }}"
  register: "make_mariadb_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make mariadb"
    dry_run: "{{ make_mariadb_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_mariadb_env|default({})), **(make_mariadb_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_cleanup.yml
---
- name: Debug make_mariadb_cleanup_env
  when: make_mariadb_cleanup_env is defined
  ansible.builtin.debug:
    var: make_mariadb_cleanup_env
- name: Debug make_mariadb_cleanup_params
  when: make_mariadb_cleanup_params is defined
  ansible.builtin.debug:
    var: make_mariadb_cleanup_params
- name: Run mariadb_cleanup
  retries: "{{ make_mariadb_cleanup_retries | default(omit) }}"
  delay: "{{ make_mariadb_cleanup_delay | default(omit) }}"
  until: "{{ make_mariadb_cleanup_until | default(true) }}"
  register: "make_mariadb_cleanup_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make mariadb_cleanup"
    dry_run: "{{ make_mariadb_cleanup_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_mariadb_cleanup_env|default({})), **(make_mariadb_cleanup_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_deploy_prep.yml
---
- name: Debug make_mariadb_deploy_prep_env
  when: make_mariadb_deploy_prep_env is defined
  ansible.builtin.debug:
    var: make_mariadb_deploy_prep_env
- name: Debug make_mariadb_deploy_prep_params
  when: make_mariadb_deploy_prep_params is defined
  ansible.builtin.debug:
    var: make_mariadb_deploy_prep_params
- name: Run mariadb_deploy_prep
  retries: "{{ make_mariadb_deploy_prep_retries | default(omit) }}"
  delay: "{{ make_mariadb_deploy_prep_delay | default(omit) }}"
  until: "{{ make_mariadb_deploy_prep_until | default(true) }}"
  register: "make_mariadb_deploy_prep_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make mariadb_deploy_prep"
    dry_run: "{{ make_mariadb_deploy_prep_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_mariadb_deploy_prep_env|default({})), **(make_mariadb_deploy_prep_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_deploy.yml
---
- name: Debug make_mariadb_deploy_env
  when: make_mariadb_deploy_env is defined
  ansible.builtin.debug:
    var: make_mariadb_deploy_env
- name: Debug make_mariadb_deploy_params
  when: make_mariadb_deploy_params is defined
  ansible.builtin.debug:
    var: make_mariadb_deploy_params
- name: Run mariadb_deploy
  retries: "{{ make_mariadb_deploy_retries | default(omit) }}"
  delay: "{{ make_mariadb_deploy_delay | default(omit) }}"
  until: "{{ make_mariadb_deploy_until | default(true) }}"
  register: "make_mariadb_deploy_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make mariadb_deploy"
    dry_run: "{{ make_mariadb_deploy_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_mariadb_deploy_env|default({})), **(make_mariadb_deploy_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_deploy_cleanup.yml
---
- name: Debug make_mariadb_deploy_cleanup_env
  when: make_mariadb_deploy_cleanup_env is defined
  ansible.builtin.debug:
    var: make_mariadb_deploy_cleanup_env
- name: Debug make_mariadb_deploy_cleanup_params
  when: make_mariadb_deploy_cleanup_params is defined
  ansible.builtin.debug:
    var: make_mariadb_deploy_cleanup_params
- name: Run mariadb_deploy_cleanup
  retries: "{{ make_mariadb_deploy_cleanup_retries | default(omit) }}"
  delay: "{{ make_mariadb_deploy_cleanup_delay | default(omit) }}"
  until: "{{ make_mariadb_deploy_cleanup_until | default(true) }}"
  register: "make_mariadb_deploy_cleanup_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir|default(ansible_user_dir ~ '/ci-framework-data') }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make mariadb_deploy_cleanup"
    dry_run: "{{ make_mariadb_deploy_cleanup_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_mariadb_deploy_cleanup_env|default({})), **(make_mariadb_deploy_cleanup_params|default({}))) }}"
home/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible-vars.yml
_param_dir:
    changed: true
    cmd:
    - ls
    - /home/zuul/ci-framework-data/artifacts/parameters
    delta: '0:00:00.007078'
    end: '2025-10-06 21:12:10.478936'
    failed: false
    msg: ''
    rc: 0
    start: '2025-10-06 21:12:10.471858'
    stderr: ''
    stderr_lines: []
    stdout: 'custom-params.yml install-yamls-params.yml openshift-login-params.yml zuul-params.yml'
    stdout_lines:
    - custom-params.yml
    - install-yamls-params.yml
    - openshift-login-params.yml
    - zuul-params.yml
    zuul_log_id: fa163ec2-ffbe-e6cf-bc15-000000000861-1-controller
_param_file:
    changed: false
    failed: false
    stat:
        exists: false
_parsed_vars:
    changed: false
    msg: All items completed
    results:
    -   ansible_loop_var: item
        changed: false
        content: cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw
cifmw_basedir: /home/zuul/ci-framework-data
cifmw_build_images_output: {}
cifmw_config_certmanager: true
cifmw_deploy_edpm: true
cifmw_dlrn_report_result: false
cifmw_edpm_prepare_kustomizations:
-   apiVersion: kustomize.config.k8s.io/v1beta1
    kind: Kustomization
    namespace: openstack
    patches:
    -   patch: "apiVersion: core.openstack.org/v1beta1\nkind: OpenStackControlPlane\nmetadata:\n
            \ name: controlplane\nspec:\n  telemetry:\n    enabled: true\n    template:\n
            \     ceilometer:\n        enabled: true\n      metricStorage:\n        enabled:
            true\n        customMonitoringStack:\n          alertmanagerConfig:\n
            \           disabled: true\n          prometheusConfig:\n            enableRemoteWriteReceiver:
            true\n            persistentVolumeClaim:\n              resources:\n                requests:\n
            \                 storage: 20G\n            replicas: 1\n            scrapeInterval:
            30s\n          resourceSelector:\n            matchLabels:\n              service:
            metricStorage\n          retention: 24h"
        target:
            kind: OpenStackControlPlane
    -   patch: "apiVersion: core.openstack.org/v1beta1\nkind: OpenStackControlPlane\nmetadata:\n
            \ name: controlplane\nspec:\n  telemetry:\n    template:\n      metricStorage:\n
            \       monitoringStack: null"
        target:
            kind: OpenStackControlPlane
    -   patch: "apiVersion: core.openstack.org/v1beta1\nkind: OpenStackControlPlane\nmetadata:\n
            \ name: controlplane\nspec:\n  watcher:\n    enabled: true\n    template:\n
            \     decisionengineServiceTemplate:\n        customServiceConfig: |\n
            \         [watcher_cluster_data_model_collectors.compute]\n          period
            = 60\n          [watcher_cluster_data_model_collectors.storage]\n          period
            = 60"
        target:
            kind: OpenStackControlPlane
cifmw_edpm_prepare_skip_crc_storage_creation: true
cifmw_edpm_prepare_timeout: 60
cifmw_edpm_telemetry_enabled_exporters:
- podman_exporter
- openstack_network_exporter
cifmw_extras:
- '@/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/scenarios/centos-9/multinode-ci.yml'
- '@/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/scenarios/centos-9/horizon.yml'
- '@/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/scenarios/edpm-no-notifications.yml'
- '@/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/tests/watcher-tempest.yml'
cifmw_installyamls_repos: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls
cifmw_openshift_api: api.crc.testing:6443
cifmw_openshift_password: '123456789'
cifmw_openshift_setup_skip_internal_registry: true
cifmw_openshift_setup_skip_internal_registry_tls_verify: true
cifmw_openshift_skip_tls_verify: true
cifmw_openshift_user: kubeadmin
cifmw_operator_build_meta_name: openstack-operator
cifmw_operator_build_output:
    operators:
        openstack-operator:
            git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9
            git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator
            image: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9
            image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9
            image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9
        watcher-operator:
            git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d
            git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator
            image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d
            image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d
            image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d
cifmw_path: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin
cifmw_repo_setup_dist_major_version: 9
cifmw_repo_setup_os_release: centos
cifmw_run_test_role: test_operator
cifmw_run_tests: true
cifmw_test_operator_tempest_concurrency: 1
cifmw_test_operator_tempest_exclude_list: 'watcher_tempest_plugin.*client_functional.*

    watcher_tempest_plugin.tests.scenario.test_execute_strategies.TestExecuteStrategies.test_execute_storage_capacity_balance_strategy

    watcher_tempest_plugin.*\[.*\breal_load\b.*\].*

    watcher_tempest_plugin.tests.scenario.test_execute_zone_migration.TestExecuteZoneMigrationStrategy.test_execute_zone_migration_without_destination_host

    watcher_tempest_plugin.*\[.*\bvolume_migration\b.*\].*

    '
cifmw_test_operator_tempest_external_plugin:
-   changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e
    changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin
    repository: https://opendev.org/openstack/watcher-tempest-plugin.git
cifmw_test_operator_tempest_image_tag: watcher_latest
cifmw_test_operator_tempest_include_list: 'watcher_tempest_plugin.*

    '
cifmw_test_operator_tempest_namespace: podified-epoxy-centos9
cifmw_test_operator_tempest_registry: 38.102.83.53:5001
cifmw_test_operator_tempest_tempestconf_config:
    overrides: 'compute.min_microversion 2.56

        compute.min_compute_nodes 2

        placement.min_microversion 1.29

        compute-feature-enabled.live_migration true

        compute-feature-enabled.block_migration_for_live_migration true

        service_available.sg_core true

        telemetry_services.metric_backends prometheus

        telemetry.disable_ssl_certificate_validation true

        telemetry.ceilometer_polling_interval 15

        optimize.min_microversion 1.0

        optimize.max_microversion 1.4

        optimize.datasource prometheus

        optimize.openstack_type podified

        optimize.proxy_host_address 38.102.83.51

        optimize.proxy_host_user zuul

        optimize.prometheus_host metric-storage-prometheus.openstack.svc

        optimize.prometheus_ssl_enabled true

        optimize.prometheus_ssl_cert_dir /etc/prometheus/secrets/combined-ca-bundle

        optimize.podified_kubeconfig_path /home/zuul/.crc/machines/crc/kubeconfig

        optimize.podified_namespace openstack

        optimize.run_continuous_audit_tests true

        '
cifmw_update_containers: true
cifmw_update_containers_openstack: false
cifmw_update_containers_org: podified-epoxy-centos9
cifmw_update_containers_registry: 38.102.83.53:5001
cifmw_update_containers_tag: watcher_latest
cifmw_update_containers_watcher: true
cifmw_use_crc: false
cifmw_use_libvirt: false
cifmw_zuul_target_host: controller
post_ctlplane_deploy:
-   name: Tune rabbitmq resources
    source: rabbitmq_tuning.yml
    type: playbook
post_deploy:
-   inventory: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/hosts
    name: Download needed tools
    source: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/download_tools.yaml
    type: playbook
-   name: Patch Openstack Prometheus to enable admin API
    source: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/playbooks/prometheus_admin_api.yaml
    type: playbook
post_infra:
-   inventory: /home/zuul/ci-framework-data/artifacts/zuul_inventory.yml
    name: Fetch nodes facts and save them as parameters
    source: fetch_compute_facts.yml
    type: playbook
pre_deploy:
-   name: 80 Kustomize OpenStack CR
    source: control_plane_horizon.yml
    type: playbook
pre_deploy_create_coo_subscription:
-   name: Deploy cluster-observability-operator
    source: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/playbooks/deploy_cluster_observability_operator.yaml
    type: playbook
pre_infra:
-   connection: local
    inventory: localhost,
    name: Download needed tools
    source: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/download_tools.yaml
    type: playbook
pre_update:
-   inventory: /home/zuul/ci-framework-data/artifacts/zuul_inventory.yml
    name: Fetch nodes facts and save them as parameters
    source: fetch_compute_facts.yml
    type: playbook
        encoding: base64
        failed: false
        invocation:
            module_args:
                src: /home/zuul/ci-framework-data/artifacts/parameters/custom-params.yml
        item: custom-params.yml
        source: /home/zuul/ci-framework-data/artifacts/parameters/custom-params.yml
    -   ansible_loop_var: item
        changed: false
        content: cifmw_install_yamls_defaults:
    ADOPTED_EXTERNAL_NETWORK: 172.21.1.0/24
    ADOPTED_INTERNALAPI_NETWORK: 172.17.1.0/24
    ADOPTED_STORAGEMGMT_NETWORK: 172.20.1.0/24
    ADOPTED_STORAGE_NETWORK: 172.18.1.0/24
    ADOPTED_TENANT_NETWORK: 172.9.1.0/24
    ANSIBLEEE: config/samples/_v1beta1_ansibleee.yaml
    ANSIBLEEE_BRANCH: main
    ANSIBLEEE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/config/samples/_v1beta1_ansibleee.yaml
    ANSIBLEEE_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-operator-index:latest
    ANSIBLEEE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/kuttl-test.yaml
    ANSIBLEEE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/tests/kuttl/tests
    ANSIBLEEE_KUTTL_NAMESPACE: ansibleee-kuttl-tests
    ANSIBLEEE_REPO: https://github.com/openstack-k8s-operators/openstack-ansibleee-operator
    ANSIBLEE_COMMIT_HASH: ''
    BARBICAN: config/samples/barbican_v1beta1_barbican.yaml
    BARBICAN_BRANCH: main
    BARBICAN_COMMIT_HASH: ''
    BARBICAN_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/config/samples/barbican_v1beta1_barbican.yaml
    BARBICAN_DEPL_IMG: unused
    BARBICAN_IMG: quay.io/openstack-k8s-operators/barbican-operator-index:latest
    BARBICAN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/kuttl-test.yaml
    BARBICAN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/tests/kuttl/tests
    BARBICAN_KUTTL_NAMESPACE: barbican-kuttl-tests
    BARBICAN_REPO: https://github.com/openstack-k8s-operators/barbican-operator.git
    BARBICAN_SERVICE_ENABLED: 'true'
    BARBICAN_SIMPLE_CRYPTO_ENCRYPTION_KEY: sEFmdFjDUqRM2VemYslV5yGNWjokioJXsg8Nrlc3drU=
    BAREMETAL_BRANCH: main
    BAREMETAL_COMMIT_HASH: ''
    BAREMETAL_IMG: quay.io/openstack-k8s-operators/openstack-baremetal-operator-index:latest
    BAREMETAL_OS_CONTAINER_IMG: ''
    BAREMETAL_OS_IMG: ''
    BAREMETAL_REPO: https://github.com/openstack-k8s-operators/openstack-baremetal-operator.git
    BAREMETAL_TIMEOUT: 20m
    BASH_IMG: quay.io/openstack-k8s-operators/bash:latest
    BGP_ASN: '64999'
    BGP_LEAF_1: 100.65.4.1
    BGP_LEAF_2: 100.64.4.1
    BGP_OVN_ROUTING: 'false'
    BGP_PEER_ASN: '64999'
    BGP_SOURCE_IP: 172.30.4.2
    BGP_SOURCE_IP6: f00d:f00d:f00d:f00d:f00d:f00d:f00d:42
    BMAAS_BRIDGE_IPV4_PREFIX: 172.20.1.2/24
    BMAAS_BRIDGE_IPV6_PREFIX: fd00:bbbb::2/64
    BMAAS_INSTANCE_DISK_SIZE: '20'
    BMAAS_INSTANCE_MEMORY: '4096'
    BMAAS_INSTANCE_NAME_PREFIX: crc-bmaas
    BMAAS_INSTANCE_NET_MODEL: virtio
    BMAAS_INSTANCE_OS_VARIANT: centos-stream9
    BMAAS_INSTANCE_VCPUS: '2'
    BMAAS_INSTANCE_VIRT_TYPE: kvm
    BMAAS_IPV4: 'true'
    BMAAS_IPV6: 'false'
    BMAAS_LIBVIRT_USER: sushyemu
    BMAAS_METALLB_ADDRESS_POOL: 172.20.1.64/26
    BMAAS_METALLB_POOL_NAME: baremetal
    BMAAS_NETWORK_IPV4_PREFIX: 172.20.1.1/24
    BMAAS_NETWORK_IPV6_PREFIX: fd00:bbbb::1/64
    BMAAS_NETWORK_NAME: crc-bmaas
    BMAAS_NODE_COUNT: '1'
    BMAAS_OCP_INSTANCE_NAME: crc
    BMAAS_REDFISH_PASSWORD: password
    BMAAS_REDFISH_USERNAME: admin
    BMAAS_ROUTE_LIBVIRT_NETWORKS: crc-bmaas,crc,default
    BMAAS_SUSHY_EMULATOR_DRIVER: libvirt
    BMAAS_SUSHY_EMULATOR_IMAGE: quay.io/metal3-io/sushy-tools:latest
    BMAAS_SUSHY_EMULATOR_NAMESPACE: sushy-emulator
    BMAAS_SUSHY_EMULATOR_OS_CLIENT_CONFIG_FILE: /etc/openstack/clouds.yaml
    BMAAS_SUSHY_EMULATOR_OS_CLOUD: openstack
    BMH_NAMESPACE: openstack
    BMO_BRANCH: release-0.9
    BMO_COMMIT_HASH: ''
    BMO_IPA_BRANCH: stable/2024.1
    BMO_IRONIC_HOST: 192.168.122.10
    BMO_PROVISIONING_INTERFACE: ''
    BMO_REPO: https://github.com/metal3-io/baremetal-operator
    BMO_SETUP: false
    BMO_SETUP_ROUTE_REPLACE: 'true'
    BM_CTLPLANE_INTERFACE: enp1s0
    BM_INSTANCE_MEMORY: '8192'
    BM_INSTANCE_NAME_PREFIX: edpm-compute-baremetal
    BM_INSTANCE_NAME_SUFFIX: '0'
    BM_NETWORK_NAME: default
    BM_NODE_COUNT: '1'
    BM_ROOT_PASSWORD: ''
    BM_ROOT_PASSWORD_SECRET: ''
    CEILOMETER_CENTRAL_DEPL_IMG: unused
    CEILOMETER_NOTIFICATION_DEPL_IMG: unused
    CEPH_BRANCH: release-1.15
    CEPH_CLIENT: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/toolbox.yaml
    CEPH_COMMON: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/common.yaml
    CEPH_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/cluster-test.yaml
    CEPH_CRDS: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/crds.yaml
    CEPH_IMG: quay.io/ceph/demo:latest-squid
    CEPH_OP: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/operator-openshift.yaml
    CEPH_REPO: https://github.com/rook/rook.git
    CERTMANAGER_TIMEOUT: 300s
    CHECKOUT_FROM_OPENSTACK_REF: 'true'
    CINDER: config/samples/cinder_v1beta1_cinder.yaml
    CINDERAPI_DEPL_IMG: unused
    CINDERBKP_DEPL_IMG: unused
    CINDERSCH_DEPL_IMG: unused
    CINDERVOL_DEPL_IMG: unused
    CINDER_BRANCH: main
    CINDER_COMMIT_HASH: ''
    CINDER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/config/samples/cinder_v1beta1_cinder.yaml
    CINDER_IMG: quay.io/openstack-k8s-operators/cinder-operator-index:latest
    CINDER_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/kuttl-test.yaml
    CINDER_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/test/kuttl/tests
    CINDER_KUTTL_NAMESPACE: cinder-kuttl-tests
    CINDER_REPO: https://github.com/openstack-k8s-operators/cinder-operator.git
    CLEANUP_DIR_CMD: rm -Rf
    CRC_BGP_NIC_1_MAC: '52:54:00:11:11:11'
    CRC_BGP_NIC_2_MAC: '52:54:00:11:11:12'
    CRC_HTTPS_PROXY: ''
    CRC_HTTP_PROXY: ''
    CRC_STORAGE_NAMESPACE: crc-storage
    CRC_STORAGE_RETRIES: '3'
    CRC_URL: '''https://developers.redhat.com/content-gateway/rest/mirror/pub/openshift-v4/clients/crc/latest/crc-linux-amd64.tar.xz'''
    CRC_VERSION: latest
    DATAPLANE_ANSIBLE_SECRET: dataplane-ansible-ssh-private-key-secret
    DATAPLANE_ANSIBLE_USER: ''
    DATAPLANE_COMPUTE_IP: 192.168.122.100
    DATAPLANE_CONTAINER_PREFIX: openstack
    DATAPLANE_CONTAINER_TAG: current-podified
    DATAPLANE_CUSTOM_SERVICE_RUNNER_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest
    DATAPLANE_DEFAULT_GW: 192.168.122.1
    DATAPLANE_EXTRA_NOVA_CONFIG_FILE: /dev/null
    DATAPLANE_GROWVOLS_ARGS: /=8GB /tmp=1GB /home=1GB /var=100%
    DATAPLANE_KUSTOMIZE_SCENARIO: preprovisioned
    DATAPLANE_NETWORKER_IP: 192.168.122.200
    DATAPLANE_NETWORK_INTERFACE_NAME: eth0
    DATAPLANE_NOVA_NFS_PATH: ''
    DATAPLANE_NTP_SERVER: pool.ntp.org
    DATAPLANE_PLAYBOOK: osp.edpm.download_cache
    DATAPLANE_REGISTRY_URL: quay.io/podified-antelope-centos9
    DATAPLANE_RUNNER_IMG: ''
    DATAPLANE_SERVER_ROLE: compute
    DATAPLANE_SSHD_ALLOWED_RANGES: '[''192.168.122.0/24'']'
    DATAPLANE_TIMEOUT: 30m
    DATAPLANE_TLS_ENABLED: 'true'
    DATAPLANE_TOTAL_NETWORKER_NODES: '1'
    DATAPLANE_TOTAL_NODES: '1'
    DBSERVICE: galera
    DESIGNATE: config/samples/designate_v1beta1_designate.yaml
    DESIGNATE_BRANCH: main
    DESIGNATE_COMMIT_HASH: ''
    DESIGNATE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/config/samples/designate_v1beta1_designate.yaml
    DESIGNATE_IMG: quay.io/openstack-k8s-operators/designate-operator-index:latest
    DESIGNATE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/kuttl-test.yaml
    DESIGNATE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/tests/kuttl/tests
    DESIGNATE_KUTTL_NAMESPACE: designate-kuttl-tests
    DESIGNATE_REPO: https://github.com/openstack-k8s-operators/designate-operator.git
    DNSDATA: config/samples/network_v1beta1_dnsdata.yaml
    DNSDATA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsdata.yaml
    DNSMASQ: config/samples/network_v1beta1_dnsmasq.yaml
    DNSMASQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsmasq.yaml
    DNS_DEPL_IMG: unused
    DNS_DOMAIN: localdomain
    DOWNLOAD_TOOLS_SELECTION: all
    EDPM_ATTACH_EXTNET: 'true'
    EDPM_COMPUTE_ADDITIONAL_HOST_ROUTES: '''[]'''
    EDPM_COMPUTE_ADDITIONAL_NETWORKS: '''[]'''
    EDPM_COMPUTE_CELLS: '1'
    EDPM_COMPUTE_CEPH_ENABLED: 'true'
    EDPM_COMPUTE_CEPH_NOVA: 'true'
    EDPM_COMPUTE_DHCP_AGENT_ENABLED: 'true'
    EDPM_COMPUTE_SRIOV_ENABLED: 'true'
    EDPM_COMPUTE_SUFFIX: '0'
    EDPM_CONFIGURE_DEFAULT_ROUTE: 'true'
    EDPM_CONFIGURE_HUGEPAGES: 'false'
    EDPM_CONFIGURE_NETWORKING: 'true'
    EDPM_FIRSTBOOT_EXTRA: /tmp/edpm-firstboot-extra
    EDPM_NETWORKER_SUFFIX: '0'
    EDPM_TOTAL_NETWORKERS: '1'
    EDPM_TOTAL_NODES: '1'
    GALERA_REPLICAS: ''
    GENERATE_SSH_KEYS: 'true'
    GIT_CLONE_OPTS: ''
    GLANCE: config/samples/glance_v1beta1_glance.yaml
    GLANCEAPI_DEPL_IMG: unused
    GLANCE_BRANCH: main
    GLANCE_COMMIT_HASH: ''
    GLANCE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/config/samples/glance_v1beta1_glance.yaml
    GLANCE_IMG: quay.io/openstack-k8s-operators/glance-operator-index:latest
    GLANCE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/kuttl-test.yaml
    GLANCE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/test/kuttl/tests
    GLANCE_KUTTL_NAMESPACE: glance-kuttl-tests
    GLANCE_REPO: https://github.com/openstack-k8s-operators/glance-operator.git
    HEAT: config/samples/heat_v1beta1_heat.yaml
    HEATAPI_DEPL_IMG: unused
    HEATCFNAPI_DEPL_IMG: unused
    HEATENGINE_DEPL_IMG: unused
    HEAT_AUTH_ENCRYPTION_KEY: 767c3ed056cbaa3b9dfedb8c6f825bf0
    HEAT_BRANCH: main
    HEAT_COMMIT_HASH: ''
    HEAT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/config/samples/heat_v1beta1_heat.yaml
    HEAT_IMG: quay.io/openstack-k8s-operators/heat-operator-index:latest
    HEAT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/kuttl-test.yaml
    HEAT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/tests/kuttl/tests
    HEAT_KUTTL_NAMESPACE: heat-kuttl-tests
    HEAT_REPO: https://github.com/openstack-k8s-operators/heat-operator.git
    HEAT_SERVICE_ENABLED: 'true'
    HORIZON: config/samples/horizon_v1beta1_horizon.yaml
    HORIZON_BRANCH: main
    HORIZON_COMMIT_HASH: ''
    HORIZON_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/config/samples/horizon_v1beta1_horizon.yaml
    HORIZON_DEPL_IMG: unused
    HORIZON_IMG: quay.io/openstack-k8s-operators/horizon-operator-index:latest
    HORIZON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/kuttl-test.yaml
    HORIZON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/tests/kuttl/tests
    HORIZON_KUTTL_NAMESPACE: horizon-kuttl-tests
    HORIZON_REPO: https://github.com/openstack-k8s-operators/horizon-operator.git
    INFRA_BRANCH: main
    INFRA_COMMIT_HASH: ''
    INFRA_IMG: quay.io/openstack-k8s-operators/infra-operator-index:latest
    INFRA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/kuttl-test.yaml
    INFRA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/tests/kuttl/tests
    INFRA_KUTTL_NAMESPACE: infra-kuttl-tests
    INFRA_REPO: https://github.com/openstack-k8s-operators/infra-operator.git
    INSTALL_CERT_MANAGER: false
    INSTALL_NMSTATE: true || false
    INSTALL_NNCP: true || false
    INTERNALAPI_HOST_ROUTES: ''
    IPV6_LAB_IPV4_NETWORK_IPADDRESS: 172.30.0.1/24
    IPV6_LAB_IPV6_NETWORK_IPADDRESS: fd00:abcd:abcd:fc00::1/64
    IPV6_LAB_LIBVIRT_STORAGE_POOL: default
    IPV6_LAB_MANAGE_FIREWALLD: 'true'
    IPV6_LAB_NAT64_HOST_IPV4: 172.30.0.2/24
    IPV6_LAB_NAT64_HOST_IPV6: fd00:abcd:abcd:fc00::2/64
    IPV6_LAB_NAT64_INSTANCE_NAME: nat64-router
    IPV6_LAB_NAT64_IPV6_NETWORK: fd00:abcd:abcd:fc00::/64
    IPV6_LAB_NAT64_TAYGA_DYNAMIC_POOL: 192.168.255.0/24
    IPV6_LAB_NAT64_TAYGA_IPV4: 192.168.255.1
    IPV6_LAB_NAT64_TAYGA_IPV6: fd00:abcd:abcd:fc00::3
    IPV6_LAB_NAT64_TAYGA_IPV6_PREFIX: fd00:abcd:abcd:fcff::/96
    IPV6_LAB_NAT64_UPDATE_PACKAGES: 'false'
    IPV6_LAB_NETWORK_NAME: nat64
    IPV6_LAB_SNO_CLUSTER_NETWORK: fd00:abcd:0::/48
    IPV6_LAB_SNO_HOST_IP: fd00:abcd:abcd:fc00::11
    IPV6_LAB_SNO_HOST_PREFIX: '64'
    IPV6_LAB_SNO_INSTANCE_NAME: sno
    IPV6_LAB_SNO_MACHINE_NETWORK: fd00:abcd:abcd:fc00::/64
    IPV6_LAB_SNO_OCP_MIRROR_URL: https://mirror.openshift.com/pub/openshift-v4/clients/ocp
    IPV6_LAB_SNO_OCP_VERSION: latest-4.14
    IPV6_LAB_SNO_SERVICE_NETWORK: fd00:abcd:abcd:fc03::/112
    IPV6_LAB_SSH_PUB_KEY: /home/zuul/.ssh/id_rsa.pub
    IPV6_LAB_WORK_DIR: /home/zuul/.ipv6lab
    IRONIC: config/samples/ironic_v1beta1_ironic.yaml
    IRONICAPI_DEPL_IMG: unused
    IRONICCON_DEPL_IMG: unused
    IRONICINS_DEPL_IMG: unused
    IRONICNAG_DEPL_IMG: unused
    IRONICPXE_DEPL_IMG: unused
    IRONIC_BRANCH: main
    IRONIC_COMMIT_HASH: ''
    IRONIC_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/config/samples/ironic_v1beta1_ironic.yaml
    IRONIC_IMAGE_TAG: release-24.1
    IRONIC_IMG: quay.io/openstack-k8s-operators/ironic-operator-index:latest
    IRONIC_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/kuttl-test.yaml
    IRONIC_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/tests/kuttl/tests
    IRONIC_KUTTL_NAMESPACE: ironic-kuttl-tests
    IRONIC_REPO: https://github.com/openstack-k8s-operators/ironic-operator.git
    KEYSTONEAPI: config/samples/keystone_v1beta1_keystoneapi.yaml
    KEYSTONEAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/config/samples/keystone_v1beta1_keystoneapi.yaml
    KEYSTONEAPI_DEPL_IMG: unused
    KEYSTONE_BRANCH: main
    KEYSTONE_COMMIT_HASH: ''
    KEYSTONE_FEDERATION_CLIENT_SECRET: COX8bmlKAWn56XCGMrKQJj7dgHNAOl6f
    KEYSTONE_FEDERATION_CRYPTO_PASSPHRASE: openstack
    KEYSTONE_IMG: quay.io/openstack-k8s-operators/keystone-operator-index:latest
    KEYSTONE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/kuttl-test.yaml
    KEYSTONE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/tests/kuttl/tests
    KEYSTONE_KUTTL_NAMESPACE: keystone-kuttl-tests
    KEYSTONE_REPO: https://github.com/openstack-k8s-operators/keystone-operator.git
    KUBEADMIN_PWD: '12345678'
    LIBVIRT_SECRET: libvirt-secret
    LOKI_DEPLOY_MODE: openshift-network
    LOKI_DEPLOY_NAMESPACE: netobserv
    LOKI_DEPLOY_SIZE: 1x.demo
    LOKI_NAMESPACE: openshift-operators-redhat
    LOKI_OPERATOR_GROUP: openshift-operators-redhat-loki
    LOKI_SUBSCRIPTION: loki-operator
    LVMS_CR: '1'
    MANILA: config/samples/manila_v1beta1_manila.yaml
    MANILAAPI_DEPL_IMG: unused
    MANILASCH_DEPL_IMG: unused
    MANILASHARE_DEPL_IMG: unused
    MANILA_BRANCH: main
    MANILA_COMMIT_HASH: ''
    MANILA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/config/samples/manila_v1beta1_manila.yaml
    MANILA_IMG: quay.io/openstack-k8s-operators/manila-operator-index:latest
    MANILA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/kuttl-test.yaml
    MANILA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/test/kuttl/tests
    MANILA_KUTTL_NAMESPACE: manila-kuttl-tests
    MANILA_REPO: https://github.com/openstack-k8s-operators/manila-operator.git
    MANILA_SERVICE_ENABLED: 'true'
    MARIADB: config/samples/mariadb_v1beta1_galera.yaml
    MARIADB_BRANCH: main
    MARIADB_CHAINSAW_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/tests/chainsaw/config.yaml
    MARIADB_CHAINSAW_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/tests/chainsaw/tests
    MARIADB_CHAINSAW_NAMESPACE: mariadb-chainsaw-tests
    MARIADB_COMMIT_HASH: ''
    MARIADB_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/config/samples/mariadb_v1beta1_galera.yaml
    MARIADB_DEPL_IMG: unused
    MARIADB_IMG: quay.io/openstack-k8s-operators/mariadb-operator-index:latest
    MARIADB_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/kuttl-test.yaml
    MARIADB_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/tests/kuttl/tests
    MARIADB_KUTTL_NAMESPACE: mariadb-kuttl-tests
    MARIADB_REPO: https://github.com/openstack-k8s-operators/mariadb-operator.git
    MEMCACHED: config/samples/memcached_v1beta1_memcached.yaml
    MEMCACHED_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/memcached_v1beta1_memcached.yaml
    MEMCACHED_DEPL_IMG: unused
    METADATA_SHARED_SECRET: '1234567842'
    METALLB_IPV6_POOL: fd00:aaaa::80-fd00:aaaa::90
    METALLB_POOL: 192.168.122.80-192.168.122.90
    MICROSHIFT: '0'
    NAMESPACE: openstack
    NETCONFIG: config/samples/network_v1beta1_netconfig.yaml
    NETCONFIG_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_netconfig.yaml
    NETCONFIG_DEPL_IMG: unused
    NETOBSERV_DEPLOY_NAMESPACE: netobserv
    NETOBSERV_NAMESPACE: openshift-netobserv-operator
    NETOBSERV_OPERATOR_GROUP: openshift-netobserv-operator-net
    NETOBSERV_SUBSCRIPTION: netobserv-operator
    NETWORK_BGP: 'false'
    NETWORK_DESIGNATE_ADDRESS_PREFIX: 172.28.0
    NETWORK_DESIGNATE_EXT_ADDRESS_PREFIX: 172.50.0
    NETWORK_INTERNALAPI_ADDRESS_PREFIX: 172.17.0
    NETWORK_ISOLATION: 'true'
    NETWORK_ISOLATION_INSTANCE_NAME: crc
    NETWORK_ISOLATION_IPV4: 'true'
    NETWORK_ISOLATION_IPV4_ADDRESS: 172.16.1.1/24
    NETWORK_ISOLATION_IPV4_NAT: 'true'
    NETWORK_ISOLATION_IPV6: 'false'
    NETWORK_ISOLATION_IPV6_ADDRESS: fd00:aaaa::1/64
    NETWORK_ISOLATION_IP_ADDRESS: 192.168.122.10
    NETWORK_ISOLATION_MAC: '52:54:00:11:11:10'
    NETWORK_ISOLATION_NETWORK_NAME: net-iso
    NETWORK_ISOLATION_NET_NAME: default
    NETWORK_ISOLATION_USE_DEFAULT_NETWORK: 'true'
    NETWORK_MTU: '1500'
    NETWORK_STORAGEMGMT_ADDRESS_PREFIX: 172.20.0
    NETWORK_STORAGE_ADDRESS_PREFIX: 172.18.0
    NETWORK_STORAGE_MACVLAN: ''
    NETWORK_TENANT_ADDRESS_PREFIX: 172.19.0
    NETWORK_VLAN_START: '20'
    NETWORK_VLAN_STEP: '1'
    NEUTRONAPI: config/samples/neutron_v1beta1_neutronapi.yaml
    NEUTRONAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/config/samples/neutron_v1beta1_neutronapi.yaml
    NEUTRONAPI_DEPL_IMG: unused
    NEUTRON_BRANCH: main
    NEUTRON_COMMIT_HASH: ''
    NEUTRON_IMG: quay.io/openstack-k8s-operators/neutron-operator-index:latest
    NEUTRON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/kuttl-test.yaml
    NEUTRON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/test/kuttl/tests
    NEUTRON_KUTTL_NAMESPACE: neutron-kuttl-tests
    NEUTRON_REPO: https://github.com/openstack-k8s-operators/neutron-operator.git
    NFS_HOME: /home/nfs
    NMSTATE_NAMESPACE: openshift-nmstate
    NMSTATE_OPERATOR_GROUP: openshift-nmstate-tn6k8
    NMSTATE_SUBSCRIPTION: kubernetes-nmstate-operator
    NNCP_ADDITIONAL_HOST_ROUTES: ''
    NNCP_BGP_1_INTERFACE: enp7s0
    NNCP_BGP_1_IP_ADDRESS: 100.65.4.2
    NNCP_BGP_2_INTERFACE: enp8s0
    NNCP_BGP_2_IP_ADDRESS: 100.64.4.2
    NNCP_BRIDGE: ospbr
    NNCP_CLEANUP_TIMEOUT: 120s
    NNCP_CTLPLANE_IPV6_ADDRESS_PREFIX: 'fd00:aaaa::'
    NNCP_CTLPLANE_IPV6_ADDRESS_SUFFIX: '10'
    NNCP_CTLPLANE_IP_ADDRESS_PREFIX: 192.168.122
    NNCP_CTLPLANE_IP_ADDRESS_SUFFIX: '10'
    NNCP_DNS_SERVER: 192.168.122.1
    NNCP_DNS_SERVER_IPV6: fd00:aaaa::1
    NNCP_GATEWAY: 192.168.122.1
    NNCP_GATEWAY_IPV6: fd00:aaaa::1
    NNCP_INTERFACE: enp6s0
    NNCP_NODES: ''
    NNCP_TIMEOUT: 240s
    NOVA: config/samples/nova_v1beta1_nova_collapsed_cell.yaml
    NOVA_BRANCH: main
    NOVA_COMMIT_HASH: ''
    NOVA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/nova-operator/config/samples/nova_v1beta1_nova_collapsed_cell.yaml
    NOVA_IMG: quay.io/openstack-k8s-operators/nova-operator-index:latest
    NOVA_REPO: https://github.com/openstack-k8s-operators/nova-operator.git
    NUMBER_OF_INSTANCES: '1'
    OCP_NETWORK_NAME: crc
    OCTAVIA: config/samples/octavia_v1beta1_octavia.yaml
    OCTAVIA_BRANCH: main
    OCTAVIA_COMMIT_HASH: ''
    OCTAVIA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/config/samples/octavia_v1beta1_octavia.yaml
    OCTAVIA_IMG: quay.io/openstack-k8s-operators/octavia-operator-index:latest
    OCTAVIA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/kuttl-test.yaml
    OCTAVIA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/tests/kuttl/tests
    OCTAVIA_KUTTL_NAMESPACE: octavia-kuttl-tests
    OCTAVIA_REPO: https://github.com/openstack-k8s-operators/octavia-operator.git
    OKD: 'false'
    OPENSTACK_BRANCH: main
    OPENSTACK_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-bundle:latest
    OPENSTACK_COMMIT_HASH: ''
    OPENSTACK_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml
    OPENSTACK_CRDS_DIR: openstack_crds
    OPENSTACK_CTLPLANE: config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml
    OPENSTACK_IMG: quay.io/openstack-k8s-operators/openstack-operator-index:latest
    OPENSTACK_K8S_BRANCH: main
    OPENSTACK_K8S_TAG: latest
    OPENSTACK_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/kuttl-test.yaml
    OPENSTACK_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/tests/kuttl/tests
    OPENSTACK_KUTTL_NAMESPACE: openstack-kuttl-tests
    OPENSTACK_NEUTRON_CUSTOM_CONF: ''
    OPENSTACK_REPO: https://github.com/openstack-k8s-operators/openstack-operator.git
    OPENSTACK_STORAGE_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-storage-bundle:latest
    OPERATOR_BASE_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator
    OPERATOR_CHANNEL: ''
    OPERATOR_NAMESPACE: openstack-operators
    OPERATOR_SOURCE: ''
    OPERATOR_SOURCE_NAMESPACE: ''
    OUT: /home/zuul/ci-framework-data/artifacts/manifests
    OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm
    OVNCONTROLLER: config/samples/ovn_v1beta1_ovncontroller.yaml
    OVNCONTROLLER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovncontroller.yaml
    OVNCONTROLLER_NMAP: 'true'
    OVNDBS: config/samples/ovn_v1beta1_ovndbcluster.yaml
    OVNDBS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovndbcluster.yaml
    OVNNORTHD: config/samples/ovn_v1beta1_ovnnorthd.yaml
    OVNNORTHD_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovnnorthd.yaml
    OVN_BRANCH: main
    OVN_COMMIT_HASH: ''
    OVN_IMG: quay.io/openstack-k8s-operators/ovn-operator-index:latest
    OVN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/kuttl-test.yaml
    OVN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/tests/kuttl/tests
    OVN_KUTTL_NAMESPACE: ovn-kuttl-tests
    OVN_REPO: https://github.com/openstack-k8s-operators/ovn-operator.git
    PASSWORD: '12345678'
    PLACEMENTAPI: config/samples/placement_v1beta1_placementapi.yaml
    PLACEMENTAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/config/samples/placement_v1beta1_placementapi.yaml
    PLACEMENTAPI_DEPL_IMG: unused
    PLACEMENT_BRANCH: main
    PLACEMENT_COMMIT_HASH: ''
    PLACEMENT_IMG: quay.io/openstack-k8s-operators/placement-operator-index:latest
    PLACEMENT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/kuttl-test.yaml
    PLACEMENT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/tests/kuttl/tests
    PLACEMENT_KUTTL_NAMESPACE: placement-kuttl-tests
    PLACEMENT_REPO: https://github.com/openstack-k8s-operators/placement-operator.git
    PULL_SECRET: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/pull-secret.txt
    RABBITMQ: docs/examples/default-security-context/rabbitmq.yaml
    RABBITMQ_BRANCH: patches
    RABBITMQ_COMMIT_HASH: ''
    RABBITMQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rabbitmq-operator/docs/examples/default-security-context/rabbitmq.yaml
    RABBITMQ_DEPL_IMG: unused
    RABBITMQ_IMG: quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest
    RABBITMQ_REPO: https://github.com/openstack-k8s-operators/rabbitmq-cluster-operator.git
    REDHAT_OPERATORS: 'false'
    REDIS: config/samples/redis_v1beta1_redis.yaml
    REDIS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator-redis/config/samples/redis_v1beta1_redis.yaml
    REDIS_DEPL_IMG: unused
    RH_REGISTRY_PWD: ''
    RH_REGISTRY_USER: ''
    SECRET: osp-secret
    SG_CORE_DEPL_IMG: unused
    STANDALONE_COMPUTE_DRIVER: libvirt
    STANDALONE_EXTERNAL_NET_PREFFIX: 172.21.0
    STANDALONE_INTERNALAPI_NET_PREFIX: 172.17.0
    STANDALONE_STORAGEMGMT_NET_PREFIX: 172.20.0
    STANDALONE_STORAGE_NET_PREFIX: 172.18.0
    STANDALONE_TENANT_NET_PREFIX: 172.19.0
    STORAGEMGMT_HOST_ROUTES: ''
    STORAGE_CLASS: local-storage
    STORAGE_HOST_ROUTES: ''
    SWIFT: config/samples/swift_v1beta1_swift.yaml
    SWIFT_BRANCH: main
    SWIFT_COMMIT_HASH: ''
    SWIFT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/config/samples/swift_v1beta1_swift.yaml
    SWIFT_IMG: quay.io/openstack-k8s-operators/swift-operator-index:latest
    SWIFT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/kuttl-test.yaml
    SWIFT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/tests/kuttl/tests
    SWIFT_KUTTL_NAMESPACE: swift-kuttl-tests
    SWIFT_REPO: https://github.com/openstack-k8s-operators/swift-operator.git
    TELEMETRY: config/samples/telemetry_v1beta1_telemetry.yaml
    TELEMETRY_BRANCH: main
    TELEMETRY_COMMIT_HASH: ''
    TELEMETRY_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/config/samples/telemetry_v1beta1_telemetry.yaml
    TELEMETRY_IMG: quay.io/openstack-k8s-operators/telemetry-operator-index:latest
    TELEMETRY_KUTTL_BASEDIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator
    TELEMETRY_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/kuttl-test.yaml
    TELEMETRY_KUTTL_NAMESPACE: telemetry-kuttl-tests
    TELEMETRY_KUTTL_RELPATH: tests/kuttl/suites
    TELEMETRY_REPO: https://github.com/openstack-k8s-operators/telemetry-operator.git
    TENANT_HOST_ROUTES: ''
    TIMEOUT: 300s
    TLS_ENABLED: 'false'
    WATCHER_BRANCH: ''
    WATCHER_REPO: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator
    tripleo_deploy: 'export REGISTRY_USER:'
cifmw_install_yamls_environment:
    BMO_SETUP: false
    CHECKOUT_FROM_OPENSTACK_REF: 'true'
    INSTALL_CERT_MANAGER: false
    KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig
    OPENSTACK_K8S_BRANCH: main
    OUT: /home/zuul/ci-framework-data/artifacts/manifests
    OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm
    WATCHER_BRANCH: ''
    WATCHER_REPO: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator
        encoding: base64
        failed: false
        invocation:
            module_args:
                src: /home/zuul/ci-framework-data/artifacts/parameters/install-yamls-params.yml
        item: install-yamls-params.yml
        source: /home/zuul/ci-framework-data/artifacts/parameters/install-yamls-params.yml
    -   ansible_loop_var: item
        changed: false
        content: Y2lmbXdfb3BlbnNoaWZ0X2FwaTogYXBpLmNyYy50ZXN0aW5nOjY0NDMKY2lmbXdfb3BlbnNoaWZ0X2NvbnRleHQ6IGRlZmF1bHQvYXBpLWNyYy10ZXN0aW5nOjY0NDMva3ViZWFkbWluCmNpZm13X29wZW5zaGlmdF9rdWJlY29uZmlnOiAvaG9tZS96dXVsLy5jcmMvbWFjaGluZXMvY3JjL2t1YmVjb25maWcKY2lmbXdfb3BlbnNoaWZ0X3Rva2VuOiBzaGEyNTZ+UDN3U2JBY25sbUVoclB6eHJuMDUwUjk0SDhBSnZGU25OY1lmOGJQT0EzYwpjaWZtd19vcGVuc2hpZnRfdXNlcjoga3ViZWFkbWluCg==
        encoding: base64
        failed: false
        invocation:
            module_args:
                src: /home/zuul/ci-framework-data/artifacts/parameters/openshift-login-params.yml
        item: openshift-login-params.yml
        source: /home/zuul/ci-framework-data/artifacts/parameters/openshift-login-params.yml
    -   ansible_loop_var: item
        changed: false
        content: cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw
cifmw_build_images_output: {}
cifmw_dlrn_report_result: false
cifmw_edpm_telemetry_enabled_exporters:
- podman_exporter
- openstack_network_exporter
cifmw_extras:
- '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework''].
    src_dir }}/scenarios/centos-9/multinode-ci.yml'
- '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework''].
    src_dir }}/scenarios/centos-9/horizon.yml'
- '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator''].
    src_dir }}/ci/scenarios/{{ watcher_scenario }}.yml'
- '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator''].
    src_dir }}/ci/tests/watcher-tempest.yml'
cifmw_openshift_api: api.crc.testing:6443
cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig'
cifmw_openshift_password: '123456789'
cifmw_openshift_skip_tls_verify: true
cifmw_openshift_user: kubeadmin
cifmw_operator_build_output:
    operators:
        openstack-operator:
            git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9
            git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator
            image: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9
            image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9
            image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9
        watcher-operator:
            git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d
            git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator
            image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d
            image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d
            image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d
cifmw_test_operator_tempest_external_plugin:
-   changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e
    changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin
    repository: https://opendev.org/openstack/watcher-tempest-plugin.git
cifmw_test_operator_tempest_image_tag: watcher_latest
cifmw_test_operator_tempest_namespace: '{{ content_provider_os_registry_url | split(''/'')
    | last }}'
cifmw_test_operator_tempest_registry: '{{ content_provider_os_registry_url | split(''/'')
    | first }}'
cifmw_update_containers_openstack: false
cifmw_update_containers_org: podified-epoxy-centos9
cifmw_update_containers_registry: '{{ content_provider_os_registry_url | split(''/'')
    | first }}'
cifmw_update_containers_tag: watcher_latest
cifmw_update_containers_watcher: true
cifmw_use_libvirt: false
cifmw_zuul_target_host: controller
content_provider_dlrn_md5_hash: ''
content_provider_os_registry_url: 38.102.83.53:5001/podified-epoxy-centos9
content_provider_registry_ip: 38.102.83.53
crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''')
    }}'
crc_ci_bootstrap_networking:
    instances:
        compute-0:
            networks:
                default:
                    ip: 192.168.122.100
                internal-api:
                    config_nm: false
                    ip: 172.17.0.100
                storage:
                    config_nm: false
                    ip: 172.18.0.100
                tenant:
                    config_nm: false
                    ip: 172.19.0.100
        compute-1:
            networks:
                default:
                    ip: 192.168.122.101
                internal-api:
                    config_nm: false
                    ip: 172.17.0.101
                storage:
                    config_nm: false
                    ip: 172.18.0.101
                tenant:
                    config_nm: false
                    ip: 172.19.0.101
        controller:
            networks:
                default:
                    ip: 192.168.122.11
        crc:
            networks:
                default:
                    ip: 192.168.122.10
                internal-api:
                    ip: 172.17.0.5
                storage:
                    ip: 172.18.0.5
                tenant:
                    ip: 172.19.0.5
    networks:
        default:
            mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}'
            range: 192.168.122.0/24
            router_net: ''
            transparent: true
        internal-api:
            range: 172.17.0.0/24
            vlan: 20
        storage:
            range: 172.18.0.0/24
            vlan: 21
        tenant:
            range: 172.19.0.0/24
            vlan: 22
enable_ramdisk: true
fetch_dlrn_hash: false
push_registry: quay.rdoproject.org
quay_login_secret_name: quay_nextgen_zuulgithubci
registry_login_enabled: true
watcher_scenario: edpm-no-notifications
watcher_services_tag: watcher_latest
watcher_tempest_max_microversion: '1.4'
zuul:
    _inheritance_path:
    - '<Job base-minimal branches: None source: config/zuul.d/jobs.yaml@master#24>'
    - '<Job base-crc-cloud branches: None source: config/zuul.d/_jobs-crc.yaml@master#235>'
    - '<Job cifmw-podified-multinode-edpm-base-crc branches: None source: openstack-k8s-operators/ci-framework/zuul.d/base.yaml@main#123>'
    - '<Job podified-multinode-edpm-deployment-crc branches: None source: openstack-k8s-operators/ci-framework/zuul.d/edpm_multinode.yaml@main#317>'
    - '<Job podified-multinode-edpm-deployment-crc-2comp branches: None source: openstack-k8s-operators/ci-framework/zuul.d/edpm_multinode.yaml@main#2>'
    - '<Job watcher-operator-base branches: {MatchAny:{BranchMatcher:master},{BranchMatcher:main}}
        source: openstack-k8s-operators/watcher-operator/.zuul.yaml@main#15>'
    - '<Job watcher-operator-validation-base branches: {MatchAny:{BranchMatcher:master},{BranchMatcher:main}}
        source: openstack-k8s-operators/watcher-operator/.zuul.yaml@main#75>'
    - '<Job watcher-operator-validation-epoxy branches: {MatchAny:{BranchMatcher:master},{BranchMatcher:main}}
        source: openstack-k8s-operators/watcher-operator/.zuul.yaml@main#150>'
    - '<Job watcher-operator-validation-epoxy-ocp4-16 branches: {MatchAny:{BranchMatcher:master},{BranchMatcher:main}}
        source: openstack-k8s-operators/watcher-operator/.zuul.yaml@main#192>'
    - '<Job watcher-operator-validation-epoxy-ocp4-16 branches: None source: openstack-k8s-operators/watcher-operator/.zuul.yaml@main#284>'
    ansible_version: '8'
    attempts: 1
    branch: main
    build: 9ce4c11f9f6a4904bf6148a8276a3232
    build_refs:
    -   branch: main
        change: '287'
        change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287
        commit_id: 14377136e67c9cd67507a059bfde2f19f140387d
        patchset: 14377136e67c9cd67507a059bfde2f19f140387d
        project:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/watcher-operator
            name: openstack-k8s-operators/watcher-operator
            short_name: watcher-operator
        src_dir: src/github.com/openstack-k8s-operators/watcher-operator
        topic: null
    buildset: f9416ac601264548b137ce1f44fe627c
    buildset_refs:
    -   branch: main
        change: '287'
        change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287
        commit_id: 14377136e67c9cd67507a059bfde2f19f140387d
        patchset: 14377136e67c9cd67507a059bfde2f19f140387d
        project:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/watcher-operator
            name: openstack-k8s-operators/watcher-operator
            short_name: watcher-operator
        src_dir: src/github.com/openstack-k8s-operators/watcher-operator
        topic: null
    change: '287'
    change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287
    child_jobs: []
    commit_id: 14377136e67c9cd67507a059bfde2f19f140387d
    event_id: 7dde6e80-a2f2-11f0-83f1-b4af7183f5ac
    executor:
        hostname: ze01.softwarefactory-project.io
        inventory_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/inventory.yaml
        log_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/logs
        result_data_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/results.json
        src_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/src
        work_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work
    items:
    -   branch: main
        change: '287'
        change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287
        commit_id: 14377136e67c9cd67507a059bfde2f19f140387d
        patchset: 14377136e67c9cd67507a059bfde2f19f140387d
        project:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/watcher-operator
            name: openstack-k8s-operators/watcher-operator
            short_name: watcher-operator
            src_dir: src/github.com/openstack-k8s-operators/watcher-operator
        topic: null
    job: watcher-operator-validation-epoxy-ocp4-16
    jobtags: []
    max_attempts: 1
    message: W1dJUF0gTW92ZSByYWJiaXRtcSBub3RpZmljYXRpb25zIHF1ZXVlIHRvIGNvbnRyb2xwbGFuZSBsZXZlbAoKTW92ZSByYWJiaXRtcSBub3RpZmljYXRpb25zIHF1ZXVlIGZyb20gZW5hYmxpbmcgYXQgbm92YS9jaW5kZXIvd2F0Y2hlciBsZXZlbCB0byBvcGVuc3RhY2sgY29udHJvbHBsYW5lIGxldmVsIGFmdGVyIHRoYXQgdXNhZ2UgaXMgYXZhaWxhYmxlIHNpbmNlIGh0dHBzOi8vZ2l0aHViLmNvbS9vcGVuc3RhY2stazhzLW9wZXJhdG9ycy9vcGVuc3RhY2stb3BlcmF0b3IvcHVsbC8xNTkx
    patchset: 14377136e67c9cd67507a059bfde2f19f140387d
    pipeline: github-check
    playbook_context:
        playbook_projects:
            trusted/project_0/review.rdoproject.org/config:
                canonical_name: review.rdoproject.org/config
                checkout: master
                commit: 941f6f7666fdff0145523beb29ceda8db25c234c
            trusted/project_1/opendev.org/zuul/zuul-jobs:
                canonical_name: opendev.org/zuul/zuul-jobs
                checkout: master
                commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df
            trusted/project_2/review.rdoproject.org/rdo-jobs:
                canonical_name: review.rdoproject.org/rdo-jobs
                checkout: master
                commit: d207d5ad1c5824d6db58c2eb5935a8b36674cbe4
            trusted/project_3/github.com/openstack-k8s-operators/ci-framework:
                canonical_name: github.com/openstack-k8s-operators/ci-framework
                checkout: main
                commit: 35b8986b014c5316d873d58c20dfc131ae44aa83
            untrusted/project_0/github.com/openstack-k8s-operators/ci-framework:
                canonical_name: github.com/openstack-k8s-operators/ci-framework
                checkout: main
                commit: 35b8986b014c5316d873d58c20dfc131ae44aa83
            untrusted/project_1/review.rdoproject.org/config:
                canonical_name: review.rdoproject.org/config
                checkout: master
                commit: 941f6f7666fdff0145523beb29ceda8db25c234c
            untrusted/project_2/opendev.org/zuul/zuul-jobs:
                canonical_name: opendev.org/zuul/zuul-jobs
                checkout: master
                commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df
            untrusted/project_3/review.rdoproject.org/rdo-jobs:
                canonical_name: review.rdoproject.org/rdo-jobs
                checkout: master
                commit: d207d5ad1c5824d6db58c2eb5935a8b36674cbe4
        playbooks:
        -   path: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks/edpm/run.yml
            roles:
            -   checkout: main
                checkout_description: playbook branch
                link_name: ansible/playbook_0/role_0/ci-framework
                link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework
                role_path: ansible/playbook_0/role_0/ci-framework/roles
            -   checkout: master
                checkout_description: project default branch
                link_name: ansible/playbook_0/role_1/config
                link_target: untrusted/project_1/review.rdoproject.org/config
                role_path: ansible/playbook_0/role_1/config/roles
            -   checkout: master
                checkout_description: project default branch
                link_name: ansible/playbook_0/role_2/zuul-jobs
                link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs
                role_path: ansible/playbook_0/role_2/zuul-jobs/roles
            -   checkout: master
                checkout_description: project default branch
                link_name: ansible/playbook_0/role_3/rdo-jobs
                link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs
                role_path: ansible/playbook_0/role_3/rdo-jobs/roles
    post_review: false
    project:
        canonical_hostname: github.com
        canonical_name: github.com/openstack-k8s-operators/watcher-operator
        name: openstack-k8s-operators/watcher-operator
        short_name: watcher-operator
        src_dir: src/github.com/openstack-k8s-operators/watcher-operator
    projects:
        github.com/crc-org/crc-cloud:
            canonical_hostname: github.com
            canonical_name: github.com/crc-org/crc-cloud
            checkout: main
            checkout_description: project override ref
            commit: f6ed2f2d118884a075895bbf954ff6000e540430
            name: crc-org/crc-cloud
            required: true
            short_name: crc-cloud
            src_dir: src/github.com/crc-org/crc-cloud
        github.com/openstack-k8s-operators/ci-framework:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/ci-framework
            checkout: main
            checkout_description: zuul branch
            commit: 35b8986b014c5316d873d58c20dfc131ae44aa83
            name: openstack-k8s-operators/ci-framework
            required: true
            short_name: ci-framework
            src_dir: src/github.com/openstack-k8s-operators/ci-framework
        github.com/openstack-k8s-operators/edpm-ansible:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/edpm-ansible
            checkout: main
            checkout_description: zuul branch
            commit: 95aa63de3182faad63a69301d101debad3efc936
            name: openstack-k8s-operators/edpm-ansible
            required: true
            short_name: edpm-ansible
            src_dir: src/github.com/openstack-k8s-operators/edpm-ansible
        github.com/openstack-k8s-operators/infra-operator:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/infra-operator
            checkout: main
            checkout_description: zuul branch
            commit: 2b5048bbcae44dfeaacbb43830318ca45c13f182
            name: openstack-k8s-operators/infra-operator
            required: true
            short_name: infra-operator
            src_dir: src/github.com/openstack-k8s-operators/infra-operator
        github.com/openstack-k8s-operators/install_yamls:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/install_yamls
            checkout: main
            checkout_description: zuul branch
            commit: bb26118ddc70016cbd2118a0b0a35d5f6ab9c343
            name: openstack-k8s-operators/install_yamls
            required: true
            short_name: install_yamls
            src_dir: src/github.com/openstack-k8s-operators/install_yamls
        github.com/openstack-k8s-operators/openstack-baremetal-operator:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/openstack-baremetal-operator
            checkout: main
            checkout_description: zuul branch
            commit: 3bf7652f010ead15ac2d2fec7e3b71c442b8fb8d
            name: openstack-k8s-operators/openstack-baremetal-operator
            required: true
            short_name: openstack-baremetal-operator
            src_dir: src/github.com/openstack-k8s-operators/openstack-baremetal-operator
        github.com/openstack-k8s-operators/openstack-must-gather:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/openstack-must-gather
            checkout: main
            checkout_description: zuul branch
            commit: 748dff8508cbb49e00426d46a4487b9f4c0b0096
            name: openstack-k8s-operators/openstack-must-gather
            required: true
            short_name: openstack-must-gather
            src_dir: src/github.com/openstack-k8s-operators/openstack-must-gather
        github.com/openstack-k8s-operators/openstack-operator:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/openstack-operator
            checkout: main
            checkout_description: zuul branch
            commit: 245af87e94976809f2023f59c19dffb95df97ed9
            name: openstack-k8s-operators/openstack-operator
            required: true
            short_name: openstack-operator
            src_dir: src/github.com/openstack-k8s-operators/openstack-operator
        github.com/openstack-k8s-operators/repo-setup:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/repo-setup
            checkout: main
            checkout_description: zuul branch
            commit: 37b10946c6a10f9fa26c13305f06bfd6867e723f
            name: openstack-k8s-operators/repo-setup
            required: true
            short_name: repo-setup
            src_dir: src/github.com/openstack-k8s-operators/repo-setup
        github.com/openstack-k8s-operators/watcher-operator:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/watcher-operator
            checkout: main
            checkout_description: zuul branch
            commit: 14377136e67c9cd67507a059bfde2f19f140387d
            name: openstack-k8s-operators/watcher-operator
            required: false
            short_name: watcher-operator
            src_dir: src/github.com/openstack-k8s-operators/watcher-operator
        opendev.org/zuul/zuul-jobs:
            canonical_hostname: opendev.org
            canonical_name: opendev.org/zuul/zuul-jobs
            checkout: master
            checkout_description: project default branch
            commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df
            name: zuul/zuul-jobs
            required: true
            short_name: zuul-jobs
            src_dir: src/opendev.org/zuul/zuul-jobs
        review.rdoproject.org/config:
            canonical_hostname: review.rdoproject.org
            canonical_name: review.rdoproject.org/config
            checkout: master
            checkout_description: project default branch
            commit: 941f6f7666fdff0145523beb29ceda8db25c234c
            name: config
            required: true
            short_name: config
            src_dir: src/review.rdoproject.org/config
    ref: refs/pull/287/head
    resources: {}
    tenant: rdoproject.org
    timeout: 10800
    topic: null
    voting: true
zuul_log_collection: false
 encoding: base64 failed: false invocation: module_args: src: /home/zuul/ci-framework-data/artifacts/parameters/zuul-params.yml item: zuul-params.yml source: /home/zuul/ci-framework-data/artifacts/parameters/zuul-params.yml skipped: false ansible_all_ipv4_addresses: - 38.102.83.51 ansible_all_ipv6_addresses: - fe80::f816:3eff:fe6f:820 ansible_apparmor: status: disabled ansible_architecture: x86_64 ansible_bios_date: 04/01/2014 ansible_bios_vendor: SeaBIOS ansible_bios_version: 1.15.0-1 ansible_board_asset_tag: NA ansible_board_name: NA ansible_board_serial: NA ansible_board_vendor: NA ansible_board_version: NA ansible_chassis_asset_tag: NA ansible_chassis_serial: NA ansible_chassis_vendor: QEMU ansible_chassis_version: pc-i440fx-6.2 ansible_check_mode: false ansible_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 ansible_collection_name: null ansible_config_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/post_playbook_0/ansible.cfg ansible_connection: ssh ansible_date_time: date: '2025-10-06' day: '06' epoch: '1759785143' epoch_int: '1759785143' hour: '21' iso8601: '2025-10-06T21:12:23Z' iso8601_basic: 20251006T211223407560 iso8601_basic_short: 20251006T211223 iso8601_micro: '2025-10-06T21:12:23.407560Z' minute: '12' month: '10' second: '23' time: '21:12:23' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Monday weekday_number: '1' weeknumber: '40' year: '2025' ansible_default_ipv4: address: 38.102.83.51 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: fa:16:3e:6f:08:20 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether ansible_default_ipv6: {} ansible_dependent_role_names: [] ansible_device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 masters: {} uuids: sr0: - 2025-10-06-20-56-29-00 vda1: - 1631a6ad-43b8-436d-ae76-16fa14b94458 ansible_devices: sr0: holders: [] host: '' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2025-10-06-20-56-29-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: - 1631a6ad-43b8-436d-ae76-16fa14b94458 sectors: '83883999' sectorsize: 512 size: 40.00 GB start: '2048' uuid: 1631a6ad-43b8-436d-ae76-16fa14b94458 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '83886080' sectorsize: '512' size: 40.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 ansible_diff_mode: false ansible_distribution: CentOS ansible_distribution_file_parsed: true ansible_distribution_file_path: /etc/centos-release ansible_distribution_file_variety: CentOS ansible_distribution_major_version: '9' ansible_distribution_release: Stream ansible_distribution_version: '9' ansible_dns: nameservers: - 192.168.122.10 - 199.204.44.24 - 199.204.47.54 ansible_domain: '' ansible_effective_group_id: 1000 ansible_effective_user_id: 1000 ansible_env: ANSIBLE_LOG_PATH: /home/zuul/ci-framework-data/logs/e2e-collect-logs-must-gather.log BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | 
/usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' DEBUGINFOD_URLS: 'https://debuginfod.centos.org/ ' HOME: /home/zuul LANG: en_US.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: /home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 35354 22 SSH_CONNECTION: 38.102.83.114 35354 38.102.83.51 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '12' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f ansible_eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.51 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::f816:3eff:fe6f:820 prefix: '64' scope: link macaddress: fa:16:3e:6f:08:20 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether ansible_facts: _ansible_facts_gathered: true all_ipv4_addresses: - 38.102.83.51 all_ipv6_addresses: - fe80::f816:3eff:fe6f:820 ansible_local: {} apparmor: status: disabled architecture: x86_64 bios_date: 04/01/2014 bios_vendor: SeaBIOS bios_version: 1.15.0-1 board_asset_tag: NA board_name: NA board_serial: NA board_vendor: NA board_version: NA chassis_asset_tag: NA chassis_serial: NA chassis_vendor: QEMU chassis_version: pc-i440fx-6.2 cifmw_path: 
/home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 crc_ci_bootstrap_instance_default_net_config: mtu: '1500' range: 192.168.122.0/24 router_net: '' transparent: true crc_ci_bootstrap_instance_nm_vlan_networks: - key: internal-api value: ip: 172.17.0.5 - key: storage value: ip: 172.18.0.5 - key: tenant value: ip: 172.19.0.5 crc_ci_bootstrap_instance_parent_port_create_yaml: admin_state_up: true allowed_address_pairs: [] binding_host_id: null binding_profile: {} binding_vif_details: {} binding_vif_type: null binding_vnic_type: normal created_at: '2025-10-06T21:00:58Z' data_plane_status: null description: '' device_id: '' device_owner: '' device_profile: null dns_assignment: - fqdn: host-192-168-122-10.openstacklocal. hostname: host-192-168-122-10 ip_address: 192.168.122.10 dns_domain: '' dns_name: '' extra_dhcp_opts: [] fixed_ips: - ip_address: 192.168.122.10 subnet_id: 139175d8-a9e6-4e3b-931b-a0af30583742 hardware_offload_type: null hints: '' id: 81f69e80-a4c7-43d8-ad59-1b24fcb3acbf ip_allocation: immediate mac_address: fa:16:3e:36:76:9b name: crc-bfd057b4-b43d-4dc5-bc10-e91bf10a649b network_id: febb7485-9e12-4711-8dc7-e207293e25de numa_affinity_policy: null port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 propagate_uplink_status: null qos_network_policy_id: null qos_policy_id: null resource_request: null revision_number: 1 security_group_ids: [] status: DOWN tags: [] trunk_details: null trusted: null updated_at: '2025-10-06T21:00:58Z' crc_ci_bootstrap_network_name: zuul-ci-net-9ce4c11f crc_ci_bootstrap_networks_out: compute-0: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.100/24 mac: fa:16:3e:1a:6b:7b mtu: '1500' internal-api: iface: eth1.20 ip: 172.17.0.100/24 mac: 52:54:00:ec:df:aa mtu: '1496' parent_iface: eth1 vlan: 20 storage: iface: eth1.21 ip: 172.18.0.100/24 mac: 52:54:00:e1:d1:4a mtu: '1496' parent_iface: eth1 vlan: 21 tenant: iface: eth1.22 ip: 172.19.0.100/24 mac: 52:54:00:6e:fd:3e mtu: '1496' parent_iface: eth1 vlan: 22 compute-1: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.101/24 mac: fa:16:3e:cb:47:1e mtu: '1500' internal-api: iface: eth1.20 ip: 172.17.0.101/24 mac: 52:54:00:69:15:f1 mtu: '1496' parent_iface: eth1 vlan: 20 storage: iface: eth1.21 ip: 172.18.0.101/24 mac: 52:54:00:c3:9b:d0 mtu: '1496' parent_iface: eth1 vlan: 21 tenant: iface: eth1.22 ip: 172.19.0.101/24 mac: 52:54:00:1d:8d:c8 mtu: '1496' parent_iface: eth1 vlan: 22 controller: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.11/24 mac: fa:16:3e:fc:47:4f mtu: '1500' crc: default: connection: ci-private-network gw: 192.168.122.1 iface: ens7 ip: 192.168.122.10/24 mac: fa:16:3e:36:76:9b mtu: '1500' internal-api: connection: ci-private-network-20 iface: ens7.20 ip: 172.17.0.5/24 mac: 52:54:00:aa:79:c3 mtu: '1496' parent_iface: ens7 vlan: 20 storage: connection: ci-private-network-21 iface: ens7.21 ip: 172.18.0.5/24 mac: 52:54:00:bd:b2:92 mtu: '1496' parent_iface: ens7 vlan: 21 tenant: connection: ci-private-network-22 iface: ens7.22 ip: 172.19.0.5/24 mac: 52:54:00:86:1f:43 mtu: '1496' parent_iface: ens7 vlan: 22 
crc_ci_bootstrap_private_net_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2025-10-06T20:59:40Z' description: '' dns_domain: '' id: febb7485-9e12-4711-8dc7-e207293e25de ipv4_address_scope: null ipv6_address_scope: null is_default: false is_vlan_qinq: null is_vlan_transparent: true l2_adjacency: true mtu: 1500 name: zuul-ci-net-9ce4c11f port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 provider:network_type: null provider:physical_network: null provider:segmentation_id: null qos_policy_id: null revision_number: 1 router:external: false segments: null shared: false status: ACTIVE subnets: [] tags: [] updated_at: '2025-10-06T20:59:40Z' crc_ci_bootstrap_private_router_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2025-10-06T20:59:45Z' description: '' enable_ndp_proxy: null external_gateway_info: null flavor_id: null id: 4c5c07da-6180-4e43-8bfc-7faf50c6c9a5 name: zuul-ci-subnet-router-9ce4c11f project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 1 routes: [] status: ACTIVE tags: [] tenant_id: 4b633c451ac74233be3721a3635275e5 updated_at: '2025-10-06T20:59:45Z' crc_ci_bootstrap_private_subnet_create_yaml: allocation_pools: - end: 192.168.122.254 start: 192.168.122.2 cidr: 192.168.122.0/24 created_at: '2025-10-06T20:59:43Z' description: '' dns_nameservers: [] dns_publish_fixed_ip: null enable_dhcp: false gateway_ip: 192.168.122.1 host_routes: [] id: 139175d8-a9e6-4e3b-931b-a0af30583742 ip_version: 4 ipv6_address_mode: null ipv6_ra_mode: null name: zuul-ci-subnet-9ce4c11f network_id: febb7485-9e12-4711-8dc7-e207293e25de project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 0 segment_id: null service_types: [] subnetpool_id: null tags: [] updated_at: '2025-10-06T20:59:43Z' crc_ci_bootstrap_provider_dns: - 199.204.44.24 - 199.204.47.54 crc_ci_bootstrap_router_name: zuul-ci-subnet-router-9ce4c11f crc_ci_bootstrap_subnet_name: zuul-ci-subnet-9ce4c11f date_time: date: '2025-10-06' day: '06' epoch: '1759785143' epoch_int: '1759785143' hour: '21' iso8601: '2025-10-06T21:12:23Z' iso8601_basic: 20251006T211223407560 iso8601_basic_short: 20251006T211223 iso8601_micro: '2025-10-06T21:12:23.407560Z' minute: '12' month: '10' second: '23' time: '21:12:23' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Monday weekday_number: '1' weeknumber: '40' year: '2025' default_ipv4: address: 38.102.83.51 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: fa:16:3e:6f:08:20 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether default_ipv6: {} device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 masters: {} uuids: sr0: - 2025-10-06-20-56-29-00 vda1: - 1631a6ad-43b8-436d-ae76-16fa14b94458 devices: sr0: holders: [] host: '' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2025-10-06-20-56-29-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: - 1631a6ad-43b8-436d-ae76-16fa14b94458 sectors: '83883999' sectorsize: 512 size: 40.00 GB start: '2048' uuid: 1631a6ad-43b8-436d-ae76-16fa14b94458 removable: '0' rotational: '1' 
sas_address: null sas_device_handle: null scheduler_mode: none sectors: '83886080' sectorsize: '512' size: 40.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 discovered_interpreter_python: /usr/bin/python3 distribution: CentOS distribution_file_parsed: true distribution_file_path: /etc/centos-release distribution_file_variety: CentOS distribution_major_version: '9' distribution_release: Stream distribution_version: '9' dns: nameservers: - 192.168.122.10 - 199.204.44.24 - 199.204.47.54 domain: '' effective_group_id: 1000 effective_user_id: 1000 env: ANSIBLE_LOG_PATH: /home/zuul/ci-framework-data/logs/e2e-collect-logs-must-gather.log BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' DEBUGINFOD_URLS: 'https://debuginfod.centos.org/ ' HOME: /home/zuul LANG: en_US.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: /home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 35354 22 SSH_CONNECTION: 38.102.83.114 35354 38.102.83.51 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '12' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.51 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' 
ipv6: - address: fe80::f816:3eff:fe6f:820 prefix: '64' scope: link macaddress: fa:16:3e:6f:08:20 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether fibre_channel_wwn: [] fips: false form_factor: Other fqdn: controller gather_subset: - min hostname: controller hostnqn: nqn.2014-08.org.nvmexpress:uuid:2f7d2450-18ac-43a6-80ee-9caa4a7736e0 interfaces: - eth0 - lo is_chroot: false iscsi_iqn: '' kernel: 5.14.0-620.el9.x86_64 kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Sep 26 01:13:23 UTC 2025' lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback loadavg: 15m: 0.03 1m: 0.14 5m: 0.09 locally_reachable_ips: ipv4: - 38.102.83.51 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:fe6f:820 lsb: {} lvm: N/A machine: x86_64 machine_id: 42833e1b511a402df82cb9cb2fc36491 memfree_mb: 3293 memory_mb: nocache: free: 3444 used: 211 real: free: 3293 total: 3655 used: 362 swap: cached: 0 free: 0 total: 0 used: 0 memtotal_mb: 3655 module_setup: true mounts: - block_available: 9966449 block_size: 4096 block_total: 10469115 block_used: 502666 device: /dev/vda1 fstype: xfs inode_available: 20916775 inode_total: 20970992 inode_used: 54217 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 40822575104 size_total: 42881495040 uuid: 1631a6ad-43b8-436d-ae76-16fa14b94458 nodename: controller os_family: RedHat pkg_mgr: dnf proc_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 
console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor processor_cores: 1 processor_count: 2 processor_nproc: 2 processor_threads_per_core: 1 processor_vcpus: 2 product_name: OpenStack Nova product_serial: NA product_uuid: NA product_version: 26.2.1 python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 23 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 23 - final - 0 python_version: 3.9.23 real_group_id: 1000 real_user_id: 1000 selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted selinux_python_present: true service_mgr: systemd ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBDpEwzeDGLwNlfP3Up6vCxCw7kSSu0AiDUvDH/J+EepxMPGLLpzT0wX+lEXL9GArqfNU/UBUmiiwh9dZO9tQ5bk= ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIPrlPStzDnFCaI6YFfPj0aQKsBPAAZFkT8awb2RrAe7g ssh_host_key_ed25519_public_keytype: ssh-ed25519 ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQCsnSYzB9ciMqmgm0l3UC4GvkzqKIRU4HQjm2Wmmz4ONumnWKBZyfZPUd4C1zWgntSd7/HfwyQS5GOnhobA5K/1o855yq/Qr6a2M0JVvxnLdxB089mymIDZ9Z5iXDsVHJNPHKuz3pjoZDbA5XzpQPsDbEeMHpBd0Yz5DQaAPYYN1wg4Wtq6PK86i2jV8qtVH7OnCkn06futt/HtJ4eADwKZV6cutqDHmuTqXwagLJ7PWTm0H9xAYR/Tsgd28krH/EIdcyHBACqdSrk6FWPOdZ1Q5PjVC0ZOHemQeiRhmYW5NgxnnEgSmoTMCyMKRYbVcMYeHKRBg/rXhLSbymoU+eF+Kza486CELgT9KG4Z0NTOmyzNu1ee8G0ZOaowjIQ8Gr6e15WUMLbskShDGqXlAnaRHOAQhBSGCkt0N9KMyGaBdFYVzJOgqi1erPoCN1pLe7Ljr44blAH6Yvp9H8Ji4mLuVYB7PmDHL0Mb4zkjqi/MU9Okx1escBZI4ASrkaXkp18= ssh_host_key_rsa_public_keytype: ssh-rsa swapfree_mb: 0 swaptotal_mb: 0 system: Linux system_capabilities: - '' system_capabilities_enforced: 'True' system_vendor: OpenStack Foundation uptime_seconds: 135 user_dir: /home/zuul user_gecos: '' user_gid: 1000 user_id: zuul user_shell: /bin/bash user_uid: 1000 userspace_architecture: x86_64 userspace_bits: '64' virtualization_role: guest virtualization_tech_guest: - openstack virtualization_tech_host: - kvm virtualization_type: openstack zuul_change_list: - watcher-operator ansible_fibre_channel_wwn: [] ansible_fips: false ansible_forks: 5 ansible_form_factor: Other ansible_fqdn: controller ansible_host: 38.102.83.51 ansible_hostname: controller ansible_hostnqn: nqn.2014-08.org.nvmexpress:uuid:2f7d2450-18ac-43a6-80ee-9caa4a7736e0 ansible_interfaces: - eth0 - lo ansible_inventory_sources: - /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/post_playbook_0/inventory.yaml ansible_is_chroot: false ansible_iscsi_iqn: '' ansible_kernel: 5.14.0-620.el9.x86_64 ansible_kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Sep 26 01:13:23 UTC 2025' ansible_lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' 
rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback ansible_loadavg: 15m: 0.03 1m: 0.14 5m: 0.09 ansible_local: {} ansible_locally_reachable_ips: ipv4: - 38.102.83.51 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:fe6f:820 ansible_lsb: {} ansible_lvm: N/A ansible_machine: x86_64 ansible_machine_id: 42833e1b511a402df82cb9cb2fc36491 ansible_memfree_mb: 3293 ansible_memory_mb: nocache: free: 3444 used: 211 real: free: 3293 total: 3655 used: 362 swap: cached: 0 free: 0 total: 0 used: 0 ansible_memtotal_mb: 3655 ansible_mounts: - block_available: 9966449 block_size: 4096 block_total: 10469115 block_used: 502666 device: /dev/vda1 fstype: xfs inode_available: 20916775 inode_total: 20970992 inode_used: 54217 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 40822575104 size_total: 42881495040 uuid: 1631a6ad-43b8-436d-ae76-16fa14b94458 ansible_nodename: controller ansible_os_family: RedHat ansible_parent_role_names: - cifmw_setup ansible_parent_role_paths: - /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/roles/cifmw_setup ansible_pkg_mgr: dnf ansible_play_batch: &id002 - controller ansible_play_hosts: - controller ansible_play_hosts_all: - compute-0 - compute-1 - controller - crc ansible_play_name: Run ci/playbooks/e2e-collect-logs.yml ansible_play_role_names: &id003 - os_must_gather - artifacts - env_op_images - cifmw_setup ansible_playbook_python: /usr/lib/zuul/ansible/8/bin/python ansible_port: 22 ansible_proc_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 ansible_processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor ansible_processor_cores: 1 ansible_processor_count: 2 ansible_processor_nproc: 2 ansible_processor_threads_per_core: 1 ansible_processor_vcpus: 2 ansible_product_name: OpenStack Nova ansible_product_serial: NA 
ansible_product_uuid: NA ansible_product_version: 26.2.1 ansible_python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 23 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 23 - final - 0 ansible_python_interpreter: auto ansible_python_version: 3.9.23 ansible_real_group_id: 1000 ansible_real_user_id: 1000 ansible_role_name: artifacts ansible_role_names: - os_must_gather - env_op_images - cifmw_setup - artifacts ansible_run_tags: - all ansible_scp_extra_args: -o PermitLocalCommand=no ansible_selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted ansible_selinux_python_present: true ansible_service_mgr: systemd ansible_sftp_extra_args: -o PermitLocalCommand=no ansible_skip_tags: [] ansible_ssh_common_args: -o PermitLocalCommand=no ansible_ssh_executable: ssh ansible_ssh_extra_args: -o PermitLocalCommand=no ansible_ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBDpEwzeDGLwNlfP3Up6vCxCw7kSSu0AiDUvDH/J+EepxMPGLLpzT0wX+lEXL9GArqfNU/UBUmiiwh9dZO9tQ5bk= ansible_ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ansible_ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIPrlPStzDnFCaI6YFfPj0aQKsBPAAZFkT8awb2RrAe7g ansible_ssh_host_key_ed25519_public_keytype: ssh-ed25519 ansible_ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQCsnSYzB9ciMqmgm0l3UC4GvkzqKIRU4HQjm2Wmmz4ONumnWKBZyfZPUd4C1zWgntSd7/HfwyQS5GOnhobA5K/1o855yq/Qr6a2M0JVvxnLdxB089mymIDZ9Z5iXDsVHJNPHKuz3pjoZDbA5XzpQPsDbEeMHpBd0Yz5DQaAPYYN1wg4Wtq6PK86i2jV8qtVH7OnCkn06futt/HtJ4eADwKZV6cutqDHmuTqXwagLJ7PWTm0H9xAYR/Tsgd28krH/EIdcyHBACqdSrk6FWPOdZ1Q5PjVC0ZOHemQeiRhmYW5NgxnnEgSmoTMCyMKRYbVcMYeHKRBg/rXhLSbymoU+eF+Kza486CELgT9KG4Z0NTOmyzNu1ee8G0ZOaowjIQ8Gr6e15WUMLbskShDGqXlAnaRHOAQhBSGCkt0N9KMyGaBdFYVzJOgqi1erPoCN1pLe7Ljr44blAH6Yvp9H8Ji4mLuVYB7PmDHL0Mb4zkjqi/MU9Okx1escBZI4ASrkaXkp18= ansible_ssh_host_key_rsa_public_keytype: ssh-rsa ansible_swapfree_mb: 0 ansible_swaptotal_mb: 0 ansible_system: Linux ansible_system_capabilities: - '' ansible_system_capabilities_enforced: 'True' ansible_system_vendor: OpenStack Foundation ansible_uptime_seconds: 135 ansible_user: zuul ansible_user_dir: /home/zuul ansible_user_gecos: '' ansible_user_gid: 1000 ansible_user_id: zuul ansible_user_shell: /bin/bash ansible_user_uid: 1000 ansible_userspace_architecture: x86_64 ansible_userspace_bits: '64' ansible_verbosity: 1 ansible_version: full: 2.15.12 major: 2 minor: 15 revision: 12 string: 2.15.12 ansible_virtualization_role: guest ansible_virtualization_tech_guest: - openstack ansible_virtualization_tech_host: - kvm ansible_virtualization_type: openstack cifmw_artifacts_basedir: '{{ cifmw_basedir | default(ansible_user_dir ~ ''/ci-framework-data'') }}' cifmw_artifacts_crc_host: api.crc.testing cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_artifacts_crc_sshkey_ed25519: ~/.crc/machines/crc/id_ed25519 cifmw_artifacts_crc_user: core cifmw_artifacts_gather_logs: true cifmw_artifacts_mask_logs: true cifmw_basedir: /home/zuul/ci-framework-data cifmw_build_images_output: {} cifmw_dlrn_report_result: false cifmw_edpm_telemetry_enabled_exporters: - podman_exporter - openstack_network_exporter cifmw_env_op_images_dir: '{{ cifmw_basedir | default(ansible_user_dir ~ ''/ci-framework-data'') }}' cifmw_env_op_images_dryrun: false cifmw_env_op_images_file: operator_images.yaml cifmw_extras: - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. 
src_dir }}/scenarios/centos-9/multinode-ci.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/horizon.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. src_dir }}/ci/scenarios/{{ watcher_scenario }}.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. src_dir }}/ci/tests/watcher-tempest.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '123456789' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_operator_build_output: operators: openstack-operator: git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9 git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator image: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9 image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9 image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9 watcher-operator: git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d cifmw_os_must_gather_additional_namespaces: kuttl,openshift-storage,openshift-marketplace,openshift-operators,sushy-emulator,tobiko cifmw_os_must_gather_dump_db: ALL cifmw_os_must_gather_host_network: false cifmw_os_must_gather_image: quay.io/openstack-k8s-operators/openstack-must-gather:latest cifmw_os_must_gather_image_push: true cifmw_os_must_gather_image_registry: quay.rdoproject.org/openstack-k8s-operators cifmw_os_must_gather_namespaces: - openstack-operators - openstack - baremetal-operator-system - openshift-machine-api - cert-manager - openshift-nmstate - openshift-marketplace - metallb-system - crc-storage cifmw_os_must_gather_output_dir: '{{ cifmw_basedir | default(ansible_user_dir ~ ''/ci-framework-data'') }}' cifmw_os_must_gather_repo_path: '{{ ansible_user_dir }}/src/github.com/openstack-k8s-operators/openstack-must-gather' cifmw_os_must_gather_timeout: 10m cifmw_path: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin cifmw_status: changed: false failed: false stat: atime: 1759784988.6636567 attr_flags: '' attributes: [] block_size: 4096 blocks: 8 charset: binary ctime: 1759784979.0963836 dev: 64513 device_type: 0 executable: true exists: true gid: 1000 gr_name: zuul inode: 8535629 isblk: false ischr: false isdir: true isfifo: false isgid: false islnk: false isreg: false issock: false isuid: false mimetype: inode/directory mode: '0755' mtime: 1759784979.0963836 nlink: 21 path: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework pw_name: zuul readable: true rgrp: true roth: true rusr: true size: 4096 uid: 1000 version: '2141595077' wgrp: false woth: false writeable: true wusr: true xgrp: true xoth: true xusr: true cifmw_success_flag: 
changed: false failed: false stat: exists: false cifmw_test_operator_tempest_external_plugin: - changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin repository: https://opendev.org/openstack/watcher-tempest-plugin.git cifmw_test_operator_tempest_image_tag: watcher_latest cifmw_test_operator_tempest_namespace: '{{ content_provider_os_registry_url | split(''/'') | last }}' cifmw_test_operator_tempest_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_openstack: false cifmw_update_containers_org: podified-epoxy-centos9 cifmw_update_containers_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_tag: watcher_latest cifmw_update_containers_watcher: true cifmw_use_libvirt: false cifmw_zuul_target_host: controller content_provider_dlrn_md5_hash: '' content_provider_os_registry_url: 38.102.83.53:5001/podified-epoxy-centos9 content_provider_registry_ip: 38.102.83.53 crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_instance_default_net_config: mtu: '1500' range: 192.168.122.0/24 router_net: '' transparent: true crc_ci_bootstrap_instance_nm_vlan_networks: - key: internal-api value: ip: 172.17.0.5 - key: storage value: ip: 172.18.0.5 - key: tenant value: ip: 172.19.0.5 crc_ci_bootstrap_instance_parent_port_create_yaml: admin_state_up: true allowed_address_pairs: [] binding_host_id: null binding_profile: {} binding_vif_details: {} binding_vif_type: null binding_vnic_type: normal created_at: '2025-10-06T21:00:58Z' data_plane_status: null description: '' device_id: '' device_owner: '' device_profile: null dns_assignment: - fqdn: host-192-168-122-10.openstacklocal. 
hostname: host-192-168-122-10 ip_address: 192.168.122.10 dns_domain: '' dns_name: '' extra_dhcp_opts: [] fixed_ips: - ip_address: 192.168.122.10 subnet_id: 139175d8-a9e6-4e3b-931b-a0af30583742 hardware_offload_type: null hints: '' id: 81f69e80-a4c7-43d8-ad59-1b24fcb3acbf ip_allocation: immediate mac_address: fa:16:3e:36:76:9b name: crc-bfd057b4-b43d-4dc5-bc10-e91bf10a649b network_id: febb7485-9e12-4711-8dc7-e207293e25de numa_affinity_policy: null port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 propagate_uplink_status: null qos_network_policy_id: null qos_policy_id: null resource_request: null revision_number: 1 security_group_ids: [] status: DOWN tags: [] trunk_details: null trusted: null updated_at: '2025-10-06T21:00:58Z' crc_ci_bootstrap_network_name: zuul-ci-net-9ce4c11f crc_ci_bootstrap_networking: instances: compute-0: networks: default: ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 tenant: config_nm: false ip: 172.19.0.100 compute-1: networks: default: ip: 192.168.122.101 internal-api: config_nm: false ip: 172.17.0.101 storage: config_nm: false ip: 172.18.0.101 tenant: config_nm: false ip: 172.19.0.101 controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 crc_ci_bootstrap_networks_out: compute-0: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.100/24 mac: fa:16:3e:1a:6b:7b mtu: '1500' internal-api: iface: eth1.20 ip: 172.17.0.100/24 mac: 52:54:00:ec:df:aa mtu: '1496' parent_iface: eth1 vlan: 20 storage: iface: eth1.21 ip: 172.18.0.100/24 mac: 52:54:00:e1:d1:4a mtu: '1496' parent_iface: eth1 vlan: 21 tenant: iface: eth1.22 ip: 172.19.0.100/24 mac: 52:54:00:6e:fd:3e mtu: '1496' parent_iface: eth1 vlan: 22 compute-1: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.101/24 mac: fa:16:3e:cb:47:1e mtu: '1500' internal-api: iface: eth1.20 ip: 172.17.0.101/24 mac: 52:54:00:69:15:f1 mtu: '1496' parent_iface: eth1 vlan: 20 storage: iface: eth1.21 ip: 172.18.0.101/24 mac: 52:54:00:c3:9b:d0 mtu: '1496' parent_iface: eth1 vlan: 21 tenant: iface: eth1.22 ip: 172.19.0.101/24 mac: 52:54:00:1d:8d:c8 mtu: '1496' parent_iface: eth1 vlan: 22 controller: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.11/24 mac: fa:16:3e:fc:47:4f mtu: '1500' crc: default: connection: ci-private-network gw: 192.168.122.1 iface: ens7 ip: 192.168.122.10/24 mac: fa:16:3e:36:76:9b mtu: '1500' internal-api: connection: ci-private-network-20 iface: ens7.20 ip: 172.17.0.5/24 mac: 52:54:00:aa:79:c3 mtu: '1496' parent_iface: ens7 vlan: 20 storage: connection: ci-private-network-21 iface: ens7.21 ip: 172.18.0.5/24 mac: 52:54:00:bd:b2:92 mtu: '1496' parent_iface: ens7 vlan: 21 tenant: connection: ci-private-network-22 iface: ens7.22 ip: 172.19.0.5/24 mac: 52:54:00:86:1f:43 mtu: '1496' parent_iface: ens7 vlan: 22 crc_ci_bootstrap_private_net_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2025-10-06T20:59:40Z' description: '' dns_domain: '' id: febb7485-9e12-4711-8dc7-e207293e25de ipv4_address_scope: null 
ipv6_address_scope: null is_default: false is_vlan_qinq: null is_vlan_transparent: true l2_adjacency: true mtu: 1500 name: zuul-ci-net-9ce4c11f port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 provider:network_type: null provider:physical_network: null provider:segmentation_id: null qos_policy_id: null revision_number: 1 router:external: false segments: null shared: false status: ACTIVE subnets: [] tags: [] updated_at: '2025-10-06T20:59:40Z' crc_ci_bootstrap_private_router_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2025-10-06T20:59:45Z' description: '' enable_ndp_proxy: null external_gateway_info: null flavor_id: null id: 4c5c07da-6180-4e43-8bfc-7faf50c6c9a5 name: zuul-ci-subnet-router-9ce4c11f project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 1 routes: [] status: ACTIVE tags: [] tenant_id: 4b633c451ac74233be3721a3635275e5 updated_at: '2025-10-06T20:59:45Z' crc_ci_bootstrap_private_subnet_create_yaml: allocation_pools: - end: 192.168.122.254 start: 192.168.122.2 cidr: 192.168.122.0/24 created_at: '2025-10-06T20:59:43Z' description: '' dns_nameservers: [] dns_publish_fixed_ip: null enable_dhcp: false gateway_ip: 192.168.122.1 host_routes: [] id: 139175d8-a9e6-4e3b-931b-a0af30583742 ip_version: 4 ipv6_address_mode: null ipv6_ra_mode: null name: zuul-ci-subnet-9ce4c11f network_id: febb7485-9e12-4711-8dc7-e207293e25de project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 0 segment_id: null service_types: [] subnetpool_id: null tags: [] updated_at: '2025-10-06T20:59:43Z' crc_ci_bootstrap_provider_dns: - 199.204.44.24 - 199.204.47.54 crc_ci_bootstrap_router_name: zuul-ci-subnet-router-9ce4c11f crc_ci_bootstrap_subnet_name: zuul-ci-subnet-9ce4c11f discovered_interpreter_python: /usr/bin/python3 enable_ramdisk: true environment: - ANSIBLE_LOG_PATH: '{{ ansible_user_dir }}/ci-framework-data/logs/e2e-collect-logs-must-gather.log' fetch_dlrn_hash: false gather_subset: - min group_names: - ungrouped groups: all: - compute-0 - compute-1 - controller - crc computes: - compute-0 - compute-1 ocps: - crc ungrouped: &id001 - controller zuul_unreachable: [] hostvars: compute-0: ansible_all_ipv4_addresses: - 38.102.83.32 ansible_all_ipv6_addresses: - fe80::f816:3eff:fe0e:bbd6 ansible_apparmor: status: disabled ansible_architecture: x86_64 ansible_bios_date: 04/01/2014 ansible_bios_vendor: SeaBIOS ansible_bios_version: 1.15.0-1 ansible_board_asset_tag: NA ansible_board_name: NA ansible_board_serial: NA ansible_board_vendor: NA ansible_board_version: NA ansible_chassis_asset_tag: NA ansible_chassis_serial: NA ansible_chassis_vendor: QEMU ansible_chassis_version: pc-i440fx-6.2 ansible_check_mode: false ansible_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 ansible_config_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/post_playbook_0/ansible.cfg ansible_connection: ssh ansible_date_time: date: '2025-10-06' day: '06' epoch: '1759784333' epoch_int: '1759784333' hour: '16' iso8601: '2025-10-06T20:58:53Z' iso8601_basic: 20251006T165853820120 iso8601_basic_short: 20251006T165853 iso8601_micro: '2025-10-06T20:58:53.820120Z' minute: '58' month: '10' second: '53' time: '16:58:53' tz: EDT tz_dst: EDT tz_offset: '-0400' weekday: Monday weekday_number: '1' weeknumber: '40' year: '2025' ansible_default_ipv4: address: 
38.102.83.32 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: fa:16:3e:0e:bb:d6 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether ansible_default_ipv6: {} ansible_device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 masters: {} uuids: sr0: - 2025-10-06-20-54-49-00 vda1: - 1631a6ad-43b8-436d-ae76-16fa14b94458 ansible_devices: sr0: holders: [] host: '' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2025-10-06-20-54-49-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: - 1631a6ad-43b8-436d-ae76-16fa14b94458 sectors: '167770079' sectorsize: 512 size: 80.00 GB start: '2048' uuid: 1631a6ad-43b8-436d-ae76-16fa14b94458 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '167772160' sectorsize: '512' size: 80.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 ansible_diff_mode: false ansible_distribution: CentOS ansible_distribution_file_parsed: true ansible_distribution_file_path: /etc/centos-release ansible_distribution_file_variety: CentOS ansible_distribution_major_version: '9' ansible_distribution_release: Stream ansible_distribution_version: '9' ansible_dns: nameservers: - 199.204.44.24 - 199.204.47.54 search: - novalocal ansible_domain: '' ansible_effective_group_id: 1000 ansible_effective_user_id: 1000 ansible_env: BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' DEBUGINFOD_URLS: 'https://debuginfod.centos.org/ ' HOME: /home/zuul LANG: en_US.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: /home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 46990 22 SSH_CONNECTION: 38.102.83.114 46990 38.102.83.32 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '1' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f ansible_eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] 
tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.32 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::f816:3eff:fe0e:bbd6 prefix: '64' scope: link macaddress: fa:16:3e:0e:bb:d6 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether ansible_facts: _ansible_facts_gathered: true all_ipv4_addresses: - 38.102.83.32 all_ipv6_addresses: - fe80::f816:3eff:fe0e:bbd6 ansible_local: {} apparmor: status: disabled architecture: x86_64 bios_date: 04/01/2014 bios_vendor: SeaBIOS bios_version: 1.15.0-1 board_asset_tag: NA board_name: NA board_serial: NA board_vendor: NA board_version: NA chassis_asset_tag: NA chassis_serial: NA chassis_vendor: QEMU chassis_version: pc-i440fx-6.2 cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 date_time: date: '2025-10-06' day: '06' epoch: '1759784333' epoch_int: '1759784333' hour: '16' iso8601: '2025-10-06T20:58:53Z' iso8601_basic: 20251006T165853820120 iso8601_basic_short: 20251006T165853 iso8601_micro: '2025-10-06T20:58:53.820120Z' minute: '58' month: '10' second: '53' time: '16:58:53' tz: EDT tz_dst: EDT tz_offset: '-0400' weekday: Monday weekday_number: '1' weeknumber: '40' year: '2025' default_ipv4: address: 38.102.83.32 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: fa:16:3e:0e:bb:d6 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether default_ipv6: {} device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 masters: {} uuids: sr0: - 2025-10-06-20-54-49-00 vda1: - 1631a6ad-43b8-436d-ae76-16fa14b94458 devices: sr0: holders: [] host: '' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2025-10-06-20-54-49-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: - 1631a6ad-43b8-436d-ae76-16fa14b94458 sectors: '167770079' sectorsize: 512 size: 80.00 GB start: '2048' uuid: 1631a6ad-43b8-436d-ae76-16fa14b94458 removable: '0' rotational: '1' sas_address: null sas_device_handle: null 
scheduler_mode: none sectors: '167772160' sectorsize: '512' size: 80.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 discovered_interpreter_python: /usr/bin/python3 distribution: CentOS distribution_file_parsed: true distribution_file_path: /etc/centos-release distribution_file_variety: CentOS distribution_major_version: '9' distribution_release: Stream distribution_version: '9' dns: nameservers: - 199.204.44.24 - 199.204.47.54 search: - novalocal domain: '' effective_group_id: 1000 effective_user_id: 1000 env: BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' DEBUGINFOD_URLS: 'https://debuginfod.centos.org/ ' HOME: /home/zuul LANG: en_US.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: /home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 46990 22 SSH_CONNECTION: 38.102.83.114 46990 38.102.83.32 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '1' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.32 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::f816:3eff:fe0e:bbd6 prefix: '64' scope: link macaddress: fa:16:3e:0e:bb:d6 module: virtio_net mtu: 
1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether fibre_channel_wwn: [] fips: false form_factor: Other fqdn: compute-0 gather_subset: - all hostname: compute-0 hostnqn: nqn.2014-08.org.nvmexpress:uuid:2f7d2450-18ac-43a6-80ee-9caa4a7736e0 interfaces: - lo - eth0 is_chroot: false iscsi_iqn: '' kernel: 5.14.0-620.el9.x86_64 kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Sep 26 01:13:23 UTC 2025' lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback loadavg: 15m: 0.02 1m: 0.28 5m: 0.08 locally_reachable_ips: ipv4: - 38.102.83.32 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:fe0e:bbd6 lsb: {} lvm: N/A machine: x86_64 machine_id: 42833e1b511a402df82cb9cb2fc36491 memfree_mb: 7254 memory_mb: nocache: free: 7404 used: 275 real: free: 7254 total: 7679 used: 425 swap: cached: 0 free: 0 total: 0 used: 0 memtotal_mb: 7679 module_setup: true mounts: - block_available: 20378766 block_size: 4096 block_total: 20954875 block_used: 576109 device: /dev/vda1 fstype: xfs inode_available: 41888295 inode_total: 41942512 inode_used: 54217 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 83471425536 size_total: 85831168000 uuid: 1631a6ad-43b8-436d-ae76-16fa14b94458 nodename: compute-0 os_family: RedHat pkg_mgr: dnf proc_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: 
UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor processor_cores: 1 processor_count: 8 processor_nproc: 8 processor_threads_per_core: 1 processor_vcpus: 8 product_name: OpenStack Nova product_serial: NA product_uuid: NA product_version: 26.2.1 python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 23 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 23 - final - 0 python_version: 3.9.23 real_group_id: 1000 real_user_id: 1000 selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted selinux_python_present: true service_mgr: systemd ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBIg93BG4tSk/84rOcp3l7cvT84i0YRnciDkOHZlwA3wQIFgrL1A0rqYai7TpTc1TpKzwhnzmSp31Tf1y362AD5M= ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIPFRJ9KVOyWI6b5GPMrBBHR84riUKq85zgonp/Fzjoym ssh_host_key_ed25519_public_keytype: ssh-ed25519 ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQDUrGIqXWX1G68eUKbYgZhPfXeqi0/aRwJ752atv5Fvu5YvwS+xZ9qPFN1/H0W6GqdzZ2BIjA8F9fjVp0kUEiFo0A2RuRUOkcrXmArHjai5O+ndkpDgh7A6b9FhR5uwB7ADP+oDKz4tEvPnIhpveDDYPIyiDVZvJH9EQJJ4FiYph2ILtSvWACLyW6wcx9Zh5TA5EljES6LEyvbSP0v1LS7xW9mB2GQbYOebDcODeIYoCZcW/GqC7iVay5Lyfa1BVecf0xDcwifAb1A/0izjJT5Es3eAYvHW89zG84GLCvvtYfsvlFYoq58okz5oMrPQ7I2ypFZ2njF5Hkb3Hbpj2E/Fku66rJhbhevVuIvCxoW/r/YQ/w4r7YU30LMs9GSOITwwxgCt6kshYGAOW6oMfjvp7r5fVwSqZxl36++Xb58XoziGdog7cpXjG0fbWJnDi1pYhIaOnzH203XUdOLntFy969F+tMQ0AoWIt8vLK1LmtB6lOiLnTFkiWRs9i+ziczc= ssh_host_key_rsa_public_keytype: ssh-rsa swapfree_mb: 0 swaptotal_mb: 0 system: Linux system_capabilities: - '' system_capabilities_enforced: 'True' system_vendor: OpenStack Foundation uptime_seconds: 233 user_dir: /home/zuul user_gecos: '' user_gid: 1000 user_id: zuul user_shell: /bin/bash user_uid: 1000 userspace_architecture: x86_64 userspace_bits: '64' virtualization_role: guest virtualization_tech_guest: - openstack virtualization_tech_host: - kvm virtualization_type: openstack ansible_fibre_channel_wwn: [] ansible_fips: false ansible_forks: 5 ansible_form_factor: Other ansible_fqdn: compute-0 ansible_host: 38.102.83.32 ansible_hostname: compute-0 ansible_hostnqn: nqn.2014-08.org.nvmexpress:uuid:2f7d2450-18ac-43a6-80ee-9caa4a7736e0 ansible_interfaces: - lo - eth0 ansible_inventory_sources: - /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/post_playbook_0/inventory.yaml ansible_is_chroot: false ansible_iscsi_iqn: '' ansible_kernel: 5.14.0-620.el9.x86_64 ansible_kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Sep 26 01:13:23 UTC 2025' ansible_lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] 
rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback ansible_loadavg: 15m: 0.02 1m: 0.28 5m: 0.08 ansible_local: {} ansible_locally_reachable_ips: ipv4: - 38.102.83.32 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:fe0e:bbd6 ansible_lsb: {} ansible_lvm: N/A ansible_machine: x86_64 ansible_machine_id: 42833e1b511a402df82cb9cb2fc36491 ansible_memfree_mb: 7254 ansible_memory_mb: nocache: free: 7404 used: 275 real: free: 7254 total: 7679 used: 425 swap: cached: 0 free: 0 total: 0 used: 0 ansible_memtotal_mb: 7679 ansible_mounts: - block_available: 20378766 block_size: 4096 block_total: 20954875 block_used: 576109 device: /dev/vda1 fstype: xfs inode_available: 41888295 inode_total: 41942512 inode_used: 54217 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 83471425536 size_total: 85831168000 uuid: 1631a6ad-43b8-436d-ae76-16fa14b94458 ansible_nodename: compute-0 ansible_os_family: RedHat ansible_pkg_mgr: dnf ansible_playbook_python: /usr/lib/zuul/ansible/8/bin/python ansible_port: 22 ansible_proc_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 ansible_processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor ansible_processor_cores: 1 ansible_processor_count: 8 ansible_processor_nproc: 8 ansible_processor_threads_per_core: 1 ansible_processor_vcpus: 8 ansible_product_name: OpenStack Nova ansible_product_serial: NA ansible_product_uuid: NA ansible_product_version: 26.2.1 ansible_python: executable: /usr/bin/python3 
has_sslcontext: true type: cpython version: major: 3 micro: 23 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 23 - final - 0 ansible_python_interpreter: auto ansible_python_version: 3.9.23 ansible_real_group_id: 1000 ansible_real_user_id: 1000 ansible_run_tags: - all ansible_scp_extra_args: -o PermitLocalCommand=no ansible_selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted ansible_selinux_python_present: true ansible_service_mgr: systemd ansible_sftp_extra_args: -o PermitLocalCommand=no ansible_skip_tags: [] ansible_ssh_common_args: -o PermitLocalCommand=no ansible_ssh_executable: ssh ansible_ssh_extra_args: -o PermitLocalCommand=no ansible_ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBIg93BG4tSk/84rOcp3l7cvT84i0YRnciDkOHZlwA3wQIFgrL1A0rqYai7TpTc1TpKzwhnzmSp31Tf1y362AD5M= ansible_ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ansible_ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIPFRJ9KVOyWI6b5GPMrBBHR84riUKq85zgonp/Fzjoym ansible_ssh_host_key_ed25519_public_keytype: ssh-ed25519 ansible_ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQDUrGIqXWX1G68eUKbYgZhPfXeqi0/aRwJ752atv5Fvu5YvwS+xZ9qPFN1/H0W6GqdzZ2BIjA8F9fjVp0kUEiFo0A2RuRUOkcrXmArHjai5O+ndkpDgh7A6b9FhR5uwB7ADP+oDKz4tEvPnIhpveDDYPIyiDVZvJH9EQJJ4FiYph2ILtSvWACLyW6wcx9Zh5TA5EljES6LEyvbSP0v1LS7xW9mB2GQbYOebDcODeIYoCZcW/GqC7iVay5Lyfa1BVecf0xDcwifAb1A/0izjJT5Es3eAYvHW89zG84GLCvvtYfsvlFYoq58okz5oMrPQ7I2ypFZ2njF5Hkb3Hbpj2E/Fku66rJhbhevVuIvCxoW/r/YQ/w4r7YU30LMs9GSOITwwxgCt6kshYGAOW6oMfjvp7r5fVwSqZxl36++Xb58XoziGdog7cpXjG0fbWJnDi1pYhIaOnzH203XUdOLntFy969F+tMQ0AoWIt8vLK1LmtB6lOiLnTFkiWRs9i+ziczc= ansible_ssh_host_key_rsa_public_keytype: ssh-rsa ansible_swapfree_mb: 0 ansible_swaptotal_mb: 0 ansible_system: Linux ansible_system_capabilities: - '' ansible_system_capabilities_enforced: 'True' ansible_system_vendor: OpenStack Foundation ansible_uptime_seconds: 233 ansible_user: zuul ansible_user_dir: /home/zuul ansible_user_gecos: '' ansible_user_gid: 1000 ansible_user_id: zuul ansible_user_shell: /bin/bash ansible_user_uid: 1000 ansible_userspace_architecture: x86_64 ansible_userspace_bits: '64' ansible_verbosity: 1 ansible_version: full: 2.15.12 major: 2 minor: 15 revision: 12 string: 2.15.12 ansible_virtualization_role: guest ansible_virtualization_tech_guest: - openstack ansible_virtualization_tech_host: - kvm ansible_virtualization_type: openstack cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_build_images_output: {} cifmw_dlrn_report_result: false cifmw_edpm_telemetry_enabled_exporters: - podman_exporter - openstack_network_exporter cifmw_extras: - '@/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/scenarios/centos-9/multinode-ci.yml' - '@/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/scenarios/centos-9/horizon.yml' - '@/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/scenarios/edpm-no-notifications.yml' - '@/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/tests/watcher-tempest.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: /home/zuul/.crc/machines/crc/kubeconfig cifmw_openshift_password: '123456789' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_operator_build_output: operators: openstack-operator: git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9 git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator image: 
38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9 image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9 image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9 watcher-operator: git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d cifmw_test_operator_tempest_external_plugin: - changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin repository: https://opendev.org/openstack/watcher-tempest-plugin.git cifmw_test_operator_tempest_image_tag: watcher_latest cifmw_test_operator_tempest_namespace: podified-epoxy-centos9 cifmw_test_operator_tempest_registry: 38.102.83.53:5001 cifmw_update_containers_openstack: false cifmw_update_containers_org: podified-epoxy-centos9 cifmw_update_containers_registry: 38.102.83.53:5001 cifmw_update_containers_tag: watcher_latest cifmw_update_containers_watcher: true cifmw_use_libvirt: false cifmw_zuul_target_host: controller content_provider_dlrn_md5_hash: '' content_provider_os_registry_url: 38.102.83.53:5001/podified-epoxy-centos9 content_provider_registry_ip: 38.102.83.53 crc_ci_bootstrap_cloud_name: vexxhost crc_ci_bootstrap_networking: instances: compute-0: networks: default: ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 tenant: config_nm: false ip: 172.19.0.100 compute-1: networks: default: ip: 192.168.122.101 internal-api: config_nm: false ip: 172.17.0.101 storage: config_nm: false ip: 172.18.0.101 tenant: config_nm: false ip: 172.19.0.101 controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: '1500' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 discovered_interpreter_python: /usr/bin/python3 enable_ramdisk: true fetch_dlrn_hash: false gather_subset: - all group_names: - computes groups: all: - compute-0 - compute-1 - controller - crc computes: - compute-0 - compute-1 ocps: - crc ungrouped: *id001 zuul_unreachable: [] inventory_dir: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/post_playbook_0 inventory_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/post_playbook_0/inventory.yaml inventory_hostname: compute-0 inventory_hostname_short: compute-0 module_setup: true nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 660cfe67-cb09-4d1b-96c1-30f05d27cde7 host_id: d19710e37f7b2620eb9f1bc9cfdfc06732b1f0c31221781941dd4533 interface_ip: 38.102.83.32 label: cloud-centos-9-stream-tripleo private_ipv4: 38.102.83.32 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.32 public_ipv6: '' region: RegionOne slot: null omit: __omit_place_holder__7c6be8c090d39d526a4b5005be5e95dd82bdf54e 
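Several of the variables here appear twice: once resolved (for example crc_ci_bootstrap_cloud_name: vexxhost and the default-network mtu: '1500' above) and once in templated form under unsafe_vars below. A minimal sketch, using plain Python in place of the Jinja2 replace and ternary filters, shows how those two templates resolve for this job's nodepool.cloud:

# Assumption: plain Python standing in for the Jinja2 filters used in the
# templated variables; the resolved results match the values recorded above.
nodepool_cloud = "vexxhost-nodepool-tripleo"

# '{{ nodepool.cloud | replace("-nodepool-tripleo", "") }}'
cloud_name = nodepool_cloud.replace("-nodepool-tripleo", "")
assert cloud_name == "vexxhost"

# '{{ ("ibm" in nodepool.cloud) | ternary("1440", "1500") }}'
default_net_mtu = "1440" if "ibm" in nodepool_cloud else "1500"
assert default_net_mtu == "1500"

print(cloud_name, default_net_mtu)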
playbook_dir: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true unsafe_vars: ansible_connection: ssh ansible_host: 38.102.83.32 ansible_port: 22 ansible_python_interpreter: auto ansible_user: zuul cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_build_images_output: {} cifmw_dlrn_report_result: false cifmw_edpm_telemetry_enabled_exporters: - podman_exporter - openstack_network_exporter cifmw_extras: - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/multinode-ci.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/horizon.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. src_dir }}/ci/scenarios/{{ watcher_scenario }}.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. src_dir }}/ci/tests/watcher-tempest.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '123456789' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_operator_build_output: operators: openstack-operator: git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9 git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator image: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9 image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9 image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9 watcher-operator: git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d cifmw_test_operator_tempest_external_plugin: - changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin repository: https://opendev.org/openstack/watcher-tempest-plugin.git cifmw_test_operator_tempest_image_tag: watcher_latest cifmw_test_operator_tempest_namespace: '{{ content_provider_os_registry_url | split(''/'') | last }}' cifmw_test_operator_tempest_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_openstack: false cifmw_update_containers_org: podified-epoxy-centos9 cifmw_update_containers_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_tag: watcher_latest cifmw_update_containers_watcher: true cifmw_use_libvirt: false cifmw_zuul_target_host: controller content_provider_dlrn_md5_hash: '' content_provider_os_registry_url: 38.102.83.53:5001/podified-epoxy-centos9 content_provider_registry_ip: 38.102.83.53 crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' 
crc_ci_bootstrap_networking: instances: compute-0: networks: default: ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 tenant: config_nm: false ip: 172.19.0.100 compute-1: networks: default: ip: 192.168.122.101 internal-api: config_nm: false ip: 172.17.0.101 storage: config_nm: false ip: 172.18.0.101 tenant: config_nm: false ip: 172.19.0.101 controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 enable_ramdisk: true fetch_dlrn_hash: false nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 660cfe67-cb09-4d1b-96c1-30f05d27cde7 host_id: d19710e37f7b2620eb9f1bc9cfdfc06732b1f0c31221781941dd4533 interface_ip: 38.102.83.32 label: cloud-centos-9-stream-tripleo private_ipv4: 38.102.83.32 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.32 public_ipv6: '' region: RegionOne slot: null push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true watcher_scenario: edpm-no-notifications watcher_services_tag: watcher_latest watcher_tempest_max_microversion: '1.4' zuul_log_collection: false watcher_scenario: edpm-no-notifications watcher_services_tag: watcher_latest watcher_tempest_max_microversion: '1.4' zuul: _inheritance_path: - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' ansible_version: '8' attempts: 1 branch: main build: 9ce4c11f9f6a4904bf6148a8276a3232 build_refs: - branch: main change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null buildset: f9416ac601264548b137ce1f44fe627c buildset_refs: - branch: main change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from 
enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 child_jobs: [] commit_id: 14377136e67c9cd67507a059bfde2f19f140387d event_id: 7dde6e80-a2f2-11f0-83f1-b4af7183f5ac executor: hostname: ze01.softwarefactory-project.io inventory_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/inventory.yaml log_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/logs result_data_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/results.json src_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/src work_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work items: - branch: main change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null job: watcher-operator-validation-epoxy-ocp4-16 jobtags: [] max_attempts: 1 message: W1dJUF0gTW92ZSByYWJiaXRtcSBub3RpZmljYXRpb25zIHF1ZXVlIHRvIGNvbnRyb2xwbGFuZSBsZXZlbAoKTW92ZSByYWJiaXRtcSBub3RpZmljYXRpb25zIHF1ZXVlIGZyb20gZW5hYmxpbmcgYXQgbm92YS9jaW5kZXIvd2F0Y2hlciBsZXZlbCB0byBvcGVuc3RhY2sgY29udHJvbHBsYW5lIGxldmVsIGFmdGVyIHRoYXQgdXNhZ2UgaXMgYXZhaWxhYmxlIHNpbmNlIGh0dHBzOi8vZ2l0aHViLmNvbS9vcGVuc3RhY2stazhzLW9wZXJhdG9ycy9vcGVuc3RhY2stb3BlcmF0b3IvcHVsbC8xNTkx patchset: 14377136e67c9cd67507a059bfde2f19f140387d pipeline: github-check playbook_context: playbook_projects: trusted/project_0/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 941f6f7666fdff0145523beb29ceda8db25c234c trusted/project_1/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df trusted/project_2/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d207d5ad1c5824d6db58c2eb5935a8b36674cbe4 trusted/project_3/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 untrusted/project_0/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 untrusted/project_1/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 941f6f7666fdff0145523beb29ceda8db25c234c untrusted/project_2/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df untrusted/project_3/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d207d5ad1c5824d6db58c2eb5935a8b36674cbe4 playbooks: - path: 
untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks/edpm/run.yml roles: - checkout: main checkout_description: playbook branch link_name: ansible/playbook_0/role_0/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_0/role_0/ci-framework/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_1/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_0/role_1/config/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_2/zuul-jobs link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_0/role_2/zuul-jobs/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_3/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_0/role_3/rdo-jobs/roles post_review: false project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator projects: github.com/crc-org/crc-cloud: canonical_hostname: github.com canonical_name: github.com/crc-org/crc-cloud checkout: main checkout_description: project override ref commit: f6ed2f2d118884a075895bbf954ff6000e540430 name: crc-org/crc-cloud required: true short_name: crc-cloud src_dir: src/github.com/crc-org/crc-cloud github.com/openstack-k8s-operators/ci-framework: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main checkout_description: zuul branch commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 name: openstack-k8s-operators/ci-framework required: true short_name: ci-framework src_dir: src/github.com/openstack-k8s-operators/ci-framework github.com/openstack-k8s-operators/edpm-ansible: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/edpm-ansible checkout: main checkout_description: zuul branch commit: 95aa63de3182faad63a69301d101debad3efc936 name: openstack-k8s-operators/edpm-ansible required: true short_name: edpm-ansible src_dir: src/github.com/openstack-k8s-operators/edpm-ansible github.com/openstack-k8s-operators/infra-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/infra-operator checkout: main checkout_description: zuul branch commit: 2b5048bbcae44dfeaacbb43830318ca45c13f182 name: openstack-k8s-operators/infra-operator required: true short_name: infra-operator src_dir: src/github.com/openstack-k8s-operators/infra-operator github.com/openstack-k8s-operators/install_yamls: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/install_yamls checkout: main checkout_description: zuul branch commit: bb26118ddc70016cbd2118a0b0a35d5f6ab9c343 name: openstack-k8s-operators/install_yamls required: true short_name: install_yamls src_dir: src/github.com/openstack-k8s-operators/install_yamls github.com/openstack-k8s-operators/openstack-baremetal-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-baremetal-operator checkout: main checkout_description: zuul branch commit: 3bf7652f010ead15ac2d2fec7e3b71c442b8fb8d name: openstack-k8s-operators/openstack-baremetal-operator required: true short_name: openstack-baremetal-operator 
src_dir: src/github.com/openstack-k8s-operators/openstack-baremetal-operator github.com/openstack-k8s-operators/openstack-must-gather: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-must-gather checkout: main checkout_description: zuul branch commit: 748dff8508cbb49e00426d46a4487b9f4c0b0096 name: openstack-k8s-operators/openstack-must-gather required: true short_name: openstack-must-gather src_dir: src/github.com/openstack-k8s-operators/openstack-must-gather github.com/openstack-k8s-operators/openstack-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-operator checkout: main checkout_description: zuul branch commit: 245af87e94976809f2023f59c19dffb95df97ed9 name: openstack-k8s-operators/openstack-operator required: true short_name: openstack-operator src_dir: src/github.com/openstack-k8s-operators/openstack-operator github.com/openstack-k8s-operators/repo-setup: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/repo-setup checkout: main checkout_description: zuul branch commit: 37b10946c6a10f9fa26c13305f06bfd6867e723f name: openstack-k8s-operators/repo-setup required: true short_name: repo-setup src_dir: src/github.com/openstack-k8s-operators/repo-setup github.com/openstack-k8s-operators/watcher-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator checkout: main checkout_description: zuul branch commit: 14377136e67c9cd67507a059bfde2f19f140387d name: openstack-k8s-operators/watcher-operator required: false short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator opendev.org/zuul/zuul-jobs: canonical_hostname: opendev.org canonical_name: opendev.org/zuul/zuul-jobs checkout: master checkout_description: project default branch commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df name: zuul/zuul-jobs required: true short_name: zuul-jobs src_dir: src/opendev.org/zuul/zuul-jobs review.rdoproject.org/config: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/config checkout: master checkout_description: project default branch commit: 941f6f7666fdff0145523beb29ceda8db25c234c name: config required: true short_name: config src_dir: src/review.rdoproject.org/config ref: refs/pull/287/head resources: {} tenant: rdoproject.org timeout: 10800 topic: null voting: true zuul_execution_branch: main zuul_execution_canonical_name_and_path: github.com/openstack-k8s-operators/ci-framework/ci/playbooks/e2e-collect-logs.yml zuul_execution_phase: post zuul_execution_phase_index: '0' zuul_execution_trusted: 'False' zuul_log_collection: false zuul_success: 'False' zuul_will_retry: 'False' compute-1: ansible_all_ipv4_addresses: - 38.102.83.194 ansible_all_ipv6_addresses: - fe80::f816:3eff:fe66:118a ansible_apparmor: status: disabled ansible_architecture: x86_64 ansible_bios_date: 04/01/2014 ansible_bios_vendor: SeaBIOS ansible_bios_version: 1.15.0-1 ansible_board_asset_tag: NA ansible_board_name: NA ansible_board_serial: NA ansible_board_vendor: NA ansible_board_version: NA ansible_chassis_asset_tag: NA ansible_chassis_serial: NA ansible_chassis_vendor: QEMU ansible_chassis_version: pc-i440fx-6.2 ansible_check_mode: false ansible_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 
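The zuul.message field recorded in the compute-0 variables above is the commit message of the triggering change, base64-encoded by Zuul; decoding it reproduces the change_message text that appears in clear elsewhere in these variables. A minimal sketch (only the first part of the encoded string is reproduced here):

# Decode the leading portion of the zuul.message value captured above.
import base64

encoded = "W1dJUF0gTW92ZSByYWJiaXRtcSBub3RpZmljYXRpb25zIHF1ZXVlIHRvIGNvbnRyb2xwbGFuZSBsZXZlbAoK"
print(base64.b64decode(encoded).decode("utf-8"))
# -> "[WIP] Move rabbitmq notifications queue to controlplane level" plus a blank line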
ansible_config_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/post_playbook_0/ansible.cfg ansible_connection: ssh ansible_date_time: date: '2025-10-06' day: '06' epoch: '1759784332' epoch_int: '1759784332' hour: '16' iso8601: '2025-10-06T20:58:52Z' iso8601_basic: 20251006T165852997277 iso8601_basic_short: 20251006T165852 iso8601_micro: '2025-10-06T20:58:52.997277Z' minute: '58' month: '10' second: '52' time: '16:58:52' tz: EDT tz_dst: EDT tz_offset: '-0400' weekday: Monday weekday_number: '1' weeknumber: '40' year: '2025' ansible_default_ipv4: address: 38.102.83.194 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: fa:16:3e:66:11:8a mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether ansible_default_ipv6: {} ansible_device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 masters: {} uuids: sr0: - 2025-10-06-20-57-24-00 vda1: - 1631a6ad-43b8-436d-ae76-16fa14b94458 ansible_devices: sr0: holders: [] host: '' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2025-10-06-20-57-24-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: - 1631a6ad-43b8-436d-ae76-16fa14b94458 sectors: '167770079' sectorsize: 512 size: 80.00 GB start: '2048' uuid: 1631a6ad-43b8-436d-ae76-16fa14b94458 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '167772160' sectorsize: '512' size: 80.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 ansible_diff_mode: false ansible_distribution: CentOS ansible_distribution_file_parsed: true ansible_distribution_file_path: /etc/centos-release ansible_distribution_file_variety: CentOS ansible_distribution_major_version: '9' ansible_distribution_release: Stream ansible_distribution_version: '9' ansible_dns: nameservers: - 199.204.44.24 - 199.204.47.54 search: - novalocal ansible_domain: '' ansible_effective_group_id: 1000 ansible_effective_user_id: 1000 ansible_env: BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' DEBUGINFOD_URLS: 'https://debuginfod.centos.org/ ' HOME: /home/zuul LANG: en_US.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: /home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 50410 22 SSH_CONNECTION: 38.102.83.114 50410 38.102.83.194 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '1' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f ansible_eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] 
l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.194 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::f816:3eff:fe66:118a prefix: '64' scope: link macaddress: fa:16:3e:66:11:8a module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether ansible_facts: _ansible_facts_gathered: true all_ipv4_addresses: - 38.102.83.194 all_ipv6_addresses: - fe80::f816:3eff:fe66:118a ansible_local: {} apparmor: status: disabled architecture: x86_64 bios_date: 04/01/2014 bios_vendor: SeaBIOS bios_version: 1.15.0-1 board_asset_tag: NA board_name: NA board_serial: NA board_vendor: NA board_version: NA chassis_asset_tag: NA chassis_serial: NA chassis_vendor: QEMU chassis_version: pc-i440fx-6.2 cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 date_time: date: '2025-10-06' day: '06' epoch: '1759784332' epoch_int: '1759784332' hour: '16' iso8601: '2025-10-06T20:58:52Z' iso8601_basic: 20251006T165852997277 iso8601_basic_short: 20251006T165852 iso8601_micro: '2025-10-06T20:58:52.997277Z' minute: '58' month: '10' second: '52' time: '16:58:52' tz: EDT tz_dst: EDT tz_offset: '-0400' weekday: Monday weekday_number: '1' weeknumber: '40' year: '2025' default_ipv4: address: 38.102.83.194 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: fa:16:3e:66:11:8a mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether default_ipv6: {} device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 masters: {} uuids: sr0: - 2025-10-06-20-57-24-00 vda1: - 1631a6ad-43b8-436d-ae76-16fa14b94458 devices: sr0: holders: [] host: '' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2025-10-06-20-57-24-00 model: QEMU DVD-ROM partitions: {} 
removable: '1' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: - 1631a6ad-43b8-436d-ae76-16fa14b94458 sectors: '167770079' sectorsize: 512 size: 80.00 GB start: '2048' uuid: 1631a6ad-43b8-436d-ae76-16fa14b94458 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '167772160' sectorsize: '512' size: 80.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 discovered_interpreter_python: /usr/bin/python3 distribution: CentOS distribution_file_parsed: true distribution_file_path: /etc/centos-release distribution_file_variety: CentOS distribution_major_version: '9' distribution_release: Stream distribution_version: '9' dns: nameservers: - 199.204.44.24 - 199.204.47.54 search: - novalocal domain: '' effective_group_id: 1000 effective_user_id: 1000 env: BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' DEBUGINFOD_URLS: 'https://debuginfod.centos.org/ ' HOME: /home/zuul LANG: en_US.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: /home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 50410 22 SSH_CONNECTION: 38.102.83.114 50410 38.102.83.194 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '1' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' 
tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.194 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::f816:3eff:fe66:118a prefix: '64' scope: link macaddress: fa:16:3e:66:11:8a module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether fibre_channel_wwn: [] fips: false form_factor: Other fqdn: compute-1 gather_subset: - all hostname: compute-1 hostnqn: nqn.2014-08.org.nvmexpress:uuid:2f7d2450-18ac-43a6-80ee-9caa4a7736e0 interfaces: - eth0 - lo is_chroot: false iscsi_iqn: '' kernel: 5.14.0-620.el9.x86_64 kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Sep 26 01:13:23 UTC 2025' lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback loadavg: 15m: 0.01 1m: 0.14 5m: 0.04 locally_reachable_ips: ipv4: - 38.102.83.194 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:fe66:118a lsb: {} lvm: N/A machine: x86_64 machine_id: 42833e1b511a402df82cb9cb2fc36491 memfree_mb: 7236 memory_mb: nocache: free: 7387 used: 292 real: free: 7236 total: 7679 used: 443 swap: cached: 0 free: 0 total: 0 used: 0 memtotal_mb: 7679 module_setup: true mounts: - 
block_available: 20378766 block_size: 4096 block_total: 20954875 block_used: 576109 device: /dev/vda1 fstype: xfs inode_available: 41888295 inode_total: 41942512 inode_used: 54217 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 83471425536 size_total: 85831168000 uuid: 1631a6ad-43b8-436d-ae76-16fa14b94458 nodename: compute-1 os_family: RedHat pkg_mgr: dnf proc_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor processor_cores: 1 processor_count: 8 processor_nproc: 8 processor_threads_per_core: 1 processor_vcpus: 8 product_name: OpenStack Nova product_serial: NA product_uuid: NA product_version: 26.2.1 python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 23 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 23 - final - 0 python_version: 3.9.23 real_group_id: 1000 real_user_id: 1000 selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted selinux_python_present: true service_mgr: systemd ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBMTgbNX9dV8PbdYRYJgdLGAmpYTw9D60MdrTlouplSpg0YYsXgPcoTYqPcnsQeu0q8Dhp49vz6EaftjDleBSPRc= ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIKyyYn981xqM8QXbrWUowTM5nwuXYFJQ6duZFmdVeYTr ssh_host_key_ed25519_public_keytype: ssh-ed25519 ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQDD/EaxevhIIgCsjkZD8fsuuLFaKprWcI8Xf9fpPEfi8Zn71kEhnu+1usw1HlgsncOk/flzasKRGJjCFox0PUgX8jMrj2ZGOhiGL6aPNkXgxmFXGJYcgV3/Gmb0aSRZQ9FpDzH//d9vxTKFg+h3QwZvfCPR6qqSihmJUv7vH71hDFTTf6MzShu1bZwZDTCZ3z2ti0ty96iJjliYwoBVtelM93cBzsyI0TYzKdfnhDWkzM3oEIOxzxhaQPPYuPr7mX4I3HDh2xHcoaxD3bFvcuvHKi8C1s6MZ5W5Iht+bbNFEkl/4bafo0wEsxbKnfhBCBAKG4RQ0SnDspbqLSkzSxSH7Liqflynj4XfektVZIZrJ8nUmJ22iOCiz6lGqViyfxZfduSRlibuOXqaMH0ImCIDRthFsqToCMyRty1XpB32ulYKlrstGqe1T4ZdW00u8+xu5bXw/+BUHo689PipHhqtdu5XBZUZr137oxQ7aeQCg4mr2eazVG07cLhoHdGqoRk= ssh_host_key_rsa_public_keytype: ssh-rsa swapfree_mb: 0 swaptotal_mb: 0 system: Linux system_capabilities: - '' system_capabilities_enforced: 'True' system_vendor: OpenStack Foundation uptime_seconds: 44 user_dir: /home/zuul user_gecos: '' user_gid: 1000 user_id: zuul user_shell: /bin/bash user_uid: 1000 userspace_architecture: x86_64 userspace_bits: '64' virtualization_role: guest virtualization_tech_guest: - openstack virtualization_tech_host: - kvm virtualization_type: openstack ansible_fibre_channel_wwn: [] ansible_fips: false ansible_forks: 5 ansible_form_factor: Other ansible_fqdn: compute-1 ansible_host: 38.102.83.194 ansible_hostname: compute-1 ansible_hostnqn: nqn.2014-08.org.nvmexpress:uuid:2f7d2450-18ac-43a6-80ee-9caa4a7736e0 ansible_interfaces: - eth0 - lo ansible_inventory_sources: - /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/post_playbook_0/inventory.yaml ansible_is_chroot: false ansible_iscsi_iqn: '' ansible_kernel: 5.14.0-620.el9.x86_64 
ansible_kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Sep 26 01:13:23 UTC 2025' ansible_lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback ansible_loadavg: 15m: 0.01 1m: 0.14 5m: 0.04 ansible_local: {} ansible_locally_reachable_ips: ipv4: - 38.102.83.194 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:fe66:118a ansible_lsb: {} ansible_lvm: N/A ansible_machine: x86_64 ansible_machine_id: 42833e1b511a402df82cb9cb2fc36491 ansible_memfree_mb: 7236 ansible_memory_mb: nocache: free: 7387 used: 292 real: free: 7236 total: 7679 used: 443 swap: cached: 0 free: 0 total: 0 used: 0 ansible_memtotal_mb: 7679 ansible_mounts: - block_available: 20378766 block_size: 4096 block_total: 20954875 block_used: 576109 device: /dev/vda1 fstype: xfs inode_available: 41888295 inode_total: 41942512 inode_used: 54217 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 83471425536 size_total: 85831168000 uuid: 1631a6ad-43b8-436d-ae76-16fa14b94458 ansible_nodename: compute-1 ansible_os_family: RedHat ansible_pkg_mgr: dnf ansible_playbook_python: /usr/lib/zuul/ansible/8/bin/python ansible_port: 22 ansible_proc_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 ansible_processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD 
EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor ansible_processor_cores: 1 ansible_processor_count: 8 ansible_processor_nproc: 8 ansible_processor_threads_per_core: 1 ansible_processor_vcpus: 8 ansible_product_name: OpenStack Nova ansible_product_serial: NA ansible_product_uuid: NA ansible_product_version: 26.2.1 ansible_python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 23 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 23 - final - 0 ansible_python_interpreter: auto ansible_python_version: 3.9.23 ansible_real_group_id: 1000 ansible_real_user_id: 1000 ansible_run_tags: - all ansible_scp_extra_args: -o PermitLocalCommand=no ansible_selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted ansible_selinux_python_present: true ansible_service_mgr: systemd ansible_sftp_extra_args: -o PermitLocalCommand=no ansible_skip_tags: [] ansible_ssh_common_args: -o PermitLocalCommand=no ansible_ssh_executable: ssh ansible_ssh_extra_args: -o PermitLocalCommand=no ansible_ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBMTgbNX9dV8PbdYRYJgdLGAmpYTw9D60MdrTlouplSpg0YYsXgPcoTYqPcnsQeu0q8Dhp49vz6EaftjDleBSPRc= ansible_ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ansible_ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIKyyYn981xqM8QXbrWUowTM5nwuXYFJQ6duZFmdVeYTr ansible_ssh_host_key_ed25519_public_keytype: ssh-ed25519 ansible_ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQDD/EaxevhIIgCsjkZD8fsuuLFaKprWcI8Xf9fpPEfi8Zn71kEhnu+1usw1HlgsncOk/flzasKRGJjCFox0PUgX8jMrj2ZGOhiGL6aPNkXgxmFXGJYcgV3/Gmb0aSRZQ9FpDzH//d9vxTKFg+h3QwZvfCPR6qqSihmJUv7vH71hDFTTf6MzShu1bZwZDTCZ3z2ti0ty96iJjliYwoBVtelM93cBzsyI0TYzKdfnhDWkzM3oEIOxzxhaQPPYuPr7mX4I3HDh2xHcoaxD3bFvcuvHKi8C1s6MZ5W5Iht+bbNFEkl/4bafo0wEsxbKnfhBCBAKG4RQ0SnDspbqLSkzSxSH7Liqflynj4XfektVZIZrJ8nUmJ22iOCiz6lGqViyfxZfduSRlibuOXqaMH0ImCIDRthFsqToCMyRty1XpB32ulYKlrstGqe1T4ZdW00u8+xu5bXw/+BUHo689PipHhqtdu5XBZUZr137oxQ7aeQCg4mr2eazVG07cLhoHdGqoRk= ansible_ssh_host_key_rsa_public_keytype: ssh-rsa ansible_swapfree_mb: 0 ansible_swaptotal_mb: 0 ansible_system: Linux ansible_system_capabilities: - '' ansible_system_capabilities_enforced: 'True' ansible_system_vendor: OpenStack Foundation ansible_uptime_seconds: 44 ansible_user: zuul ansible_user_dir: /home/zuul ansible_user_gecos: '' ansible_user_gid: 1000 ansible_user_id: zuul ansible_user_shell: /bin/bash ansible_user_uid: 1000 ansible_userspace_architecture: x86_64 ansible_userspace_bits: '64' ansible_verbosity: 1 ansible_version: full: 2.15.12 major: 2 minor: 15 revision: 12 string: 2.15.12 ansible_virtualization_role: guest ansible_virtualization_tech_guest: - openstack ansible_virtualization_tech_host: - kvm ansible_virtualization_type: openstack cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_build_images_output: {} cifmw_dlrn_report_result: false cifmw_edpm_telemetry_enabled_exporters: - podman_exporter - openstack_network_exporter cifmw_extras: - '@/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/scenarios/centos-9/multinode-ci.yml' - '@/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/scenarios/centos-9/horizon.yml' - 
'@/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/scenarios/edpm-no-notifications.yml' - '@/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/tests/watcher-tempest.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: /home/zuul/.crc/machines/crc/kubeconfig cifmw_openshift_password: '123456789' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_operator_build_output: operators: openstack-operator: git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9 git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator image: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9 image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9 image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9 watcher-operator: git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d cifmw_test_operator_tempest_external_plugin: - changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin repository: https://opendev.org/openstack/watcher-tempest-plugin.git cifmw_test_operator_tempest_image_tag: watcher_latest cifmw_test_operator_tempest_namespace: podified-epoxy-centos9 cifmw_test_operator_tempest_registry: 38.102.83.53:5001 cifmw_update_containers_openstack: false cifmw_update_containers_org: podified-epoxy-centos9 cifmw_update_containers_registry: 38.102.83.53:5001 cifmw_update_containers_tag: watcher_latest cifmw_update_containers_watcher: true cifmw_use_libvirt: false cifmw_zuul_target_host: controller content_provider_dlrn_md5_hash: '' content_provider_os_registry_url: 38.102.83.53:5001/podified-epoxy-centos9 content_provider_registry_ip: 38.102.83.53 crc_ci_bootstrap_cloud_name: vexxhost crc_ci_bootstrap_networking: instances: compute-0: networks: default: ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 tenant: config_nm: false ip: 172.19.0.100 compute-1: networks: default: ip: 192.168.122.101 internal-api: config_nm: false ip: 172.17.0.101 storage: config_nm: false ip: 172.18.0.101 tenant: config_nm: false ip: 172.19.0.101 controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: '1500' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 discovered_interpreter_python: /usr/bin/python3 enable_ramdisk: true fetch_dlrn_hash: false gather_subset: - all group_names: - computes groups: all: - compute-0 - compute-1 - controller - crc computes: - compute-0 - compute-1 ocps: - crc ungrouped: *id001 zuul_unreachable: [] inventory_dir: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/post_playbook_0 inventory_file: 
/var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/post_playbook_0/inventory.yaml inventory_hostname: compute-1 inventory_hostname_short: compute-1 module_setup: true nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: ff124b62-670a-418b-9791-f4b76e5224ee host_id: b012578aee5370fae73eb6c92c4679617335173cccca05390470f411 interface_ip: 38.102.83.194 label: cloud-centos-9-stream-tripleo private_ipv4: 38.102.83.194 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.194 public_ipv6: '' region: RegionOne slot: null omit: __omit_place_holder__7c6be8c090d39d526a4b5005be5e95dd82bdf54e playbook_dir: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true unsafe_vars: ansible_connection: ssh ansible_host: 38.102.83.194 ansible_port: 22 ansible_python_interpreter: auto ansible_user: zuul cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_build_images_output: {} cifmw_dlrn_report_result: false cifmw_edpm_telemetry_enabled_exporters: - podman_exporter - openstack_network_exporter cifmw_extras: - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/multinode-ci.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/horizon.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. src_dir }}/ci/scenarios/{{ watcher_scenario }}.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. 
src_dir }}/ci/tests/watcher-tempest.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '123456789' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_operator_build_output: operators: openstack-operator: git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9 git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator image: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9 image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9 image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9 watcher-operator: git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d cifmw_test_operator_tempest_external_plugin: - changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin repository: https://opendev.org/openstack/watcher-tempest-plugin.git cifmw_test_operator_tempest_image_tag: watcher_latest cifmw_test_operator_tempest_namespace: '{{ content_provider_os_registry_url | split(''/'') | last }}' cifmw_test_operator_tempest_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_openstack: false cifmw_update_containers_org: podified-epoxy-centos9 cifmw_update_containers_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_tag: watcher_latest cifmw_update_containers_watcher: true cifmw_use_libvirt: false cifmw_zuul_target_host: controller content_provider_dlrn_md5_hash: '' content_provider_os_registry_url: 38.102.83.53:5001/podified-epoxy-centos9 content_provider_registry_ip: 38.102.83.53 crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: compute-0: networks: default: ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 tenant: config_nm: false ip: 172.19.0.100 compute-1: networks: default: ip: 192.168.122.101 internal-api: config_nm: false ip: 172.17.0.101 storage: config_nm: false ip: 172.18.0.101 tenant: config_nm: false ip: 172.19.0.101 controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 enable_ramdisk: true fetch_dlrn_hash: false nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: ff124b62-670a-418b-9791-f4b76e5224ee host_id: b012578aee5370fae73eb6c92c4679617335173cccca05390470f411 interface_ip: 38.102.83.194 label: cloud-centos-9-stream-tripleo private_ipv4: 
38.102.83.194 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.194 public_ipv6: '' region: RegionOne slot: null push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true watcher_scenario: edpm-no-notifications watcher_services_tag: watcher_latest watcher_tempest_max_microversion: '1.4' zuul_log_collection: false watcher_scenario: edpm-no-notifications watcher_services_tag: watcher_latest watcher_tempest_max_microversion: '1.4' zuul: _inheritance_path: - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' ansible_version: '8' attempts: 1 branch: main build: 9ce4c11f9f6a4904bf6148a8276a3232 build_refs: - branch: main change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null buildset: f9416ac601264548b137ce1f44fe627c buildset_refs: - branch: main change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 child_jobs: [] commit_id: 14377136e67c9cd67507a059bfde2f19f140387d event_id: 7dde6e80-a2f2-11f0-83f1-b4af7183f5ac executor: hostname: ze01.softwarefactory-project.io inventory_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/inventory.yaml log_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/logs result_data_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/results.json src_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/src work_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work items: - branch: main change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since 
https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null job: watcher-operator-validation-epoxy-ocp4-16 jobtags: [] max_attempts: 1 message: W1dJUF0gTW92ZSByYWJiaXRtcSBub3RpZmljYXRpb25zIHF1ZXVlIHRvIGNvbnRyb2xwbGFuZSBsZXZlbAoKTW92ZSByYWJiaXRtcSBub3RpZmljYXRpb25zIHF1ZXVlIGZyb20gZW5hYmxpbmcgYXQgbm92YS9jaW5kZXIvd2F0Y2hlciBsZXZlbCB0byBvcGVuc3RhY2sgY29udHJvbHBsYW5lIGxldmVsIGFmdGVyIHRoYXQgdXNhZ2UgaXMgYXZhaWxhYmxlIHNpbmNlIGh0dHBzOi8vZ2l0aHViLmNvbS9vcGVuc3RhY2stazhzLW9wZXJhdG9ycy9vcGVuc3RhY2stb3BlcmF0b3IvcHVsbC8xNTkx patchset: 14377136e67c9cd67507a059bfde2f19f140387d pipeline: github-check playbook_context: playbook_projects: trusted/project_0/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 941f6f7666fdff0145523beb29ceda8db25c234c trusted/project_1/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df trusted/project_2/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d207d5ad1c5824d6db58c2eb5935a8b36674cbe4 trusted/project_3/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 untrusted/project_0/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 untrusted/project_1/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 941f6f7666fdff0145523beb29ceda8db25c234c untrusted/project_2/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df untrusted/project_3/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d207d5ad1c5824d6db58c2eb5935a8b36674cbe4 playbooks: - path: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks/edpm/run.yml roles: - checkout: main checkout_description: playbook branch link_name: ansible/playbook_0/role_0/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_0/role_0/ci-framework/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_1/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_0/role_1/config/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_2/zuul-jobs link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_0/role_2/zuul-jobs/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_3/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_0/role_3/rdo-jobs/roles post_review: false project: canonical_hostname: github.com canonical_name: 
github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator projects: github.com/crc-org/crc-cloud: canonical_hostname: github.com canonical_name: github.com/crc-org/crc-cloud checkout: main checkout_description: project override ref commit: f6ed2f2d118884a075895bbf954ff6000e540430 name: crc-org/crc-cloud required: true short_name: crc-cloud src_dir: src/github.com/crc-org/crc-cloud github.com/openstack-k8s-operators/ci-framework: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main checkout_description: zuul branch commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 name: openstack-k8s-operators/ci-framework required: true short_name: ci-framework src_dir: src/github.com/openstack-k8s-operators/ci-framework github.com/openstack-k8s-operators/edpm-ansible: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/edpm-ansible checkout: main checkout_description: zuul branch commit: 95aa63de3182faad63a69301d101debad3efc936 name: openstack-k8s-operators/edpm-ansible required: true short_name: edpm-ansible src_dir: src/github.com/openstack-k8s-operators/edpm-ansible github.com/openstack-k8s-operators/infra-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/infra-operator checkout: main checkout_description: zuul branch commit: 2b5048bbcae44dfeaacbb43830318ca45c13f182 name: openstack-k8s-operators/infra-operator required: true short_name: infra-operator src_dir: src/github.com/openstack-k8s-operators/infra-operator github.com/openstack-k8s-operators/install_yamls: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/install_yamls checkout: main checkout_description: zuul branch commit: bb26118ddc70016cbd2118a0b0a35d5f6ab9c343 name: openstack-k8s-operators/install_yamls required: true short_name: install_yamls src_dir: src/github.com/openstack-k8s-operators/install_yamls github.com/openstack-k8s-operators/openstack-baremetal-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-baremetal-operator checkout: main checkout_description: zuul branch commit: 3bf7652f010ead15ac2d2fec7e3b71c442b8fb8d name: openstack-k8s-operators/openstack-baremetal-operator required: true short_name: openstack-baremetal-operator src_dir: src/github.com/openstack-k8s-operators/openstack-baremetal-operator github.com/openstack-k8s-operators/openstack-must-gather: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-must-gather checkout: main checkout_description: zuul branch commit: 748dff8508cbb49e00426d46a4487b9f4c0b0096 name: openstack-k8s-operators/openstack-must-gather required: true short_name: openstack-must-gather src_dir: src/github.com/openstack-k8s-operators/openstack-must-gather github.com/openstack-k8s-operators/openstack-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-operator checkout: main checkout_description: zuul branch commit: 245af87e94976809f2023f59c19dffb95df97ed9 name: openstack-k8s-operators/openstack-operator required: true short_name: openstack-operator src_dir: src/github.com/openstack-k8s-operators/openstack-operator github.com/openstack-k8s-operators/repo-setup: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/repo-setup checkout: main 
checkout_description: zuul branch commit: 37b10946c6a10f9fa26c13305f06bfd6867e723f name: openstack-k8s-operators/repo-setup required: true short_name: repo-setup src_dir: src/github.com/openstack-k8s-operators/repo-setup github.com/openstack-k8s-operators/watcher-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator checkout: main checkout_description: zuul branch commit: 14377136e67c9cd67507a059bfde2f19f140387d name: openstack-k8s-operators/watcher-operator required: false short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator opendev.org/zuul/zuul-jobs: canonical_hostname: opendev.org canonical_name: opendev.org/zuul/zuul-jobs checkout: master checkout_description: project default branch commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df name: zuul/zuul-jobs required: true short_name: zuul-jobs src_dir: src/opendev.org/zuul/zuul-jobs review.rdoproject.org/config: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/config checkout: master checkout_description: project default branch commit: 941f6f7666fdff0145523beb29ceda8db25c234c name: config required: true short_name: config src_dir: src/review.rdoproject.org/config ref: refs/pull/287/head resources: {} tenant: rdoproject.org timeout: 10800 topic: null voting: true zuul_execution_branch: main zuul_execution_canonical_name_and_path: github.com/openstack-k8s-operators/ci-framework/ci/playbooks/e2e-collect-logs.yml zuul_execution_phase: post zuul_execution_phase_index: '0' zuul_execution_trusted: 'False' zuul_log_collection: false zuul_success: 'False' zuul_will_retry: 'False' controller: _param_dir: changed: true cmd: - ls - /home/zuul/ci-framework-data/artifacts/parameters delta: '0:00:00.007078' end: '2025-10-06 21:12:10.478936' failed: false msg: '' rc: 0 start: '2025-10-06 21:12:10.471858' stderr: '' stderr_lines: [] stdout: 'custom-params.yml install-yamls-params.yml openshift-login-params.yml zuul-params.yml' stdout_lines: - custom-params.yml - install-yamls-params.yml - openshift-login-params.yml - zuul-params.yml zuul_log_id: fa163ec2-ffbe-e6cf-bc15-000000000861-1-controller _param_file: changed: false failed: false stat: exists: false _parsed_vars: changed: false msg: All items completed results: - ansible_loop_var: item changed: false content: cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw
cifmw_basedir: /home/zuul/ci-framework-data
cifmw_build_images_output: {}
cifmw_config_certmanager: true
cifmw_deploy_edpm: true
cifmw_dlrn_report_result: false
cifmw_edpm_prepare_kustomizations:
-   apiVersion: kustomize.config.k8s.io/v1beta1
    kind: Kustomization
    namespace: openstack
    patches:
    -   patch: "apiVersion: core.openstack.org/v1beta1\nkind: OpenStackControlPlane\nmetadata:\n
            \ name: controlplane\nspec:\n  telemetry:\n    enabled: true\n    template:\n
            \     ceilometer:\n        enabled: true\n      metricStorage:\n        enabled:
            true\n        customMonitoringStack:\n          alertmanagerConfig:\n
            \           disabled: true\n          prometheusConfig:\n            enableRemoteWriteReceiver:
            true\n            persistentVolumeClaim:\n              resources:\n                requests:\n
            \                 storage: 20G\n            replicas: 1\n            scrapeInterval:
            30s\n          resourceSelector:\n            matchLabels:\n              service:
            metricStorage\n          retention: 24h"
        target:
            kind: OpenStackControlPlane
    -   patch: "apiVersion: core.openstack.org/v1beta1\nkind: OpenStackControlPlane\nmetadata:\n
            \ name: controlplane\nspec:\n  telemetry:\n    template:\n      metricStorage:\n
            \       monitoringStack: null"
        target:
            kind: OpenStackControlPlane
    -   patch: "apiVersion: core.openstack.org/v1beta1\nkind: OpenStackControlPlane\nmetadata:\n
            \ name: controlplane\nspec:\n  watcher:\n    enabled: true\n    template:\n
            \     decisionengineServiceTemplate:\n        customServiceConfig: |\n
            \         [watcher_cluster_data_model_collectors.compute]\n          period
            = 60\n          [watcher_cluster_data_model_collectors.storage]\n          period
            = 60"
        target:
            kind: OpenStackControlPlane
cifmw_edpm_prepare_skip_crc_storage_creation: true
cifmw_edpm_prepare_timeout: 60
cifmw_edpm_telemetry_enabled_exporters:
- podman_exporter
- openstack_network_exporter
cifmw_extras:
- '@/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/scenarios/centos-9/multinode-ci.yml'
- '@/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/scenarios/centos-9/horizon.yml'
- '@/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/scenarios/edpm-no-notifications.yml'
- '@/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/tests/watcher-tempest.yml'
cifmw_installyamls_repos: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls
cifmw_openshift_api: api.crc.testing:6443
cifmw_openshift_password: '123456789'
cifmw_openshift_setup_skip_internal_registry: true
cifmw_openshift_setup_skip_internal_registry_tls_verify: true
cifmw_openshift_skip_tls_verify: true
cifmw_openshift_user: kubeadmin
cifmw_operator_build_meta_name: openstack-operator
cifmw_operator_build_output:
    operators:
        openstack-operator:
            git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9
            git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator
            image: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9
            image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9
            image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9
        watcher-operator:
            git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d
            git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator
            image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d
            image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d
            image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d
cifmw_path: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin
cifmw_repo_setup_dist_major_version: 9
cifmw_repo_setup_os_release: centos
cifmw_run_test_role: test_operator
cifmw_run_tests: true
cifmw_test_operator_tempest_concurrency: 1
cifmw_test_operator_tempest_exclude_list: 'watcher_tempest_plugin.*client_functional.*

    watcher_tempest_plugin.tests.scenario.test_execute_strategies.TestExecuteStrategies.test_execute_storage_capacity_balance_strategy

    watcher_tempest_plugin.*\[.*\breal_load\b.*\].*

    watcher_tempest_plugin.tests.scenario.test_execute_zone_migration.TestExecuteZoneMigrationStrategy.test_execute_zone_migration_without_destination_host

    watcher_tempest_plugin.*\[.*\bvolume_migration\b.*\].*

    '
cifmw_test_operator_tempest_external_plugin:
-   changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e
    changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin
    repository: https://opendev.org/openstack/watcher-tempest-plugin.git
cifmw_test_operator_tempest_image_tag: watcher_latest
cifmw_test_operator_tempest_include_list: 'watcher_tempest_plugin.*

    '
cifmw_test_operator_tempest_namespace: podified-epoxy-centos9
cifmw_test_operator_tempest_registry: 38.102.83.53:5001
cifmw_test_operator_tempest_tempestconf_config:
    overrides: 'compute.min_microversion 2.56

        compute.min_compute_nodes 2

        placement.min_microversion 1.29

        compute-feature-enabled.live_migration true

        compute-feature-enabled.block_migration_for_live_migration true

        service_available.sg_core true

        telemetry_services.metric_backends prometheus

        telemetry.disable_ssl_certificate_validation true

        telemetry.ceilometer_polling_interval 15

        optimize.min_microversion 1.0

        optimize.max_microversion 1.4

        optimize.datasource prometheus

        optimize.openstack_type podified

        optimize.proxy_host_address 38.102.83.51

        optimize.proxy_host_user zuul

        optimize.prometheus_host metric-storage-prometheus.openstack.svc

        optimize.prometheus_ssl_enabled true

        optimize.prometheus_ssl_cert_dir /etc/prometheus/secrets/combined-ca-bundle

        optimize.podified_kubeconfig_path /home/zuul/.crc/machines/crc/kubeconfig

        optimize.podified_namespace openstack

        optimize.run_continuous_audit_tests true

        '
cifmw_update_containers: true
cifmw_update_containers_openstack: false
cifmw_update_containers_org: podified-epoxy-centos9
cifmw_update_containers_registry: 38.102.83.53:5001
cifmw_update_containers_tag: watcher_latest
cifmw_update_containers_watcher: true
cifmw_use_crc: false
cifmw_use_libvirt: false
cifmw_zuul_target_host: controller
post_ctlplane_deploy:
-   name: Tune rabbitmq resources
    source: rabbitmq_tuning.yml
    type: playbook
post_deploy:
-   inventory: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/hosts
    name: Download needed tools
    source: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/download_tools.yaml
    type: playbook
-   name: Patch Openstack Prometheus to enable admin API
    source: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/playbooks/prometheus_admin_api.yaml
    type: playbook
post_infra:
-   inventory: /home/zuul/ci-framework-data/artifacts/zuul_inventory.yml
    name: Fetch nodes facts and save them as parameters
    source: fetch_compute_facts.yml
    type: playbook
pre_deploy:
-   name: 80 Kustomize OpenStack CR
    source: control_plane_horizon.yml
    type: playbook
pre_deploy_create_coo_subscription:
-   name: Deploy cluster-observability-operator
    source: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/playbooks/deploy_cluster_observability_operator.yaml
    type: playbook
pre_infra:
-   connection: local
    inventory: localhost,
    name: Download needed tools
    source: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/download_tools.yaml
    type: playbook
pre_update:
-   inventory: /home/zuul/ci-framework-data/artifacts/zuul_inventory.yml
    name: Fetch nodes facts and save them as parameters
    source: fetch_compute_facts.yml
    type: playbook
 encoding: base64 failed: false invocation: module_args: src: /home/zuul/ci-framework-data/artifacts/parameters/custom-params.yml item: custom-params.yml source: /home/zuul/ci-framework-data/artifacts/parameters/custom-params.yml - ansible_loop_var: item changed: false content: cifmw_install_yamls_defaults:
    ADOPTED_EXTERNAL_NETWORK: 172.21.1.0/24
    ADOPTED_INTERNALAPI_NETWORK: 172.17.1.0/24
    ADOPTED_STORAGEMGMT_NETWORK: 172.20.1.0/24
    ADOPTED_STORAGE_NETWORK: 172.18.1.0/24
    ADOPTED_TENANT_NETWORK: 172.9.1.0/24
    ANSIBLEEE: config/samples/_v1beta1_ansibleee.yaml
    ANSIBLEEE_BRANCH: main
    ANSIBLEEE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/config/samples/_v1beta1_ansibleee.yaml
    ANSIBLEEE_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-operator-index:latest
    ANSIBLEEE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/kuttl-test.yaml
    ANSIBLEEE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/tests/kuttl/tests
    ANSIBLEEE_KUTTL_NAMESPACE: ansibleee-kuttl-tests
    ANSIBLEEE_REPO: https://github.com/openstack-k8s-operators/openstack-ansibleee-operator
    ANSIBLEE_COMMIT_HASH: ''
    BARBICAN: config/samples/barbican_v1beta1_barbican.yaml
    BARBICAN_BRANCH: main
    BARBICAN_COMMIT_HASH: ''
    BARBICAN_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/config/samples/barbican_v1beta1_barbican.yaml
    BARBICAN_DEPL_IMG: unused
    BARBICAN_IMG: quay.io/openstack-k8s-operators/barbican-operator-index:latest
    BARBICAN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/kuttl-test.yaml
    BARBICAN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/tests/kuttl/tests
    BARBICAN_KUTTL_NAMESPACE: barbican-kuttl-tests
    BARBICAN_REPO: https://github.com/openstack-k8s-operators/barbican-operator.git
    BARBICAN_SERVICE_ENABLED: 'true'
    BARBICAN_SIMPLE_CRYPTO_ENCRYPTION_KEY: sEFmdFjDUqRM2VemYslV5yGNWjokioJXsg8Nrlc3drU=
    BAREMETAL_BRANCH: main
    BAREMETAL_COMMIT_HASH: ''
    BAREMETAL_IMG: quay.io/openstack-k8s-operators/openstack-baremetal-operator-index:latest
    BAREMETAL_OS_CONTAINER_IMG: ''
    BAREMETAL_OS_IMG: ''
    BAREMETAL_REPO: https://github.com/openstack-k8s-operators/openstack-baremetal-operator.git
    BAREMETAL_TIMEOUT: 20m
    BASH_IMG: quay.io/openstack-k8s-operators/bash:latest
    BGP_ASN: '64999'
    BGP_LEAF_1: 100.65.4.1
    BGP_LEAF_2: 100.64.4.1
    BGP_OVN_ROUTING: 'false'
    BGP_PEER_ASN: '64999'
    BGP_SOURCE_IP: 172.30.4.2
    BGP_SOURCE_IP6: f00d:f00d:f00d:f00d:f00d:f00d:f00d:42
    BMAAS_BRIDGE_IPV4_PREFIX: 172.20.1.2/24
    BMAAS_BRIDGE_IPV6_PREFIX: fd00:bbbb::2/64
    BMAAS_INSTANCE_DISK_SIZE: '20'
    BMAAS_INSTANCE_MEMORY: '4096'
    BMAAS_INSTANCE_NAME_PREFIX: crc-bmaas
    BMAAS_INSTANCE_NET_MODEL: virtio
    BMAAS_INSTANCE_OS_VARIANT: centos-stream9
    BMAAS_INSTANCE_VCPUS: '2'
    BMAAS_INSTANCE_VIRT_TYPE: kvm
    BMAAS_IPV4: 'true'
    BMAAS_IPV6: 'false'
    BMAAS_LIBVIRT_USER: sushyemu
    BMAAS_METALLB_ADDRESS_POOL: 172.20.1.64/26
    BMAAS_METALLB_POOL_NAME: baremetal
    BMAAS_NETWORK_IPV4_PREFIX: 172.20.1.1/24
    BMAAS_NETWORK_IPV6_PREFIX: fd00:bbbb::1/64
    BMAAS_NETWORK_NAME: crc-bmaas
    BMAAS_NODE_COUNT: '1'
    BMAAS_OCP_INSTANCE_NAME: crc
    BMAAS_REDFISH_PASSWORD: password
    BMAAS_REDFISH_USERNAME: admin
    BMAAS_ROUTE_LIBVIRT_NETWORKS: crc-bmaas,crc,default
    BMAAS_SUSHY_EMULATOR_DRIVER: libvirt
    BMAAS_SUSHY_EMULATOR_IMAGE: quay.io/metal3-io/sushy-tools:latest
    BMAAS_SUSHY_EMULATOR_NAMESPACE: sushy-emulator
    BMAAS_SUSHY_EMULATOR_OS_CLIENT_CONFIG_FILE: /etc/openstack/clouds.yaml
    BMAAS_SUSHY_EMULATOR_OS_CLOUD: openstack
    BMH_NAMESPACE: openstack
    BMO_BRANCH: release-0.9
    BMO_COMMIT_HASH: ''
    BMO_IPA_BRANCH: stable/2024.1
    BMO_IRONIC_HOST: 192.168.122.10
    BMO_PROVISIONING_INTERFACE: ''
    BMO_REPO: https://github.com/metal3-io/baremetal-operator
    BMO_SETUP: false
    BMO_SETUP_ROUTE_REPLACE: 'true'
    BM_CTLPLANE_INTERFACE: enp1s0
    BM_INSTANCE_MEMORY: '8192'
    BM_INSTANCE_NAME_PREFIX: edpm-compute-baremetal
    BM_INSTANCE_NAME_SUFFIX: '0'
    BM_NETWORK_NAME: default
    BM_NODE_COUNT: '1'
    BM_ROOT_PASSWORD: ''
    BM_ROOT_PASSWORD_SECRET: ''
    CEILOMETER_CENTRAL_DEPL_IMG: unused
    CEILOMETER_NOTIFICATION_DEPL_IMG: unused
    CEPH_BRANCH: release-1.15
    CEPH_CLIENT: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/toolbox.yaml
    CEPH_COMMON: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/common.yaml
    CEPH_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/cluster-test.yaml
    CEPH_CRDS: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/crds.yaml
    CEPH_IMG: quay.io/ceph/demo:latest-squid
    CEPH_OP: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/operator-openshift.yaml
    CEPH_REPO: https://github.com/rook/rook.git
    CERTMANAGER_TIMEOUT: 300s
    CHECKOUT_FROM_OPENSTACK_REF: 'true'
    CINDER: config/samples/cinder_v1beta1_cinder.yaml
    CINDERAPI_DEPL_IMG: unused
    CINDERBKP_DEPL_IMG: unused
    CINDERSCH_DEPL_IMG: unused
    CINDERVOL_DEPL_IMG: unused
    CINDER_BRANCH: main
    CINDER_COMMIT_HASH: ''
    CINDER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/config/samples/cinder_v1beta1_cinder.yaml
    CINDER_IMG: quay.io/openstack-k8s-operators/cinder-operator-index:latest
    CINDER_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/kuttl-test.yaml
    CINDER_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/test/kuttl/tests
    CINDER_KUTTL_NAMESPACE: cinder-kuttl-tests
    CINDER_REPO: https://github.com/openstack-k8s-operators/cinder-operator.git
    CLEANUP_DIR_CMD: rm -Rf
    CRC_BGP_NIC_1_MAC: '52:54:00:11:11:11'
    CRC_BGP_NIC_2_MAC: '52:54:00:11:11:12'
    CRC_HTTPS_PROXY: ''
    CRC_HTTP_PROXY: ''
    CRC_STORAGE_NAMESPACE: crc-storage
    CRC_STORAGE_RETRIES: '3'
    CRC_URL: '''https://developers.redhat.com/content-gateway/rest/mirror/pub/openshift-v4/clients/crc/latest/crc-linux-amd64.tar.xz'''
    CRC_VERSION: latest
    DATAPLANE_ANSIBLE_SECRET: dataplane-ansible-ssh-private-key-secret
    DATAPLANE_ANSIBLE_USER: ''
    DATAPLANE_COMPUTE_IP: 192.168.122.100
    DATAPLANE_CONTAINER_PREFIX: openstack
    DATAPLANE_CONTAINER_TAG: current-podified
    DATAPLANE_CUSTOM_SERVICE_RUNNER_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest
    DATAPLANE_DEFAULT_GW: 192.168.122.1
    DATAPLANE_EXTRA_NOVA_CONFIG_FILE: /dev/null
    DATAPLANE_GROWVOLS_ARGS: /=8GB /tmp=1GB /home=1GB /var=100%
    DATAPLANE_KUSTOMIZE_SCENARIO: preprovisioned
    DATAPLANE_NETWORKER_IP: 192.168.122.200
    DATAPLANE_NETWORK_INTERFACE_NAME: eth0
    DATAPLANE_NOVA_NFS_PATH: ''
    DATAPLANE_NTP_SERVER: pool.ntp.org
    DATAPLANE_PLAYBOOK: osp.edpm.download_cache
    DATAPLANE_REGISTRY_URL: quay.io/podified-antelope-centos9
    DATAPLANE_RUNNER_IMG: ''
    DATAPLANE_SERVER_ROLE: compute
    DATAPLANE_SSHD_ALLOWED_RANGES: '[''192.168.122.0/24'']'
    DATAPLANE_TIMEOUT: 30m
    DATAPLANE_TLS_ENABLED: 'true'
    DATAPLANE_TOTAL_NETWORKER_NODES: '1'
    DATAPLANE_TOTAL_NODES: '1'
    DBSERVICE: galera
    DESIGNATE: config/samples/designate_v1beta1_designate.yaml
    DESIGNATE_BRANCH: main
    DESIGNATE_COMMIT_HASH: ''
    DESIGNATE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/config/samples/designate_v1beta1_designate.yaml
    DESIGNATE_IMG: quay.io/openstack-k8s-operators/designate-operator-index:latest
    DESIGNATE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/kuttl-test.yaml
    DESIGNATE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/tests/kuttl/tests
    DESIGNATE_KUTTL_NAMESPACE: designate-kuttl-tests
    DESIGNATE_REPO: https://github.com/openstack-k8s-operators/designate-operator.git
    DNSDATA: config/samples/network_v1beta1_dnsdata.yaml
    DNSDATA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsdata.yaml
    DNSMASQ: config/samples/network_v1beta1_dnsmasq.yaml
    DNSMASQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsmasq.yaml
    DNS_DEPL_IMG: unused
    DNS_DOMAIN: localdomain
    DOWNLOAD_TOOLS_SELECTION: all
    EDPM_ATTACH_EXTNET: 'true'
    EDPM_COMPUTE_ADDITIONAL_HOST_ROUTES: '''[]'''
    EDPM_COMPUTE_ADDITIONAL_NETWORKS: '''[]'''
    EDPM_COMPUTE_CELLS: '1'
    EDPM_COMPUTE_CEPH_ENABLED: 'true'
    EDPM_COMPUTE_CEPH_NOVA: 'true'
    EDPM_COMPUTE_DHCP_AGENT_ENABLED: 'true'
    EDPM_COMPUTE_SRIOV_ENABLED: 'true'
    EDPM_COMPUTE_SUFFIX: '0'
    EDPM_CONFIGURE_DEFAULT_ROUTE: 'true'
    EDPM_CONFIGURE_HUGEPAGES: 'false'
    EDPM_CONFIGURE_NETWORKING: 'true'
    EDPM_FIRSTBOOT_EXTRA: /tmp/edpm-firstboot-extra
    EDPM_NETWORKER_SUFFIX: '0'
    EDPM_TOTAL_NETWORKERS: '1'
    EDPM_TOTAL_NODES: '1'
    GALERA_REPLICAS: ''
    GENERATE_SSH_KEYS: 'true'
    GIT_CLONE_OPTS: ''
    GLANCE: config/samples/glance_v1beta1_glance.yaml
    GLANCEAPI_DEPL_IMG: unused
    GLANCE_BRANCH: main
    GLANCE_COMMIT_HASH: ''
    GLANCE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/config/samples/glance_v1beta1_glance.yaml
    GLANCE_IMG: quay.io/openstack-k8s-operators/glance-operator-index:latest
    GLANCE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/kuttl-test.yaml
    GLANCE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/test/kuttl/tests
    GLANCE_KUTTL_NAMESPACE: glance-kuttl-tests
    GLANCE_REPO: https://github.com/openstack-k8s-operators/glance-operator.git
    HEAT: config/samples/heat_v1beta1_heat.yaml
    HEATAPI_DEPL_IMG: unused
    HEATCFNAPI_DEPL_IMG: unused
    HEATENGINE_DEPL_IMG: unused
    HEAT_AUTH_ENCRYPTION_KEY: 767c3ed056cbaa3b9dfedb8c6f825bf0
    HEAT_BRANCH: main
    HEAT_COMMIT_HASH: ''
    HEAT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/config/samples/heat_v1beta1_heat.yaml
    HEAT_IMG: quay.io/openstack-k8s-operators/heat-operator-index:latest
    HEAT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/kuttl-test.yaml
    HEAT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/tests/kuttl/tests
    HEAT_KUTTL_NAMESPACE: heat-kuttl-tests
    HEAT_REPO: https://github.com/openstack-k8s-operators/heat-operator.git
    HEAT_SERVICE_ENABLED: 'true'
    HORIZON: config/samples/horizon_v1beta1_horizon.yaml
    HORIZON_BRANCH: main
    HORIZON_COMMIT_HASH: ''
    HORIZON_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/config/samples/horizon_v1beta1_horizon.yaml
    HORIZON_DEPL_IMG: unused
    HORIZON_IMG: quay.io/openstack-k8s-operators/horizon-operator-index:latest
    HORIZON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/kuttl-test.yaml
    HORIZON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/tests/kuttl/tests
    HORIZON_KUTTL_NAMESPACE: horizon-kuttl-tests
    HORIZON_REPO: https://github.com/openstack-k8s-operators/horizon-operator.git
    INFRA_BRANCH: main
    INFRA_COMMIT_HASH: ''
    INFRA_IMG: quay.io/openstack-k8s-operators/infra-operator-index:latest
    INFRA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/kuttl-test.yaml
    INFRA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/tests/kuttl/tests
    INFRA_KUTTL_NAMESPACE: infra-kuttl-tests
    INFRA_REPO: https://github.com/openstack-k8s-operators/infra-operator.git
    INSTALL_CERT_MANAGER: false
    INSTALL_NMSTATE: true || false
    INSTALL_NNCP: true || false
    INTERNALAPI_HOST_ROUTES: ''
    IPV6_LAB_IPV4_NETWORK_IPADDRESS: 172.30.0.1/24
    IPV6_LAB_IPV6_NETWORK_IPADDRESS: fd00:abcd:abcd:fc00::1/64
    IPV6_LAB_LIBVIRT_STORAGE_POOL: default
    IPV6_LAB_MANAGE_FIREWALLD: 'true'
    IPV6_LAB_NAT64_HOST_IPV4: 172.30.0.2/24
    IPV6_LAB_NAT64_HOST_IPV6: fd00:abcd:abcd:fc00::2/64
    IPV6_LAB_NAT64_INSTANCE_NAME: nat64-router
    IPV6_LAB_NAT64_IPV6_NETWORK: fd00:abcd:abcd:fc00::/64
    IPV6_LAB_NAT64_TAYGA_DYNAMIC_POOL: 192.168.255.0/24
    IPV6_LAB_NAT64_TAYGA_IPV4: 192.168.255.1
    IPV6_LAB_NAT64_TAYGA_IPV6: fd00:abcd:abcd:fc00::3
    IPV6_LAB_NAT64_TAYGA_IPV6_PREFIX: fd00:abcd:abcd:fcff::/96
    IPV6_LAB_NAT64_UPDATE_PACKAGES: 'false'
    IPV6_LAB_NETWORK_NAME: nat64
    IPV6_LAB_SNO_CLUSTER_NETWORK: fd00:abcd:0::/48
    IPV6_LAB_SNO_HOST_IP: fd00:abcd:abcd:fc00::11
    IPV6_LAB_SNO_HOST_PREFIX: '64'
    IPV6_LAB_SNO_INSTANCE_NAME: sno
    IPV6_LAB_SNO_MACHINE_NETWORK: fd00:abcd:abcd:fc00::/64
    IPV6_LAB_SNO_OCP_MIRROR_URL: https://mirror.openshift.com/pub/openshift-v4/clients/ocp
    IPV6_LAB_SNO_OCP_VERSION: latest-4.14
    IPV6_LAB_SNO_SERVICE_NETWORK: fd00:abcd:abcd:fc03::/112
    IPV6_LAB_SSH_PUB_KEY: /home/zuul/.ssh/id_rsa.pub
    IPV6_LAB_WORK_DIR: /home/zuul/.ipv6lab
    IRONIC: config/samples/ironic_v1beta1_ironic.yaml
    IRONICAPI_DEPL_IMG: unused
    IRONICCON_DEPL_IMG: unused
    IRONICINS_DEPL_IMG: unused
    IRONICNAG_DEPL_IMG: unused
    IRONICPXE_DEPL_IMG: unused
    IRONIC_BRANCH: main
    IRONIC_COMMIT_HASH: ''
    IRONIC_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/config/samples/ironic_v1beta1_ironic.yaml
    IRONIC_IMAGE_TAG: release-24.1
    IRONIC_IMG: quay.io/openstack-k8s-operators/ironic-operator-index:latest
    IRONIC_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/kuttl-test.yaml
    IRONIC_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/tests/kuttl/tests
    IRONIC_KUTTL_NAMESPACE: ironic-kuttl-tests
    IRONIC_REPO: https://github.com/openstack-k8s-operators/ironic-operator.git
    KEYSTONEAPI: config/samples/keystone_v1beta1_keystoneapi.yaml
    KEYSTONEAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/config/samples/keystone_v1beta1_keystoneapi.yaml
    KEYSTONEAPI_DEPL_IMG: unused
    KEYSTONE_BRANCH: main
    KEYSTONE_COMMIT_HASH: ''
    KEYSTONE_FEDERATION_CLIENT_SECRET: COX8bmlKAWn56XCGMrKQJj7dgHNAOl6f
    KEYSTONE_FEDERATION_CRYPTO_PASSPHRASE: openstack
    KEYSTONE_IMG: quay.io/openstack-k8s-operators/keystone-operator-index:latest
    KEYSTONE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/kuttl-test.yaml
    KEYSTONE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/tests/kuttl/tests
    KEYSTONE_KUTTL_NAMESPACE: keystone-kuttl-tests
    KEYSTONE_REPO: https://github.com/openstack-k8s-operators/keystone-operator.git
    KUBEADMIN_PWD: '12345678'
    LIBVIRT_SECRET: libvirt-secret
    LOKI_DEPLOY_MODE: openshift-network
    LOKI_DEPLOY_NAMESPACE: netobserv
    LOKI_DEPLOY_SIZE: 1x.demo
    LOKI_NAMESPACE: openshift-operators-redhat
    LOKI_OPERATOR_GROUP: openshift-operators-redhat-loki
    LOKI_SUBSCRIPTION: loki-operator
    LVMS_CR: '1'
    MANILA: config/samples/manila_v1beta1_manila.yaml
    MANILAAPI_DEPL_IMG: unused
    MANILASCH_DEPL_IMG: unused
    MANILASHARE_DEPL_IMG: unused
    MANILA_BRANCH: main
    MANILA_COMMIT_HASH: ''
    MANILA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/config/samples/manila_v1beta1_manila.yaml
    MANILA_IMG: quay.io/openstack-k8s-operators/manila-operator-index:latest
    MANILA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/kuttl-test.yaml
    MANILA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/test/kuttl/tests
    MANILA_KUTTL_NAMESPACE: manila-kuttl-tests
    MANILA_REPO: https://github.com/openstack-k8s-operators/manila-operator.git
    MANILA_SERVICE_ENABLED: 'true'
    MARIADB: config/samples/mariadb_v1beta1_galera.yaml
    MARIADB_BRANCH: main
    MARIADB_CHAINSAW_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/tests/chainsaw/config.yaml
    MARIADB_CHAINSAW_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/tests/chainsaw/tests
    MARIADB_CHAINSAW_NAMESPACE: mariadb-chainsaw-tests
    MARIADB_COMMIT_HASH: ''
    MARIADB_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/config/samples/mariadb_v1beta1_galera.yaml
    MARIADB_DEPL_IMG: unused
    MARIADB_IMG: quay.io/openstack-k8s-operators/mariadb-operator-index:latest
    MARIADB_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/kuttl-test.yaml
    MARIADB_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/tests/kuttl/tests
    MARIADB_KUTTL_NAMESPACE: mariadb-kuttl-tests
    MARIADB_REPO: https://github.com/openstack-k8s-operators/mariadb-operator.git
    MEMCACHED: config/samples/memcached_v1beta1_memcached.yaml
    MEMCACHED_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/memcached_v1beta1_memcached.yaml
    MEMCACHED_DEPL_IMG: unused
    METADATA_SHARED_SECRET: '1234567842'
    METALLB_IPV6_POOL: fd00:aaaa::80-fd00:aaaa::90
    METALLB_POOL: 192.168.122.80-192.168.122.90
    MICROSHIFT: '0'
    NAMESPACE: openstack
    NETCONFIG: config/samples/network_v1beta1_netconfig.yaml
    NETCONFIG_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_netconfig.yaml
    NETCONFIG_DEPL_IMG: unused
    NETOBSERV_DEPLOY_NAMESPACE: netobserv
    NETOBSERV_NAMESPACE: openshift-netobserv-operator
    NETOBSERV_OPERATOR_GROUP: openshift-netobserv-operator-net
    NETOBSERV_SUBSCRIPTION: netobserv-operator
    NETWORK_BGP: 'false'
    NETWORK_DESIGNATE_ADDRESS_PREFIX: 172.28.0
    NETWORK_DESIGNATE_EXT_ADDRESS_PREFIX: 172.50.0
    NETWORK_INTERNALAPI_ADDRESS_PREFIX: 172.17.0
    NETWORK_ISOLATION: 'true'
    NETWORK_ISOLATION_INSTANCE_NAME: crc
    NETWORK_ISOLATION_IPV4: 'true'
    NETWORK_ISOLATION_IPV4_ADDRESS: 172.16.1.1/24
    NETWORK_ISOLATION_IPV4_NAT: 'true'
    NETWORK_ISOLATION_IPV6: 'false'
    NETWORK_ISOLATION_IPV6_ADDRESS: fd00:aaaa::1/64
    NETWORK_ISOLATION_IP_ADDRESS: 192.168.122.10
    NETWORK_ISOLATION_MAC: '52:54:00:11:11:10'
    NETWORK_ISOLATION_NETWORK_NAME: net-iso
    NETWORK_ISOLATION_NET_NAME: default
    NETWORK_ISOLATION_USE_DEFAULT_NETWORK: 'true'
    NETWORK_MTU: '1500'
    NETWORK_STORAGEMGMT_ADDRESS_PREFIX: 172.20.0
    NETWORK_STORAGE_ADDRESS_PREFIX: 172.18.0
    NETWORK_STORAGE_MACVLAN: ''
    NETWORK_TENANT_ADDRESS_PREFIX: 172.19.0
    NETWORK_VLAN_START: '20'
    NETWORK_VLAN_STEP: '1'
    NEUTRONAPI: config/samples/neutron_v1beta1_neutronapi.yaml
    NEUTRONAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/config/samples/neutron_v1beta1_neutronapi.yaml
    NEUTRONAPI_DEPL_IMG: unused
    NEUTRON_BRANCH: main
    NEUTRON_COMMIT_HASH: ''
    NEUTRON_IMG: quay.io/openstack-k8s-operators/neutron-operator-index:latest
    NEUTRON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/kuttl-test.yaml
    NEUTRON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/test/kuttl/tests
    NEUTRON_KUTTL_NAMESPACE: neutron-kuttl-tests
    NEUTRON_REPO: https://github.com/openstack-k8s-operators/neutron-operator.git
    NFS_HOME: /home/nfs
    NMSTATE_NAMESPACE: openshift-nmstate
    NMSTATE_OPERATOR_GROUP: openshift-nmstate-tn6k8
    NMSTATE_SUBSCRIPTION: kubernetes-nmstate-operator
    NNCP_ADDITIONAL_HOST_ROUTES: ''
    NNCP_BGP_1_INTERFACE: enp7s0
    NNCP_BGP_1_IP_ADDRESS: 100.65.4.2
    NNCP_BGP_2_INTERFACE: enp8s0
    NNCP_BGP_2_IP_ADDRESS: 100.64.4.2
    NNCP_BRIDGE: ospbr
    NNCP_CLEANUP_TIMEOUT: 120s
    NNCP_CTLPLANE_IPV6_ADDRESS_PREFIX: 'fd00:aaaa::'
    NNCP_CTLPLANE_IPV6_ADDRESS_SUFFIX: '10'
    NNCP_CTLPLANE_IP_ADDRESS_PREFIX: 192.168.122
    NNCP_CTLPLANE_IP_ADDRESS_SUFFIX: '10'
    NNCP_DNS_SERVER: 192.168.122.1
    NNCP_DNS_SERVER_IPV6: fd00:aaaa::1
    NNCP_GATEWAY: 192.168.122.1
    NNCP_GATEWAY_IPV6: fd00:aaaa::1
    NNCP_INTERFACE: enp6s0
    NNCP_NODES: ''
    NNCP_TIMEOUT: 240s
    NOVA: config/samples/nova_v1beta1_nova_collapsed_cell.yaml
    NOVA_BRANCH: main
    NOVA_COMMIT_HASH: ''
    NOVA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/nova-operator/config/samples/nova_v1beta1_nova_collapsed_cell.yaml
    NOVA_IMG: quay.io/openstack-k8s-operators/nova-operator-index:latest
    NOVA_REPO: https://github.com/openstack-k8s-operators/nova-operator.git
    NUMBER_OF_INSTANCES: '1'
    OCP_NETWORK_NAME: crc
    OCTAVIA: config/samples/octavia_v1beta1_octavia.yaml
    OCTAVIA_BRANCH: main
    OCTAVIA_COMMIT_HASH: ''
    OCTAVIA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/config/samples/octavia_v1beta1_octavia.yaml
    OCTAVIA_IMG: quay.io/openstack-k8s-operators/octavia-operator-index:latest
    OCTAVIA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/kuttl-test.yaml
    OCTAVIA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/tests/kuttl/tests
    OCTAVIA_KUTTL_NAMESPACE: octavia-kuttl-tests
    OCTAVIA_REPO: https://github.com/openstack-k8s-operators/octavia-operator.git
    OKD: 'false'
    OPENSTACK_BRANCH: main
    OPENSTACK_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-bundle:latest
    OPENSTACK_COMMIT_HASH: ''
    OPENSTACK_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml
    OPENSTACK_CRDS_DIR: openstack_crds
    OPENSTACK_CTLPLANE: config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml
    OPENSTACK_IMG: quay.io/openstack-k8s-operators/openstack-operator-index:latest
    OPENSTACK_K8S_BRANCH: main
    OPENSTACK_K8S_TAG: latest
    OPENSTACK_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/kuttl-test.yaml
    OPENSTACK_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/tests/kuttl/tests
    OPENSTACK_KUTTL_NAMESPACE: openstack-kuttl-tests
    OPENSTACK_NEUTRON_CUSTOM_CONF: ''
    OPENSTACK_REPO: https://github.com/openstack-k8s-operators/openstack-operator.git
    OPENSTACK_STORAGE_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-storage-bundle:latest
    OPERATOR_BASE_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator
    OPERATOR_CHANNEL: ''
    OPERATOR_NAMESPACE: openstack-operators
    OPERATOR_SOURCE: ''
    OPERATOR_SOURCE_NAMESPACE: ''
    OUT: /home/zuul/ci-framework-data/artifacts/manifests
    OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm
    OVNCONTROLLER: config/samples/ovn_v1beta1_ovncontroller.yaml
    OVNCONTROLLER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovncontroller.yaml
    OVNCONTROLLER_NMAP: 'true'
    OVNDBS: config/samples/ovn_v1beta1_ovndbcluster.yaml
    OVNDBS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovndbcluster.yaml
    OVNNORTHD: config/samples/ovn_v1beta1_ovnnorthd.yaml
    OVNNORTHD_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovnnorthd.yaml
    OVN_BRANCH: main
    OVN_COMMIT_HASH: ''
    OVN_IMG: quay.io/openstack-k8s-operators/ovn-operator-index:latest
    OVN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/kuttl-test.yaml
    OVN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/tests/kuttl/tests
    OVN_KUTTL_NAMESPACE: ovn-kuttl-tests
    OVN_REPO: https://github.com/openstack-k8s-operators/ovn-operator.git
    PASSWORD: '12345678'
    PLACEMENTAPI: config/samples/placement_v1beta1_placementapi.yaml
    PLACEMENTAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/config/samples/placement_v1beta1_placementapi.yaml
    PLACEMENTAPI_DEPL_IMG: unused
    PLACEMENT_BRANCH: main
    PLACEMENT_COMMIT_HASH: ''
    PLACEMENT_IMG: quay.io/openstack-k8s-operators/placement-operator-index:latest
    PLACEMENT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/kuttl-test.yaml
    PLACEMENT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/tests/kuttl/tests
    PLACEMENT_KUTTL_NAMESPACE: placement-kuttl-tests
    PLACEMENT_REPO: https://github.com/openstack-k8s-operators/placement-operator.git
    PULL_SECRET: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/pull-secret.txt
    RABBITMQ: docs/examples/default-security-context/rabbitmq.yaml
    RABBITMQ_BRANCH: patches
    RABBITMQ_COMMIT_HASH: ''
    RABBITMQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rabbitmq-operator/docs/examples/default-security-context/rabbitmq.yaml
    RABBITMQ_DEPL_IMG: unused
    RABBITMQ_IMG: quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest
    RABBITMQ_REPO: https://github.com/openstack-k8s-operators/rabbitmq-cluster-operator.git
    REDHAT_OPERATORS: 'false'
    REDIS: config/samples/redis_v1beta1_redis.yaml
    REDIS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator-redis/config/samples/redis_v1beta1_redis.yaml
    REDIS_DEPL_IMG: unused
    RH_REGISTRY_PWD: ''
    RH_REGISTRY_USER: ''
    SECRET: osp-secret
    SG_CORE_DEPL_IMG: unused
    STANDALONE_COMPUTE_DRIVER: libvirt
    STANDALONE_EXTERNAL_NET_PREFFIX: 172.21.0
    STANDALONE_INTERNALAPI_NET_PREFIX: 172.17.0
    STANDALONE_STORAGEMGMT_NET_PREFIX: 172.20.0
    STANDALONE_STORAGE_NET_PREFIX: 172.18.0
    STANDALONE_TENANT_NET_PREFIX: 172.19.0
    STORAGEMGMT_HOST_ROUTES: ''
    STORAGE_CLASS: local-storage
    STORAGE_HOST_ROUTES: ''
    SWIFT: config/samples/swift_v1beta1_swift.yaml
    SWIFT_BRANCH: main
    SWIFT_COMMIT_HASH: ''
    SWIFT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/config/samples/swift_v1beta1_swift.yaml
    SWIFT_IMG: quay.io/openstack-k8s-operators/swift-operator-index:latest
    SWIFT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/kuttl-test.yaml
    SWIFT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/tests/kuttl/tests
    SWIFT_KUTTL_NAMESPACE: swift-kuttl-tests
    SWIFT_REPO: https://github.com/openstack-k8s-operators/swift-operator.git
    TELEMETRY: config/samples/telemetry_v1beta1_telemetry.yaml
    TELEMETRY_BRANCH: main
    TELEMETRY_COMMIT_HASH: ''
    TELEMETRY_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/config/samples/telemetry_v1beta1_telemetry.yaml
    TELEMETRY_IMG: quay.io/openstack-k8s-operators/telemetry-operator-index:latest
    TELEMETRY_KUTTL_BASEDIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator
    TELEMETRY_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/kuttl-test.yaml
    TELEMETRY_KUTTL_NAMESPACE: telemetry-kuttl-tests
    TELEMETRY_KUTTL_RELPATH: tests/kuttl/suites
    TELEMETRY_REPO: https://github.com/openstack-k8s-operators/telemetry-operator.git
    TENANT_HOST_ROUTES: ''
    TIMEOUT: 300s
    TLS_ENABLED: 'false'
    WATCHER_BRANCH: ''
    WATCHER_REPO: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator
    tripleo_deploy: 'export REGISTRY_USER:'
cifmw_install_yamls_environment:
    BMO_SETUP: false
    CHECKOUT_FROM_OPENSTACK_REF: 'true'
    INSTALL_CERT_MANAGER: false
    KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig
    OPENSTACK_K8S_BRANCH: main
    OUT: /home/zuul/ci-framework-data/artifacts/manifests
    OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm
    WATCHER_BRANCH: ''
    WATCHER_REPO: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator
encoding: base64
failed: false
invocation:
    module_args:
        src: /home/zuul/ci-framework-data/artifacts/parameters/install-yamls-params.yml
item: install-yamls-params.yml
source: /home/zuul/ci-framework-data/artifacts/parameters/install-yamls-params.yml
-   ansible_loop_var: item
    changed: false
    content: Y2lmbXdfb3BlbnNoaWZ0X2FwaTogYXBpLmNyYy50ZXN0aW5nOjY0NDMKY2lmbXdfb3BlbnNoaWZ0X2NvbnRleHQ6IGRlZmF1bHQvYXBpLWNyYy10ZXN0aW5nOjY0NDMva3ViZWFkbWluCmNpZm13X29wZW5zaGlmdF9rdWJlY29uZmlnOiAvaG9tZS96dXVsLy5jcmMvbWFjaGluZXMvY3JjL2t1YmVjb25maWcKY2lmbXdfb3BlbnNoaWZ0X3Rva2VuOiBzaGEyNTZ+UDN3U2JBY25sbUVoclB6eHJuMDUwUjk0SDhBSnZGU25OY1lmOGJQT0EzYwpjaWZtd19vcGVuc2hpZnRfdXNlcjoga3ViZWFkbWluCg==
    encoding: base64
    failed: false
    invocation:
        module_args:
            src: /home/zuul/ci-framework-data/artifacts/parameters/openshift-login-params.yml
    item: openshift-login-params.yml
    source: /home/zuul/ci-framework-data/artifacts/parameters/openshift-login-params.yml
-   ansible_loop_var: item
    changed: false
    content: cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw
cifmw_build_images_output: {}
cifmw_dlrn_report_result: false
cifmw_edpm_telemetry_enabled_exporters:
- podman_exporter
- openstack_network_exporter
cifmw_extras:
- '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework''].
    src_dir }}/scenarios/centos-9/multinode-ci.yml'
- '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework''].
    src_dir }}/scenarios/centos-9/horizon.yml'
- '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator''].
    src_dir }}/ci/scenarios/{{ watcher_scenario }}.yml'
- '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator''].
    src_dir }}/ci/tests/watcher-tempest.yml'
cifmw_openshift_api: api.crc.testing:6443
cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig'
cifmw_openshift_password: '123456789'
cifmw_openshift_skip_tls_verify: true
cifmw_openshift_user: kubeadmin
cifmw_operator_build_output:
    operators:
        openstack-operator:
            git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9
            git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator
            image: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9
            image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9
            image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9
        watcher-operator:
            git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d
            git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator
            image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d
            image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d
            image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d
cifmw_test_operator_tempest_external_plugin:
-   changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e
    changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin
    repository: https://opendev.org/openstack/watcher-tempest-plugin.git
cifmw_test_operator_tempest_image_tag: watcher_latest
cifmw_test_operator_tempest_namespace: '{{ content_provider_os_registry_url | split(''/'')
    | last }}'
cifmw_test_operator_tempest_registry: '{{ content_provider_os_registry_url | split(''/'')
    | first }}'
cifmw_update_containers_openstack: false
cifmw_update_containers_org: podified-epoxy-centos9
cifmw_update_containers_registry: '{{ content_provider_os_registry_url | split(''/'')
    | first }}'
cifmw_update_containers_tag: watcher_latest
cifmw_update_containers_watcher: true
cifmw_use_libvirt: false
cifmw_zuul_target_host: controller
content_provider_dlrn_md5_hash: ''
content_provider_os_registry_url: 38.102.83.53:5001/podified-epoxy-centos9
content_provider_registry_ip: 38.102.83.53
crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''')
    }}'
crc_ci_bootstrap_networking:
    instances:
        compute-0:
            networks:
                default:
                    ip: 192.168.122.100
                internal-api:
                    config_nm: false
                    ip: 172.17.0.100
                storage:
                    config_nm: false
                    ip: 172.18.0.100
                tenant:
                    config_nm: false
                    ip: 172.19.0.100
        compute-1:
            networks:
                default:
                    ip: 192.168.122.101
                internal-api:
                    config_nm: false
                    ip: 172.17.0.101
                storage:
                    config_nm: false
                    ip: 172.18.0.101
                tenant:
                    config_nm: false
                    ip: 172.19.0.101
        controller:
            networks:
                default:
                    ip: 192.168.122.11
        crc:
            networks:
                default:
                    ip: 192.168.122.10
                internal-api:
                    ip: 172.17.0.5
                storage:
                    ip: 172.18.0.5
                tenant:
                    ip: 172.19.0.5
    networks:
        default:
            mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}'
            range: 192.168.122.0/24
            router_net: ''
            transparent: true
        internal-api:
            range: 172.17.0.0/24
            vlan: 20
        storage:
            range: 172.18.0.0/24
            vlan: 21
        tenant:
            range: 172.19.0.0/24
            vlan: 22
enable_ramdisk: true
fetch_dlrn_hash: false
push_registry: quay.rdoproject.org
quay_login_secret_name: quay_nextgen_zuulgithubci
registry_login_enabled: true
watcher_scenario: edpm-no-notifications
watcher_services_tag: watcher_latest
watcher_tempest_max_microversion: '1.4'
zuul:
    _inheritance_path:
    - '<Job base-minimal branches: None source: config/zuul.d/jobs.yaml@master#24>'
    - '<Job base-crc-cloud branches: None source: config/zuul.d/_jobs-crc.yaml@master#235>'
    - '<Job cifmw-podified-multinode-edpm-base-crc branches: None source: openstack-k8s-operators/ci-framework/zuul.d/base.yaml@main#123>'
    - '<Job podified-multinode-edpm-deployment-crc branches: None source: openstack-k8s-operators/ci-framework/zuul.d/edpm_multinode.yaml@main#317>'
    - '<Job podified-multinode-edpm-deployment-crc-2comp branches: None source: openstack-k8s-operators/ci-framework/zuul.d/edpm_multinode.yaml@main#2>'
    - '<Job watcher-operator-base branches: {MatchAny:{BranchMatcher:master},{BranchMatcher:main}}
        source: openstack-k8s-operators/watcher-operator/.zuul.yaml@main#15>'
    - '<Job watcher-operator-validation-base branches: {MatchAny:{BranchMatcher:master},{BranchMatcher:main}}
        source: openstack-k8s-operators/watcher-operator/.zuul.yaml@main#75>'
    - '<Job watcher-operator-validation-epoxy branches: {MatchAny:{BranchMatcher:master},{BranchMatcher:main}}
        source: openstack-k8s-operators/watcher-operator/.zuul.yaml@main#150>'
    - '<Job watcher-operator-validation-epoxy-ocp4-16 branches: {MatchAny:{BranchMatcher:master},{BranchMatcher:main}}
        source: openstack-k8s-operators/watcher-operator/.zuul.yaml@main#192>'
    - '<Job watcher-operator-validation-epoxy-ocp4-16 branches: None source: openstack-k8s-operators/watcher-operator/.zuul.yaml@main#284>'
    ansible_version: '8'
    attempts: 1
    branch: main
    build: 9ce4c11f9f6a4904bf6148a8276a3232
    build_refs:
    -   branch: main
        change: '287'
        change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287
        commit_id: 14377136e67c9cd67507a059bfde2f19f140387d
        patchset: 14377136e67c9cd67507a059bfde2f19f140387d
        project:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/watcher-operator
            name: openstack-k8s-operators/watcher-operator
            short_name: watcher-operator
        src_dir: src/github.com/openstack-k8s-operators/watcher-operator
        topic: null
    buildset: f9416ac601264548b137ce1f44fe627c
    buildset_refs:
    -   branch: main
        change: '287'
        change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287
        commit_id: 14377136e67c9cd67507a059bfde2f19f140387d
        patchset: 14377136e67c9cd67507a059bfde2f19f140387d
        project:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/watcher-operator
            name: openstack-k8s-operators/watcher-operator
            short_name: watcher-operator
        src_dir: src/github.com/openstack-k8s-operators/watcher-operator
        topic: null
    change: '287'
    change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287
    child_jobs: []
    commit_id: 14377136e67c9cd67507a059bfde2f19f140387d
    event_id: 7dde6e80-a2f2-11f0-83f1-b4af7183f5ac
    executor:
        hostname: ze01.softwarefactory-project.io
        inventory_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/inventory.yaml
        log_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/logs
        result_data_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/results.json
        src_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/src
        work_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work
    items:
    -   branch: main
        change: '287'
        change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287
        commit_id: 14377136e67c9cd67507a059bfde2f19f140387d
        patchset: 14377136e67c9cd67507a059bfde2f19f140387d
        project:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/watcher-operator
            name: openstack-k8s-operators/watcher-operator
            short_name: watcher-operator
            src_dir: src/github.com/openstack-k8s-operators/watcher-operator
        topic: null
    job: watcher-operator-validation-epoxy-ocp4-16
    jobtags: []
    max_attempts: 1
    message: W1dJUF0gTW92ZSByYWJiaXRtcSBub3RpZmljYXRpb25zIHF1ZXVlIHRvIGNvbnRyb2xwbGFuZSBsZXZlbAoKTW92ZSByYWJiaXRtcSBub3RpZmljYXRpb25zIHF1ZXVlIGZyb20gZW5hYmxpbmcgYXQgbm92YS9jaW5kZXIvd2F0Y2hlciBsZXZlbCB0byBvcGVuc3RhY2sgY29udHJvbHBsYW5lIGxldmVsIGFmdGVyIHRoYXQgdXNhZ2UgaXMgYXZhaWxhYmxlIHNpbmNlIGh0dHBzOi8vZ2l0aHViLmNvbS9vcGVuc3RhY2stazhzLW9wZXJhdG9ycy9vcGVuc3RhY2stb3BlcmF0b3IvcHVsbC8xNTkx
    patchset: 14377136e67c9cd67507a059bfde2f19f140387d
    pipeline: github-check
    playbook_context:
        playbook_projects:
            trusted/project_0/review.rdoproject.org/config:
                canonical_name: review.rdoproject.org/config
                checkout: master
                commit: 941f6f7666fdff0145523beb29ceda8db25c234c
            trusted/project_1/opendev.org/zuul/zuul-jobs:
                canonical_name: opendev.org/zuul/zuul-jobs
                checkout: master
                commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df
            trusted/project_2/review.rdoproject.org/rdo-jobs:
                canonical_name: review.rdoproject.org/rdo-jobs
                checkout: master
                commit: d207d5ad1c5824d6db58c2eb5935a8b36674cbe4
            trusted/project_3/github.com/openstack-k8s-operators/ci-framework:
                canonical_name: github.com/openstack-k8s-operators/ci-framework
                checkout: main
                commit: 35b8986b014c5316d873d58c20dfc131ae44aa83
            untrusted/project_0/github.com/openstack-k8s-operators/ci-framework:
                canonical_name: github.com/openstack-k8s-operators/ci-framework
                checkout: main
                commit: 35b8986b014c5316d873d58c20dfc131ae44aa83
            untrusted/project_1/review.rdoproject.org/config:
                canonical_name: review.rdoproject.org/config
                checkout: master
                commit: 941f6f7666fdff0145523beb29ceda8db25c234c
            untrusted/project_2/opendev.org/zuul/zuul-jobs:
                canonical_name: opendev.org/zuul/zuul-jobs
                checkout: master
                commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df
            untrusted/project_3/review.rdoproject.org/rdo-jobs:
                canonical_name: review.rdoproject.org/rdo-jobs
                checkout: master
                commit: d207d5ad1c5824d6db58c2eb5935a8b36674cbe4
        playbooks:
        -   path: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks/edpm/run.yml
            roles:
            -   checkout: main
                checkout_description: playbook branch
                link_name: ansible/playbook_0/role_0/ci-framework
                link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework
                role_path: ansible/playbook_0/role_0/ci-framework/roles
            -   checkout: master
                checkout_description: project default branch
                link_name: ansible/playbook_0/role_1/config
                link_target: untrusted/project_1/review.rdoproject.org/config
                role_path: ansible/playbook_0/role_1/config/roles
            -   checkout: master
                checkout_description: project default branch
                link_name: ansible/playbook_0/role_2/zuul-jobs
                link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs
                role_path: ansible/playbook_0/role_2/zuul-jobs/roles
            -   checkout: master
                checkout_description: project default branch
                link_name: ansible/playbook_0/role_3/rdo-jobs
                link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs
                role_path: ansible/playbook_0/role_3/rdo-jobs/roles
    post_review: false
    project:
        canonical_hostname: github.com
        canonical_name: github.com/openstack-k8s-operators/watcher-operator
        name: openstack-k8s-operators/watcher-operator
        short_name: watcher-operator
        src_dir: src/github.com/openstack-k8s-operators/watcher-operator
    projects:
        github.com/crc-org/crc-cloud:
            canonical_hostname: github.com
            canonical_name: github.com/crc-org/crc-cloud
            checkout: main
            checkout_description: project override ref
            commit: f6ed2f2d118884a075895bbf954ff6000e540430
            name: crc-org/crc-cloud
            required: true
            short_name: crc-cloud
            src_dir: src/github.com/crc-org/crc-cloud
        github.com/openstack-k8s-operators/ci-framework:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/ci-framework
            checkout: main
            checkout_description: zuul branch
            commit: 35b8986b014c5316d873d58c20dfc131ae44aa83
            name: openstack-k8s-operators/ci-framework
            required: true
            short_name: ci-framework
            src_dir: src/github.com/openstack-k8s-operators/ci-framework
        github.com/openstack-k8s-operators/edpm-ansible:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/edpm-ansible
            checkout: main
            checkout_description: zuul branch
            commit: 95aa63de3182faad63a69301d101debad3efc936
            name: openstack-k8s-operators/edpm-ansible
            required: true
            short_name: edpm-ansible
            src_dir: src/github.com/openstack-k8s-operators/edpm-ansible
        github.com/openstack-k8s-operators/infra-operator:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/infra-operator
            checkout: main
            checkout_description: zuul branch
            commit: 2b5048bbcae44dfeaacbb43830318ca45c13f182
            name: openstack-k8s-operators/infra-operator
            required: true
            short_name: infra-operator
            src_dir: src/github.com/openstack-k8s-operators/infra-operator
        github.com/openstack-k8s-operators/install_yamls:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/install_yamls
            checkout: main
            checkout_description: zuul branch
            commit: bb26118ddc70016cbd2118a0b0a35d5f6ab9c343
            name: openstack-k8s-operators/install_yamls
            required: true
            short_name: install_yamls
            src_dir: src/github.com/openstack-k8s-operators/install_yamls
        github.com/openstack-k8s-operators/openstack-baremetal-operator:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/openstack-baremetal-operator
            checkout: main
            checkout_description: zuul branch
            commit: 3bf7652f010ead15ac2d2fec7e3b71c442b8fb8d
            name: openstack-k8s-operators/openstack-baremetal-operator
            required: true
            short_name: openstack-baremetal-operator
            src_dir: src/github.com/openstack-k8s-operators/openstack-baremetal-operator
        github.com/openstack-k8s-operators/openstack-must-gather:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/openstack-must-gather
            checkout: main
            checkout_description: zuul branch
            commit: 748dff8508cbb49e00426d46a4487b9f4c0b0096
            name: openstack-k8s-operators/openstack-must-gather
            required: true
            short_name: openstack-must-gather
            src_dir: src/github.com/openstack-k8s-operators/openstack-must-gather
        github.com/openstack-k8s-operators/openstack-operator:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/openstack-operator
            checkout: main
            checkout_description: zuul branch
            commit: 245af87e94976809f2023f59c19dffb95df97ed9
            name: openstack-k8s-operators/openstack-operator
            required: true
            short_name: openstack-operator
            src_dir: src/github.com/openstack-k8s-operators/openstack-operator
        github.com/openstack-k8s-operators/repo-setup:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/repo-setup
            checkout: main
            checkout_description: zuul branch
            commit: 37b10946c6a10f9fa26c13305f06bfd6867e723f
            name: openstack-k8s-operators/repo-setup
            required: true
            short_name: repo-setup
            src_dir: src/github.com/openstack-k8s-operators/repo-setup
        github.com/openstack-k8s-operators/watcher-operator:
            canonical_hostname: github.com
            canonical_name: github.com/openstack-k8s-operators/watcher-operator
            checkout: main
            checkout_description: zuul branch
            commit: 14377136e67c9cd67507a059bfde2f19f140387d
            name: openstack-k8s-operators/watcher-operator
            required: false
            short_name: watcher-operator
            src_dir: src/github.com/openstack-k8s-operators/watcher-operator
        opendev.org/zuul/zuul-jobs:
            canonical_hostname: opendev.org
            canonical_name: opendev.org/zuul/zuul-jobs
            checkout: master
            checkout_description: project default branch
            commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df
            name: zuul/zuul-jobs
            required: true
            short_name: zuul-jobs
            src_dir: src/opendev.org/zuul/zuul-jobs
        review.rdoproject.org/config:
            canonical_hostname: review.rdoproject.org
            canonical_name: review.rdoproject.org/config
            checkout: master
            checkout_description: project default branch
            commit: 941f6f7666fdff0145523beb29ceda8db25c234c
            name: config
            required: true
            short_name: config
            src_dir: src/review.rdoproject.org/config
    ref: refs/pull/287/head
    resources: {}
    tenant: rdoproject.org
    timeout: 10800
    topic: null
    voting: true
zuul_log_collection: false
 encoding: base64 failed: false invocation: module_args: src: /home/zuul/ci-framework-data/artifacts/parameters/zuul-params.yml item: zuul-params.yml source: /home/zuul/ci-framework-data/artifacts/parameters/zuul-params.yml skipped: false ansible_all_ipv4_addresses: - 38.102.83.51 ansible_all_ipv6_addresses: - fe80::f816:3eff:fe6f:820 ansible_apparmor: status: disabled ansible_architecture: x86_64 ansible_bios_date: 04/01/2014 ansible_bios_vendor: SeaBIOS ansible_bios_version: 1.15.0-1 ansible_board_asset_tag: NA ansible_board_name: NA ansible_board_serial: NA ansible_board_vendor: NA ansible_board_version: NA ansible_chassis_asset_tag: NA ansible_chassis_serial: NA ansible_chassis_vendor: QEMU ansible_chassis_version: pc-i440fx-6.2 ansible_check_mode: false ansible_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 ansible_config_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/post_playbook_0/ansible.cfg ansible_connection: ssh ansible_date_time: date: '2025-10-06' day: '06' epoch: '1759785143' epoch_int: '1759785143' hour: '21' iso8601: '2025-10-06T21:12:23Z' iso8601_basic: 20251006T211223407560 iso8601_basic_short: 20251006T211223 iso8601_micro: '2025-10-06T21:12:23.407560Z' minute: '12' month: '10' second: '23' time: '21:12:23' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Monday weekday_number: '1' weeknumber: '40' year: '2025' ansible_default_ipv4: address: 38.102.83.51 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: fa:16:3e:6f:08:20 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether ansible_default_ipv6: {} ansible_device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 masters: {} uuids: sr0: - 2025-10-06-20-56-29-00 vda1: - 1631a6ad-43b8-436d-ae76-16fa14b94458 ansible_devices: sr0: holders: [] host: '' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2025-10-06-20-56-29-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: - 1631a6ad-43b8-436d-ae76-16fa14b94458 sectors: '83883999' sectorsize: 512 size: 40.00 GB start: '2048' uuid: 1631a6ad-43b8-436d-ae76-16fa14b94458 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '83886080' sectorsize: '512' size: 40.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 ansible_diff_mode: false ansible_distribution: CentOS ansible_distribution_file_parsed: true ansible_distribution_file_path: /etc/centos-release ansible_distribution_file_variety: CentOS ansible_distribution_major_version: '9' ansible_distribution_release: Stream ansible_distribution_version: '9' ansible_dns: nameservers: - 192.168.122.10 - 199.204.44.24 - 199.204.47.54 ansible_domain: '' ansible_effective_group_id: 1000 ansible_effective_user_id: 1000 ansible_env: ANSIBLE_LOG_PATH: /home/zuul/ci-framework-data/logs/e2e-collect-logs-must-gather.log BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions 
--show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' DEBUGINFOD_URLS: 'https://debuginfod.centos.org/ ' HOME: /home/zuul LANG: en_US.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: /home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 35354 22 SSH_CONNECTION: 38.102.83.114 35354 38.102.83.51 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '12' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f ansible_eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.51 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::f816:3eff:fe6f:820 prefix: '64' scope: link macaddress: fa:16:3e:6f:08:20 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether ansible_facts: _ansible_facts_gathered: true all_ipv4_addresses: - 38.102.83.51 all_ipv6_addresses: - fe80::f816:3eff:fe6f:820 ansible_local: {} apparmor: status: disabled architecture: x86_64 bios_date: 04/01/2014 bios_vendor: SeaBIOS bios_version: 1.15.0-1 board_asset_tag: NA board_name: NA board_serial: NA board_vendor: NA board_version: NA chassis_asset_tag: NA chassis_serial: NA chassis_vendor: QEMU chassis_version: pc-i440fx-6.2 cifmw_path: 
/home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 crc_ci_bootstrap_instance_default_net_config: mtu: '1500' range: 192.168.122.0/24 router_net: '' transparent: true crc_ci_bootstrap_instance_nm_vlan_networks: - key: internal-api value: ip: 172.17.0.5 - key: storage value: ip: 172.18.0.5 - key: tenant value: ip: 172.19.0.5 crc_ci_bootstrap_instance_parent_port_create_yaml: admin_state_up: true allowed_address_pairs: [] binding_host_id: null binding_profile: {} binding_vif_details: {} binding_vif_type: null binding_vnic_type: normal created_at: '2025-10-06T21:00:58Z' data_plane_status: null description: '' device_id: '' device_owner: '' device_profile: null dns_assignment: - fqdn: host-192-168-122-10.openstacklocal. hostname: host-192-168-122-10 ip_address: 192.168.122.10 dns_domain: '' dns_name: '' extra_dhcp_opts: [] fixed_ips: - ip_address: 192.168.122.10 subnet_id: 139175d8-a9e6-4e3b-931b-a0af30583742 hardware_offload_type: null hints: '' id: 81f69e80-a4c7-43d8-ad59-1b24fcb3acbf ip_allocation: immediate mac_address: fa:16:3e:36:76:9b name: crc-bfd057b4-b43d-4dc5-bc10-e91bf10a649b network_id: febb7485-9e12-4711-8dc7-e207293e25de numa_affinity_policy: null port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 propagate_uplink_status: null qos_network_policy_id: null qos_policy_id: null resource_request: null revision_number: 1 security_group_ids: [] status: DOWN tags: [] trunk_details: null trusted: null updated_at: '2025-10-06T21:00:58Z' crc_ci_bootstrap_network_name: zuul-ci-net-9ce4c11f crc_ci_bootstrap_networks_out: compute-0: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.100/24 mac: fa:16:3e:1a:6b:7b mtu: '1500' internal-api: iface: eth1.20 ip: 172.17.0.100/24 mac: 52:54:00:ec:df:aa mtu: '1496' parent_iface: eth1 vlan: 20 storage: iface: eth1.21 ip: 172.18.0.100/24 mac: 52:54:00:e1:d1:4a mtu: '1496' parent_iface: eth1 vlan: 21 tenant: iface: eth1.22 ip: 172.19.0.100/24 mac: 52:54:00:6e:fd:3e mtu: '1496' parent_iface: eth1 vlan: 22 compute-1: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.101/24 mac: fa:16:3e:cb:47:1e mtu: '1500' internal-api: iface: eth1.20 ip: 172.17.0.101/24 mac: 52:54:00:69:15:f1 mtu: '1496' parent_iface: eth1 vlan: 20 storage: iface: eth1.21 ip: 172.18.0.101/24 mac: 52:54:00:c3:9b:d0 mtu: '1496' parent_iface: eth1 vlan: 21 tenant: iface: eth1.22 ip: 172.19.0.101/24 mac: 52:54:00:1d:8d:c8 mtu: '1496' parent_iface: eth1 vlan: 22 controller: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.11/24 mac: fa:16:3e:fc:47:4f mtu: '1500' crc: default: connection: ci-private-network gw: 192.168.122.1 iface: ens7 ip: 192.168.122.10/24 mac: fa:16:3e:36:76:9b mtu: '1500' internal-api: connection: ci-private-network-20 iface: ens7.20 ip: 172.17.0.5/24 mac: 52:54:00:aa:79:c3 mtu: '1496' parent_iface: ens7 vlan: 20 storage: connection: ci-private-network-21 iface: ens7.21 ip: 172.18.0.5/24 mac: 52:54:00:bd:b2:92 mtu: '1496' parent_iface: ens7 vlan: 21 tenant: connection: ci-private-network-22 iface: ens7.22 ip: 172.19.0.5/24 mac: 52:54:00:86:1f:43 mtu: '1496' parent_iface: ens7 vlan: 22 
crc_ci_bootstrap_private_net_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2025-10-06T20:59:40Z' description: '' dns_domain: '' id: febb7485-9e12-4711-8dc7-e207293e25de ipv4_address_scope: null ipv6_address_scope: null is_default: false is_vlan_qinq: null is_vlan_transparent: true l2_adjacency: true mtu: 1500 name: zuul-ci-net-9ce4c11f port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 provider:network_type: null provider:physical_network: null provider:segmentation_id: null qos_policy_id: null revision_number: 1 router:external: false segments: null shared: false status: ACTIVE subnets: [] tags: [] updated_at: '2025-10-06T20:59:40Z' crc_ci_bootstrap_private_router_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2025-10-06T20:59:45Z' description: '' enable_ndp_proxy: null external_gateway_info: null flavor_id: null id: 4c5c07da-6180-4e43-8bfc-7faf50c6c9a5 name: zuul-ci-subnet-router-9ce4c11f project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 1 routes: [] status: ACTIVE tags: [] tenant_id: 4b633c451ac74233be3721a3635275e5 updated_at: '2025-10-06T20:59:45Z' crc_ci_bootstrap_private_subnet_create_yaml: allocation_pools: - end: 192.168.122.254 start: 192.168.122.2 cidr: 192.168.122.0/24 created_at: '2025-10-06T20:59:43Z' description: '' dns_nameservers: [] dns_publish_fixed_ip: null enable_dhcp: false gateway_ip: 192.168.122.1 host_routes: [] id: 139175d8-a9e6-4e3b-931b-a0af30583742 ip_version: 4 ipv6_address_mode: null ipv6_ra_mode: null name: zuul-ci-subnet-9ce4c11f network_id: febb7485-9e12-4711-8dc7-e207293e25de project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 0 segment_id: null service_types: [] subnetpool_id: null tags: [] updated_at: '2025-10-06T20:59:43Z' crc_ci_bootstrap_provider_dns: - 199.204.44.24 - 199.204.47.54 crc_ci_bootstrap_router_name: zuul-ci-subnet-router-9ce4c11f crc_ci_bootstrap_subnet_name: zuul-ci-subnet-9ce4c11f date_time: date: '2025-10-06' day: '06' epoch: '1759785143' epoch_int: '1759785143' hour: '21' iso8601: '2025-10-06T21:12:23Z' iso8601_basic: 20251006T211223407560 iso8601_basic_short: 20251006T211223 iso8601_micro: '2025-10-06T21:12:23.407560Z' minute: '12' month: '10' second: '23' time: '21:12:23' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Monday weekday_number: '1' weeknumber: '40' year: '2025' default_ipv4: address: 38.102.83.51 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: fa:16:3e:6f:08:20 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether default_ipv6: {} device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 masters: {} uuids: sr0: - 2025-10-06-20-56-29-00 vda1: - 1631a6ad-43b8-436d-ae76-16fa14b94458 devices: sr0: holders: [] host: '' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2025-10-06-20-56-29-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: - 1631a6ad-43b8-436d-ae76-16fa14b94458 sectors: '83883999' sectorsize: 512 size: 40.00 GB start: '2048' uuid: 1631a6ad-43b8-436d-ae76-16fa14b94458 removable: '0' rotational: '1' 
sas_address: null sas_device_handle: null scheduler_mode: none sectors: '83886080' sectorsize: '512' size: 40.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 discovered_interpreter_python: /usr/bin/python3 distribution: CentOS distribution_file_parsed: true distribution_file_path: /etc/centos-release distribution_file_variety: CentOS distribution_major_version: '9' distribution_release: Stream distribution_version: '9' dns: nameservers: - 192.168.122.10 - 199.204.44.24 - 199.204.47.54 domain: '' effective_group_id: 1000 effective_user_id: 1000 env: ANSIBLE_LOG_PATH: /home/zuul/ci-framework-data/logs/e2e-collect-logs-must-gather.log BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' DEBUGINFOD_URLS: 'https://debuginfod.centos.org/ ' HOME: /home/zuul LANG: en_US.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: /home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 35354 22 SSH_CONNECTION: 38.102.83.114 35354 38.102.83.51 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '12' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.51 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' 
ipv6: - address: fe80::f816:3eff:fe6f:820 prefix: '64' scope: link macaddress: fa:16:3e:6f:08:20 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether fibre_channel_wwn: [] fips: false form_factor: Other fqdn: controller gather_subset: - min hostname: controller hostnqn: nqn.2014-08.org.nvmexpress:uuid:2f7d2450-18ac-43a6-80ee-9caa4a7736e0 interfaces: - eth0 - lo is_chroot: false iscsi_iqn: '' kernel: 5.14.0-620.el9.x86_64 kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Sep 26 01:13:23 UTC 2025' lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback loadavg: 15m: 0.03 1m: 0.14 5m: 0.09 locally_reachable_ips: ipv4: - 38.102.83.51 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:fe6f:820 lsb: {} lvm: N/A machine: x86_64 machine_id: 42833e1b511a402df82cb9cb2fc36491 memfree_mb: 3293 memory_mb: nocache: free: 3444 used: 211 real: free: 3293 total: 3655 used: 362 swap: cached: 0 free: 0 total: 0 used: 0 memtotal_mb: 3655 module_setup: true mounts: - block_available: 9966449 block_size: 4096 block_total: 10469115 block_used: 502666 device: /dev/vda1 fstype: xfs inode_available: 20916775 inode_total: 20970992 inode_used: 54217 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 40822575104 size_total: 42881495040 uuid: 1631a6ad-43b8-436d-ae76-16fa14b94458 nodename: controller os_family: RedHat pkg_mgr: dnf proc_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 
console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor processor_cores: 1 processor_count: 2 processor_nproc: 2 processor_threads_per_core: 1 processor_vcpus: 2 product_name: OpenStack Nova product_serial: NA product_uuid: NA product_version: 26.2.1 python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 23 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 23 - final - 0 python_version: 3.9.23 real_group_id: 1000 real_user_id: 1000 selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted selinux_python_present: true service_mgr: systemd ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBDpEwzeDGLwNlfP3Up6vCxCw7kSSu0AiDUvDH/J+EepxMPGLLpzT0wX+lEXL9GArqfNU/UBUmiiwh9dZO9tQ5bk= ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIPrlPStzDnFCaI6YFfPj0aQKsBPAAZFkT8awb2RrAe7g ssh_host_key_ed25519_public_keytype: ssh-ed25519 ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQCsnSYzB9ciMqmgm0l3UC4GvkzqKIRU4HQjm2Wmmz4ONumnWKBZyfZPUd4C1zWgntSd7/HfwyQS5GOnhobA5K/1o855yq/Qr6a2M0JVvxnLdxB089mymIDZ9Z5iXDsVHJNPHKuz3pjoZDbA5XzpQPsDbEeMHpBd0Yz5DQaAPYYN1wg4Wtq6PK86i2jV8qtVH7OnCkn06futt/HtJ4eADwKZV6cutqDHmuTqXwagLJ7PWTm0H9xAYR/Tsgd28krH/EIdcyHBACqdSrk6FWPOdZ1Q5PjVC0ZOHemQeiRhmYW5NgxnnEgSmoTMCyMKRYbVcMYeHKRBg/rXhLSbymoU+eF+Kza486CELgT9KG4Z0NTOmyzNu1ee8G0ZOaowjIQ8Gr6e15WUMLbskShDGqXlAnaRHOAQhBSGCkt0N9KMyGaBdFYVzJOgqi1erPoCN1pLe7Ljr44blAH6Yvp9H8Ji4mLuVYB7PmDHL0Mb4zkjqi/MU9Okx1escBZI4ASrkaXkp18= ssh_host_key_rsa_public_keytype: ssh-rsa swapfree_mb: 0 swaptotal_mb: 0 system: Linux system_capabilities: - '' system_capabilities_enforced: 'True' system_vendor: OpenStack Foundation uptime_seconds: 135 user_dir: /home/zuul user_gecos: '' user_gid: 1000 user_id: zuul user_shell: /bin/bash user_uid: 1000 userspace_architecture: x86_64 userspace_bits: '64' virtualization_role: guest virtualization_tech_guest: - openstack virtualization_tech_host: - kvm virtualization_type: openstack zuul_change_list: - watcher-operator ansible_fibre_channel_wwn: [] ansible_fips: false ansible_forks: 5 ansible_form_factor: Other ansible_fqdn: controller ansible_host: 38.102.83.51 ansible_hostname: controller ansible_hostnqn: nqn.2014-08.org.nvmexpress:uuid:2f7d2450-18ac-43a6-80ee-9caa4a7736e0 ansible_interfaces: - eth0 - lo ansible_inventory_sources: - /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/post_playbook_0/inventory.yaml ansible_is_chroot: false ansible_iscsi_iqn: '' ansible_kernel: 5.14.0-620.el9.x86_64 ansible_kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Sep 26 01:13:23 UTC 2025' ansible_lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' 
rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback ansible_loadavg: 15m: 0.03 1m: 0.14 5m: 0.09 ansible_local: {} ansible_locally_reachable_ips: ipv4: - 38.102.83.51 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:fe6f:820 ansible_lsb: {} ansible_lvm: N/A ansible_machine: x86_64 ansible_machine_id: 42833e1b511a402df82cb9cb2fc36491 ansible_memfree_mb: 3293 ansible_memory_mb: nocache: free: 3444 used: 211 real: free: 3293 total: 3655 used: 362 swap: cached: 0 free: 0 total: 0 used: 0 ansible_memtotal_mb: 3655 ansible_mounts: - block_available: 9966449 block_size: 4096 block_total: 10469115 block_used: 502666 device: /dev/vda1 fstype: xfs inode_available: 20916775 inode_total: 20970992 inode_used: 54217 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 40822575104 size_total: 42881495040 uuid: 1631a6ad-43b8-436d-ae76-16fa14b94458 ansible_nodename: controller ansible_os_family: RedHat ansible_pkg_mgr: dnf ansible_playbook_python: /usr/lib/zuul/ansible/8/bin/python ansible_port: 22 ansible_proc_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 ansible_processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor ansible_processor_cores: 1 ansible_processor_count: 2 ansible_processor_nproc: 2 ansible_processor_threads_per_core: 1 ansible_processor_vcpus: 2 ansible_product_name: OpenStack Nova ansible_product_serial: NA ansible_product_uuid: NA ansible_product_version: 26.2.1 ansible_python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 23 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 23 - final - 0 ansible_python_interpreter: auto ansible_python_version: 3.9.23 ansible_real_group_id: 1000 ansible_real_user_id: 1000 ansible_run_tags: - all ansible_scp_extra_args: -o PermitLocalCommand=no ansible_selinux: config_mode: enforcing mode: enforcing policyvers: 33 
status: enabled type: targeted ansible_selinux_python_present: true ansible_service_mgr: systemd ansible_sftp_extra_args: -o PermitLocalCommand=no ansible_skip_tags: [] ansible_ssh_common_args: -o PermitLocalCommand=no ansible_ssh_executable: ssh ansible_ssh_extra_args: -o PermitLocalCommand=no ansible_ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBDpEwzeDGLwNlfP3Up6vCxCw7kSSu0AiDUvDH/J+EepxMPGLLpzT0wX+lEXL9GArqfNU/UBUmiiwh9dZO9tQ5bk= ansible_ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ansible_ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIPrlPStzDnFCaI6YFfPj0aQKsBPAAZFkT8awb2RrAe7g ansible_ssh_host_key_ed25519_public_keytype: ssh-ed25519 ansible_ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQCsnSYzB9ciMqmgm0l3UC4GvkzqKIRU4HQjm2Wmmz4ONumnWKBZyfZPUd4C1zWgntSd7/HfwyQS5GOnhobA5K/1o855yq/Qr6a2M0JVvxnLdxB089mymIDZ9Z5iXDsVHJNPHKuz3pjoZDbA5XzpQPsDbEeMHpBd0Yz5DQaAPYYN1wg4Wtq6PK86i2jV8qtVH7OnCkn06futt/HtJ4eADwKZV6cutqDHmuTqXwagLJ7PWTm0H9xAYR/Tsgd28krH/EIdcyHBACqdSrk6FWPOdZ1Q5PjVC0ZOHemQeiRhmYW5NgxnnEgSmoTMCyMKRYbVcMYeHKRBg/rXhLSbymoU+eF+Kza486CELgT9KG4Z0NTOmyzNu1ee8G0ZOaowjIQ8Gr6e15WUMLbskShDGqXlAnaRHOAQhBSGCkt0N9KMyGaBdFYVzJOgqi1erPoCN1pLe7Ljr44blAH6Yvp9H8Ji4mLuVYB7PmDHL0Mb4zkjqi/MU9Okx1escBZI4ASrkaXkp18= ansible_ssh_host_key_rsa_public_keytype: ssh-rsa ansible_swapfree_mb: 0 ansible_swaptotal_mb: 0 ansible_system: Linux ansible_system_capabilities: - '' ansible_system_capabilities_enforced: 'True' ansible_system_vendor: OpenStack Foundation ansible_uptime_seconds: 135 ansible_user: zuul ansible_user_dir: /home/zuul ansible_user_gecos: '' ansible_user_gid: 1000 ansible_user_id: zuul ansible_user_shell: /bin/bash ansible_user_uid: 1000 ansible_userspace_architecture: x86_64 ansible_userspace_bits: '64' ansible_verbosity: 1 ansible_version: full: 2.15.12 major: 2 minor: 15 revision: 12 string: 2.15.12 ansible_virtualization_role: guest ansible_virtualization_tech_guest: - openstack ansible_virtualization_tech_host: - kvm ansible_virtualization_type: openstack cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_basedir: /home/zuul/ci-framework-data cifmw_build_images_output: {} cifmw_dlrn_report_result: false cifmw_edpm_telemetry_enabled_exporters: - podman_exporter - openstack_network_exporter cifmw_extras: - '@/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/scenarios/centos-9/multinode-ci.yml' - '@/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/scenarios/centos-9/horizon.yml' - '@/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/scenarios/edpm-no-notifications.yml' - '@/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/tests/watcher-tempest.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: /home/zuul/.crc/machines/crc/kubeconfig cifmw_openshift_password: '123456789' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_operator_build_output: operators: openstack-operator: git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9 git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator image: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9 image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9 image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9 watcher-operator: git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d 
git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d cifmw_path: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin cifmw_status: changed: false failed: false stat: atime: 1759784988.6636567 attr_flags: '' attributes: [] block_size: 4096 blocks: 8 charset: binary ctime: 1759784979.0963836 dev: 64513 device_type: 0 executable: true exists: true gid: 1000 gr_name: zuul inode: 8535629 isblk: false ischr: false isdir: true isfifo: false isgid: false islnk: false isreg: false issock: false isuid: false mimetype: inode/directory mode: '0755' mtime: 1759784979.0963836 nlink: 21 path: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework pw_name: zuul readable: true rgrp: true roth: true rusr: true size: 4096 uid: 1000 version: '2141595077' wgrp: false woth: false writeable: true wusr: true xgrp: true xoth: true xusr: true cifmw_success_flag: changed: false failed: false stat: exists: false cifmw_test_operator_tempest_external_plugin: - changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin repository: https://opendev.org/openstack/watcher-tempest-plugin.git cifmw_test_operator_tempest_image_tag: watcher_latest cifmw_test_operator_tempest_namespace: podified-epoxy-centos9 cifmw_test_operator_tempest_registry: 38.102.83.53:5001 cifmw_update_containers_openstack: false cifmw_update_containers_org: podified-epoxy-centos9 cifmw_update_containers_registry: 38.102.83.53:5001 cifmw_update_containers_tag: watcher_latest cifmw_update_containers_watcher: true cifmw_use_libvirt: false cifmw_zuul_target_host: controller content_provider_dlrn_md5_hash: '' content_provider_os_registry_url: 38.102.83.53:5001/podified-epoxy-centos9 content_provider_registry_ip: 38.102.83.53 crc_ci_bootstrap_cloud_name: vexxhost crc_ci_bootstrap_instance_default_net_config: mtu: '1500' range: 192.168.122.0/24 router_net: '' transparent: true crc_ci_bootstrap_instance_nm_vlan_networks: - key: internal-api value: ip: 172.17.0.5 - key: storage value: ip: 172.18.0.5 - key: tenant value: ip: 172.19.0.5 crc_ci_bootstrap_instance_parent_port_create_yaml: admin_state_up: true allowed_address_pairs: [] binding_host_id: null binding_profile: {} binding_vif_details: {} binding_vif_type: null binding_vnic_type: normal created_at: '2025-10-06T21:00:58Z' data_plane_status: null description: '' device_id: '' device_owner: '' device_profile: null dns_assignment: - fqdn: host-192-168-122-10.openstacklocal. 
hostname: host-192-168-122-10 ip_address: 192.168.122.10 dns_domain: '' dns_name: '' extra_dhcp_opts: [] fixed_ips: - ip_address: 192.168.122.10 subnet_id: 139175d8-a9e6-4e3b-931b-a0af30583742 hardware_offload_type: null hints: '' id: 81f69e80-a4c7-43d8-ad59-1b24fcb3acbf ip_allocation: immediate mac_address: fa:16:3e:36:76:9b name: crc-bfd057b4-b43d-4dc5-bc10-e91bf10a649b network_id: febb7485-9e12-4711-8dc7-e207293e25de numa_affinity_policy: null port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 propagate_uplink_status: null qos_network_policy_id: null qos_policy_id: null resource_request: null revision_number: 1 security_group_ids: [] status: DOWN tags: [] trunk_details: null trusted: null updated_at: '2025-10-06T21:00:58Z' crc_ci_bootstrap_network_name: zuul-ci-net-9ce4c11f crc_ci_bootstrap_networking: instances: compute-0: networks: default: ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 tenant: config_nm: false ip: 172.19.0.100 compute-1: networks: default: ip: 192.168.122.101 internal-api: config_nm: false ip: 172.17.0.101 storage: config_nm: false ip: 172.18.0.101 tenant: config_nm: false ip: 172.19.0.101 controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: '1500' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 crc_ci_bootstrap_networks_out: compute-0: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.100/24 mac: fa:16:3e:1a:6b:7b mtu: '1500' internal-api: iface: eth1.20 ip: 172.17.0.100/24 mac: 52:54:00:ec:df:aa mtu: '1496' parent_iface: eth1 vlan: 20 storage: iface: eth1.21 ip: 172.18.0.100/24 mac: 52:54:00:e1:d1:4a mtu: '1496' parent_iface: eth1 vlan: 21 tenant: iface: eth1.22 ip: 172.19.0.100/24 mac: 52:54:00:6e:fd:3e mtu: '1496' parent_iface: eth1 vlan: 22 compute-1: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.101/24 mac: fa:16:3e:cb:47:1e mtu: '1500' internal-api: iface: eth1.20 ip: 172.17.0.101/24 mac: 52:54:00:69:15:f1 mtu: '1496' parent_iface: eth1 vlan: 20 storage: iface: eth1.21 ip: 172.18.0.101/24 mac: 52:54:00:c3:9b:d0 mtu: '1496' parent_iface: eth1 vlan: 21 tenant: iface: eth1.22 ip: 172.19.0.101/24 mac: 52:54:00:1d:8d:c8 mtu: '1496' parent_iface: eth1 vlan: 22 controller: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.11/24 mac: fa:16:3e:fc:47:4f mtu: '1500' crc: default: connection: ci-private-network gw: 192.168.122.1 iface: ens7 ip: 192.168.122.10/24 mac: fa:16:3e:36:76:9b mtu: '1500' internal-api: connection: ci-private-network-20 iface: ens7.20 ip: 172.17.0.5/24 mac: 52:54:00:aa:79:c3 mtu: '1496' parent_iface: ens7 vlan: 20 storage: connection: ci-private-network-21 iface: ens7.21 ip: 172.18.0.5/24 mac: 52:54:00:bd:b2:92 mtu: '1496' parent_iface: ens7 vlan: 21 tenant: connection: ci-private-network-22 iface: ens7.22 ip: 172.19.0.5/24 mac: 52:54:00:86:1f:43 mtu: '1496' parent_iface: ens7 vlan: 22 crc_ci_bootstrap_private_net_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2025-10-06T20:59:40Z' description: '' dns_domain: '' id: febb7485-9e12-4711-8dc7-e207293e25de ipv4_address_scope: null ipv6_address_scope: null is_default: false is_vlan_qinq: 
null is_vlan_transparent: true l2_adjacency: true mtu: 1500 name: zuul-ci-net-9ce4c11f port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 provider:network_type: null provider:physical_network: null provider:segmentation_id: null qos_policy_id: null revision_number: 1 router:external: false segments: null shared: false status: ACTIVE subnets: [] tags: [] updated_at: '2025-10-06T20:59:40Z' crc_ci_bootstrap_private_router_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2025-10-06T20:59:45Z' description: '' enable_ndp_proxy: null external_gateway_info: null flavor_id: null id: 4c5c07da-6180-4e43-8bfc-7faf50c6c9a5 name: zuul-ci-subnet-router-9ce4c11f project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 1 routes: [] status: ACTIVE tags: [] tenant_id: 4b633c451ac74233be3721a3635275e5 updated_at: '2025-10-06T20:59:45Z' crc_ci_bootstrap_private_subnet_create_yaml: allocation_pools: - end: 192.168.122.254 start: 192.168.122.2 cidr: 192.168.122.0/24 created_at: '2025-10-06T20:59:43Z' description: '' dns_nameservers: [] dns_publish_fixed_ip: null enable_dhcp: false gateway_ip: 192.168.122.1 host_routes: [] id: 139175d8-a9e6-4e3b-931b-a0af30583742 ip_version: 4 ipv6_address_mode: null ipv6_ra_mode: null name: zuul-ci-subnet-9ce4c11f network_id: febb7485-9e12-4711-8dc7-e207293e25de project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 0 segment_id: null service_types: [] subnetpool_id: null tags: [] updated_at: '2025-10-06T20:59:43Z' crc_ci_bootstrap_provider_dns: - 199.204.44.24 - 199.204.47.54 crc_ci_bootstrap_router_name: zuul-ci-subnet-router-9ce4c11f crc_ci_bootstrap_subnet_name: zuul-ci-subnet-9ce4c11f discovered_interpreter_python: /usr/bin/python3 enable_ramdisk: true fetch_dlrn_hash: false gather_subset: - min group_names: - ungrouped groups: all: - compute-0 - compute-1 - controller - crc computes: - compute-0 - compute-1 ocps: - crc ungrouped: *id001 zuul_unreachable: [] inventory_dir: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/post_playbook_0 inventory_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/post_playbook_0/inventory.yaml inventory_hostname: controller inventory_hostname_short: controller logfiles_dest_dir: /home/zuul/ci-framework-data/logs/2025-10-06_21-12 module_setup: true nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: e795da60-c8d6-4446-ba72-4fddfe4bf7ea host_id: 5519e7a0ee5dc826795d295efc9c908d171b61deb9bf71b1016f861f interface_ip: 38.102.83.51 label: cloud-centos-9-stream-tripleo-medium private_ipv4: 38.102.83.51 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.51 public_ipv6: '' region: RegionOne slot: null omit: __omit_place_holder__7c6be8c090d39d526a4b5005be5e95dd82bdf54e param_dir: changed: false failed: false stat: atime: 1759785108.2661083 attr_flags: '' attributes: [] block_size: 4096 blocks: 0 charset: binary ctime: 1759785082.7693696 dev: 64513 device_type: 0 executable: true exists: true gid: 1000 gr_name: zuul inode: 67177381 isblk: false ischr: false isdir: true isfifo: false isgid: false islnk: false isreg: false issock: false isuid: false mimetype: inode/directory mode: '0755' mtime: 1759785082.7693696 nlink: 2 path: /home/zuul/ci-framework-data/artifacts/parameters pw_name: zuul readable: true rgrp: true roth: true rusr: true size: 120 uid: 1000 version: '2370786047' wgrp: false woth: false writeable: true wusr: true xgrp: true xoth: true xusr: true playbook_dir: 
/var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true unsafe_vars: ansible_connection: ssh ansible_host: 38.102.83.51 ansible_port: 22 ansible_python_interpreter: auto ansible_user: zuul cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_build_images_output: {} cifmw_dlrn_report_result: false cifmw_edpm_telemetry_enabled_exporters: - podman_exporter - openstack_network_exporter cifmw_extras: - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/multinode-ci.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/horizon.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. src_dir }}/ci/scenarios/{{ watcher_scenario }}.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. src_dir }}/ci/tests/watcher-tempest.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '123456789' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_operator_build_output: operators: openstack-operator: git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9 git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator image: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9 image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9 image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9 watcher-operator: git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d cifmw_test_operator_tempest_external_plugin: - changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin repository: https://opendev.org/openstack/watcher-tempest-plugin.git cifmw_test_operator_tempest_image_tag: watcher_latest cifmw_test_operator_tempest_namespace: '{{ content_provider_os_registry_url | split(''/'') | last }}' cifmw_test_operator_tempest_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_openstack: false cifmw_update_containers_org: podified-epoxy-centos9 cifmw_update_containers_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_tag: watcher_latest cifmw_update_containers_watcher: true cifmw_use_libvirt: false cifmw_zuul_target_host: controller content_provider_dlrn_md5_hash: '' content_provider_os_registry_url: 38.102.83.53:5001/podified-epoxy-centos9 content_provider_registry_ip: 38.102.83.53 crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' 
crc_ci_bootstrap_networking: instances: compute-0: networks: default: ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 tenant: config_nm: false ip: 172.19.0.100 compute-1: networks: default: ip: 192.168.122.101 internal-api: config_nm: false ip: 172.17.0.101 storage: config_nm: false ip: 172.18.0.101 tenant: config_nm: false ip: 172.19.0.101 controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 enable_ramdisk: true fetch_dlrn_hash: false nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: e795da60-c8d6-4446-ba72-4fddfe4bf7ea host_id: 5519e7a0ee5dc826795d295efc9c908d171b61deb9bf71b1016f861f interface_ip: 38.102.83.51 label: cloud-centos-9-stream-tripleo-medium private_ipv4: 38.102.83.51 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.51 public_ipv6: '' region: RegionOne slot: null push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true watcher_scenario: edpm-no-notifications watcher_services_tag: watcher_latest watcher_tempest_max_microversion: '1.4' zuul_log_collection: false watcher_scenario: edpm-no-notifications watcher_services_tag: watcher_latest watcher_tempest_max_microversion: '1.4' zuul: _inheritance_path: - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' ansible_version: '8' attempts: 1 branch: main build: 9ce4c11f9f6a4904bf6148a8276a3232 build_refs: - branch: main change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null buildset: f9416ac601264548b137ce1f44fe627c buildset_refs: - branch: main change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue 
from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 child_jobs: [] commit_id: 14377136e67c9cd67507a059bfde2f19f140387d event_id: 7dde6e80-a2f2-11f0-83f1-b4af7183f5ac executor: hostname: ze01.softwarefactory-project.io inventory_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/inventory.yaml log_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/logs result_data_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/results.json src_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/src work_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work items: - branch: main change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null job: watcher-operator-validation-epoxy-ocp4-16 jobtags: [] max_attempts: 1 message: W1dJUF0gTW92ZSByYWJiaXRtcSBub3RpZmljYXRpb25zIHF1ZXVlIHRvIGNvbnRyb2xwbGFuZSBsZXZlbAoKTW92ZSByYWJiaXRtcSBub3RpZmljYXRpb25zIHF1ZXVlIGZyb20gZW5hYmxpbmcgYXQgbm92YS9jaW5kZXIvd2F0Y2hlciBsZXZlbCB0byBvcGVuc3RhY2sgY29udHJvbHBsYW5lIGxldmVsIGFmdGVyIHRoYXQgdXNhZ2UgaXMgYXZhaWxhYmxlIHNpbmNlIGh0dHBzOi8vZ2l0aHViLmNvbS9vcGVuc3RhY2stazhzLW9wZXJhdG9ycy9vcGVuc3RhY2stb3BlcmF0b3IvcHVsbC8xNTkx patchset: 14377136e67c9cd67507a059bfde2f19f140387d pipeline: github-check playbook_context: playbook_projects: trusted/project_0/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 941f6f7666fdff0145523beb29ceda8db25c234c trusted/project_1/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df trusted/project_2/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d207d5ad1c5824d6db58c2eb5935a8b36674cbe4 trusted/project_3/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 untrusted/project_0/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 untrusted/project_1/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 941f6f7666fdff0145523beb29ceda8db25c234c untrusted/project_2/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df untrusted/project_3/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d207d5ad1c5824d6db58c2eb5935a8b36674cbe4 playbooks: - path: 
untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks/edpm/run.yml roles: - checkout: main checkout_description: playbook branch link_name: ansible/playbook_0/role_0/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_0/role_0/ci-framework/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_1/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_0/role_1/config/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_2/zuul-jobs link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_0/role_2/zuul-jobs/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_3/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_0/role_3/rdo-jobs/roles post_review: false project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator projects: github.com/crc-org/crc-cloud: canonical_hostname: github.com canonical_name: github.com/crc-org/crc-cloud checkout: main checkout_description: project override ref commit: f6ed2f2d118884a075895bbf954ff6000e540430 name: crc-org/crc-cloud required: true short_name: crc-cloud src_dir: src/github.com/crc-org/crc-cloud github.com/openstack-k8s-operators/ci-framework: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main checkout_description: zuul branch commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 name: openstack-k8s-operators/ci-framework required: true short_name: ci-framework src_dir: src/github.com/openstack-k8s-operators/ci-framework github.com/openstack-k8s-operators/edpm-ansible: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/edpm-ansible checkout: main checkout_description: zuul branch commit: 95aa63de3182faad63a69301d101debad3efc936 name: openstack-k8s-operators/edpm-ansible required: true short_name: edpm-ansible src_dir: src/github.com/openstack-k8s-operators/edpm-ansible github.com/openstack-k8s-operators/infra-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/infra-operator checkout: main checkout_description: zuul branch commit: 2b5048bbcae44dfeaacbb43830318ca45c13f182 name: openstack-k8s-operators/infra-operator required: true short_name: infra-operator src_dir: src/github.com/openstack-k8s-operators/infra-operator github.com/openstack-k8s-operators/install_yamls: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/install_yamls checkout: main checkout_description: zuul branch commit: bb26118ddc70016cbd2118a0b0a35d5f6ab9c343 name: openstack-k8s-operators/install_yamls required: true short_name: install_yamls src_dir: src/github.com/openstack-k8s-operators/install_yamls github.com/openstack-k8s-operators/openstack-baremetal-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-baremetal-operator checkout: main checkout_description: zuul branch commit: 3bf7652f010ead15ac2d2fec7e3b71c442b8fb8d name: openstack-k8s-operators/openstack-baremetal-operator required: true short_name: openstack-baremetal-operator 
src_dir: src/github.com/openstack-k8s-operators/openstack-baremetal-operator github.com/openstack-k8s-operators/openstack-must-gather: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-must-gather checkout: main checkout_description: zuul branch commit: 748dff8508cbb49e00426d46a4487b9f4c0b0096 name: openstack-k8s-operators/openstack-must-gather required: true short_name: openstack-must-gather src_dir: src/github.com/openstack-k8s-operators/openstack-must-gather github.com/openstack-k8s-operators/openstack-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-operator checkout: main checkout_description: zuul branch commit: 245af87e94976809f2023f59c19dffb95df97ed9 name: openstack-k8s-operators/openstack-operator required: true short_name: openstack-operator src_dir: src/github.com/openstack-k8s-operators/openstack-operator github.com/openstack-k8s-operators/repo-setup: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/repo-setup checkout: main checkout_description: zuul branch commit: 37b10946c6a10f9fa26c13305f06bfd6867e723f name: openstack-k8s-operators/repo-setup required: true short_name: repo-setup src_dir: src/github.com/openstack-k8s-operators/repo-setup github.com/openstack-k8s-operators/watcher-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator checkout: main checkout_description: zuul branch commit: 14377136e67c9cd67507a059bfde2f19f140387d name: openstack-k8s-operators/watcher-operator required: false short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator opendev.org/zuul/zuul-jobs: canonical_hostname: opendev.org canonical_name: opendev.org/zuul/zuul-jobs checkout: master checkout_description: project default branch commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df name: zuul/zuul-jobs required: true short_name: zuul-jobs src_dir: src/opendev.org/zuul/zuul-jobs review.rdoproject.org/config: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/config checkout: master checkout_description: project default branch commit: 941f6f7666fdff0145523beb29ceda8db25c234c name: config required: true short_name: config src_dir: src/review.rdoproject.org/config ref: refs/pull/287/head resources: {} tenant: rdoproject.org timeout: 10800 topic: null voting: true zuul_change_list: - watcher-operator zuul_execution_branch: main zuul_execution_canonical_name_and_path: github.com/openstack-k8s-operators/ci-framework/ci/playbooks/e2e-collect-logs.yml zuul_execution_phase: post zuul_execution_phase_index: '0' zuul_execution_trusted: 'False' zuul_log_collection: false zuul_success: 'False' zuul_will_retry: 'False' crc: ansible_all_ipv4_addresses: - 38.102.83.110 - 192.168.126.11 ansible_all_ipv6_addresses: - fe80::11a5:cf8e:c9b5:dc91 ansible_apparmor: status: disabled ansible_architecture: x86_64 ansible_bios_date: 04/01/2014 ansible_bios_vendor: SeaBIOS ansible_bios_version: 1.15.0-1 ansible_board_asset_tag: NA ansible_board_name: NA ansible_board_serial: NA ansible_board_vendor: NA ansible_board_version: NA ansible_br_ex: active: true device: br-ex features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off 
[fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_lockless: on [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.110 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::11a5:cf8e:c9b5:dc91 prefix: '64' scope: link macaddress: fa:16:3e:91:5d:af mtu: 1500 promisc: true timestamping: [] type: ether ansible_br_int: active: false device: br-int features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_lockless: on [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' 
tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: 4e:ec:11:72:80:3b mtu: 1400 promisc: true timestamping: [] type: ether ansible_chassis_asset_tag: NA ansible_chassis_serial: NA ansible_chassis_vendor: QEMU ansible_chassis_version: pc-i440fx-6.2 ansible_check_mode: false ansible_cmdline: BOOT_IMAGE: (hd0,gpt3)/boot/ostree/rhcos-8a7990dabf52ac75b58b2f3e4b0ab7fa03a563df103fbd3b4d71c823481c83ff/vmlinuz-5.14.0-427.22.1.el9_4.x86_64 boot: UUID=6ea7ef63-bc43-49c4-9337-b3b14ffb2763 cgroup_no_v1: all ignition.platform.id: metal ostree: /ostree/boot.1/rhcos/8a7990dabf52ac75b58b2f3e4b0ab7fa03a563df103fbd3b4d71c823481c83ff/0 psi: '1' root: UUID=68d6f3e9-64e9-44a4-a1d0-311f9c629a01 rootflags: prjquota rw: true systemd.unified_cgroup_hierarchy: '1' ansible_config_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/post_playbook_0/ansible.cfg ansible_connection: ssh ansible_date_time: date: '2025-10-06' day: '06' epoch: '1759784332' epoch_int: '1759784332' hour: '20' iso8601: '2025-10-06T20:58:52Z' iso8601_basic: 20251006T205852991477 iso8601_basic_short: 20251006T205852 iso8601_micro: '2025-10-06T20:58:52.991477Z' minute: '58' month: '10' second: '52' time: '20:58:52' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Monday weekday_number: '1' weeknumber: '40' year: '2025' ansible_default_ipv4: address: 38.102.83.110 alias: br-ex broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: br-ex macaddress: fa:16:3e:91:5d:af mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether ansible_default_ipv6: {} ansible_device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 vda2: - EFI-SYSTEM vda3: - boot vda4: - root masters: {} uuids: sr0: - 2025-10-06-20-54-17-00 vda2: - 7B77-95E7 vda3: - 6ea7ef63-bc43-49c4-9337-b3b14ffb2763 vda4: - 68d6f3e9-64e9-44a4-a1d0-311f9c629a01 ansible_devices: sr0: holders: [] host: 'IDE interface: Intel Corporation 82371SB PIIX3 IDE [Natoma/Triton II]' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2025-10-06-20-54-17-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '0' vendor: QEMU virtual: 1 vda: holders: [] host: 'SCSI storage controller: Red Hat, Inc. 
Virtio block device' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: [] sectors: '2048' sectorsize: 512 size: 1.00 MB start: '2048' uuid: null vda2: holders: [] links: ids: [] labels: - EFI-SYSTEM masters: [] uuids: - 7B77-95E7 sectors: '260096' sectorsize: 512 size: 127.00 MB start: '4096' uuid: 7B77-95E7 vda3: holders: [] links: ids: [] labels: - boot masters: [] uuids: - 6ea7ef63-bc43-49c4-9337-b3b14ffb2763 sectors: '786432' sectorsize: 512 size: 384.00 MB start: '264192' uuid: 6ea7ef63-bc43-49c4-9337-b3b14ffb2763 vda4: holders: [] links: ids: [] labels: - root masters: [] uuids: - 68d6f3e9-64e9-44a4-a1d0-311f9c629a01 sectors: '166721503' sectorsize: 512 size: 79.50 GB start: '1050624' uuid: 68d6f3e9-64e9-44a4-a1d0-311f9c629a01 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '419430400' sectorsize: '512' size: 200.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 ansible_diff_mode: false ansible_distribution: RedHat ansible_distribution_file_parsed: true ansible_distribution_file_path: /etc/redhat-release ansible_distribution_file_search_string: Red Hat ansible_distribution_file_variety: RedHat ansible_distribution_major_version: '4' ansible_distribution_release: NA ansible_distribution_version: '4.16' ansible_dns: nameservers: - 199.204.44.24 - 199.204.47.54 ansible_domain: '' ansible_effective_group_id: 1000 ansible_effective_user_id: 1000 ansible_ens3: active: true device: ens3 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_lockless: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: 
fa:16:3e:91:5d:af module: virtio_net mtu: 1500 pciid: virtio1 promisc: true speed: -1 timestamping: [] type: ether ansible_env: BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus HOME: /var/home/core LANG: C.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: core MOTD_SHOWN: pam PATH: /var/home/core/.local/bin:/var/home/core/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /var/home/core SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 47220 22 SSH_CONNECTION: 38.102.83.114 47220 38.102.83.110 22 USER: core XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '2' XDG_SESSION_TYPE: tty _: /usr/bin/python3.9 which_declare: declare -f ansible_eth10: active: true device: eth10 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_lockless: on [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 192.168.126.11 broadcast: 192.168.126.255 netmask: 255.255.255.0 network: 192.168.126.0 prefix: '24' macaddress: a6:13:dc:c2:bf:da mtu: 1500 promisc: false timestamping: [] type: ether ansible_facts: _ansible_facts_gathered: true all_ipv4_addresses: - 38.102.83.110 - 192.168.126.11 all_ipv6_addresses: - fe80::11a5:cf8e:c9b5:dc91 ansible_local: {} apparmor: status: disabled architecture: x86_64 bios_date: 04/01/2014 bios_vendor: SeaBIOS bios_version: 1.15.0-1 board_asset_tag: NA board_name: NA board_serial: NA board_vendor: NA board_version: NA br_ex: active: true device: br-ex features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] 
generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_lockless: on [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.110 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::11a5:cf8e:c9b5:dc91 prefix: '64' scope: link macaddress: fa:16:3e:91:5d:af mtu: 1500 promisc: true timestamping: [] type: ether br_int: active: false device: br-int features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_lockless: on [fixed] tx_nocache_copy: 'off' 
tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: 4e:ec:11:72:80:3b mtu: 1400 promisc: true timestamping: [] type: ether chassis_asset_tag: NA chassis_serial: NA chassis_vendor: QEMU chassis_version: pc-i440fx-6.2 cmdline: BOOT_IMAGE: (hd0,gpt3)/boot/ostree/rhcos-8a7990dabf52ac75b58b2f3e4b0ab7fa03a563df103fbd3b4d71c823481c83ff/vmlinuz-5.14.0-427.22.1.el9_4.x86_64 boot: UUID=6ea7ef63-bc43-49c4-9337-b3b14ffb2763 cgroup_no_v1: all ignition.platform.id: metal ostree: /ostree/boot.1/rhcos/8a7990dabf52ac75b58b2f3e4b0ab7fa03a563df103fbd3b4d71c823481c83ff/0 psi: '1' root: UUID=68d6f3e9-64e9-44a4-a1d0-311f9c629a01 rootflags: prjquota rw: true systemd.unified_cgroup_hierarchy: '1' date_time: date: '2025-10-06' day: '06' epoch: '1759784332' epoch_int: '1759784332' hour: '20' iso8601: '2025-10-06T20:58:52Z' iso8601_basic: 20251006T205852991477 iso8601_basic_short: 20251006T205852 iso8601_micro: '2025-10-06T20:58:52.991477Z' minute: '58' month: '10' second: '52' time: '20:58:52' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Monday weekday_number: '1' weeknumber: '40' year: '2025' default_ipv4: address: 38.102.83.110 alias: br-ex broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: br-ex macaddress: fa:16:3e:91:5d:af mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether default_ipv6: {} device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 vda2: - EFI-SYSTEM vda3: - boot vda4: - root masters: {} uuids: sr0: - 2025-10-06-20-54-17-00 vda2: - 7B77-95E7 vda3: - 6ea7ef63-bc43-49c4-9337-b3b14ffb2763 vda4: - 68d6f3e9-64e9-44a4-a1d0-311f9c629a01 devices: sr0: holders: [] host: 'IDE interface: Intel Corporation 82371SB PIIX3 IDE [Natoma/Triton II]' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2025-10-06-20-54-17-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '0' vendor: QEMU virtual: 1 vda: holders: [] host: 'SCSI storage controller: Red Hat, Inc. 
Virtio block device' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: [] sectors: '2048' sectorsize: 512 size: 1.00 MB start: '2048' uuid: null vda2: holders: [] links: ids: [] labels: - EFI-SYSTEM masters: [] uuids: - 7B77-95E7 sectors: '260096' sectorsize: 512 size: 127.00 MB start: '4096' uuid: 7B77-95E7 vda3: holders: [] links: ids: [] labels: - boot masters: [] uuids: - 6ea7ef63-bc43-49c4-9337-b3b14ffb2763 sectors: '786432' sectorsize: 512 size: 384.00 MB start: '264192' uuid: 6ea7ef63-bc43-49c4-9337-b3b14ffb2763 vda4: holders: [] links: ids: [] labels: - root masters: [] uuids: - 68d6f3e9-64e9-44a4-a1d0-311f9c629a01 sectors: '166721503' sectorsize: 512 size: 79.50 GB start: '1050624' uuid: 68d6f3e9-64e9-44a4-a1d0-311f9c629a01 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '419430400' sectorsize: '512' size: 200.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 discovered_interpreter_python: /usr/bin/python3.9 distribution: RedHat distribution_file_parsed: true distribution_file_path: /etc/redhat-release distribution_file_search_string: Red Hat distribution_file_variety: RedHat distribution_major_version: '4' distribution_release: NA distribution_version: '4.16' dns: nameservers: - 199.204.44.24 - 199.204.47.54 domain: '' effective_group_id: 1000 effective_user_id: 1000 ens3: active: true device: ens3 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_lockless: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: fa:16:3e:91:5d:af module: virtio_net mtu: 1500 pciid: virtio1 promisc: true speed: -1 
timestamping: [] type: ether env: BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus HOME: /var/home/core LANG: C.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: core MOTD_SHOWN: pam PATH: /var/home/core/.local/bin:/var/home/core/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /var/home/core SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 47220 22 SSH_CONNECTION: 38.102.83.114 47220 38.102.83.110 22 USER: core XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '2' XDG_SESSION_TYPE: tty _: /usr/bin/python3.9 which_declare: declare -f eth10: active: true device: eth10 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_lockless: on [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 192.168.126.11 broadcast: 192.168.126.255 netmask: 255.255.255.0 network: 192.168.126.0 prefix: '24' macaddress: a6:13:dc:c2:bf:da mtu: 1500 promisc: false timestamping: [] type: ether fibre_channel_wwn: [] fips: false form_factor: Other fqdn: crc gather_subset: - all hostname: crc hostnqn: nqn.2014-08.org.nvmexpress:uuid:fe28b1dc-f424-4106-9c95-00604d2bcd5f interfaces: - ens3 - ovs-system - eth10 - ovn-k8s-mp0 - lo - br-int - br-ex is_chroot: true iscsi_iqn: iqn.1994-05.com.redhat:24fed7ce643e kernel: 5.14.0-427.22.1.el9_4.x86_64 kernel_version: '#1 SMP PREEMPT_DYNAMIC Mon Jun 10 09:23:36 EDT 2024' lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' 
generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] netns_local: on [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_lockless: on [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback loadavg: 15m: 0.15 1m: 0.39 5m: 0.33 locally_reachable_ips: ipv4: - 38.102.83.110 - 127.0.0.0/8 - 127.0.0.1 - 192.168.126.11 ipv6: - ::1 - fe80::11a5:cf8e:c9b5:dc91 lsb: {} lvm: N/A machine: x86_64 machine_id: c1bd596843fb445da20eca66471ddf66 memfree_mb: 28958 memory_mb: nocache: free: 30299 used: 1796 real: free: 28958 total: 32095 used: 3137 swap: cached: 0 free: 0 total: 0 used: 0 memtotal_mb: 32095 module_setup: true mounts: - block_available: 13218334 block_size: 4096 block_total: 20823803 block_used: 7605469 device: /dev/vda4 fstype: xfs inode_available: 41489056 inode_total: 41680320 inode_used: 191264 mount: /sysroot options: ro,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota size_available: 54142296064 size_total: 85294297088 uuid: 68d6f3e9-64e9-44a4-a1d0-311f9c629a01 - block_available: 13218334 block_size: 4096 block_total: 20823803 block_used: 7605469 device: /dev/vda4 fstype: xfs inode_available: 41489056 inode_total: 41680320 inode_used: 191264 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 54142296064 size_total: 85294297088 uuid: 68d6f3e9-64e9-44a4-a1d0-311f9c629a01 - block_available: 13218334 block_size: 4096 block_total: 20823803 block_used: 7605469 device: /dev/vda4 fstype: xfs inode_available: 41489056 inode_total: 41680320 inode_used: 191264 mount: /etc options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 54142296064 size_total: 85294297088 uuid: 
68d6f3e9-64e9-44a4-a1d0-311f9c629a01 - block_available: 13218334 block_size: 4096 block_total: 20823803 block_used: 7605469 device: /dev/vda4 fstype: xfs inode_available: 41489056 inode_total: 41680320 inode_used: 191264 mount: /usr options: ro,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 54142296064 size_total: 85294297088 uuid: 68d6f3e9-64e9-44a4-a1d0-311f9c629a01 - block_available: 13218334 block_size: 4096 block_total: 20823803 block_used: 7605469 device: /dev/vda4 fstype: xfs inode_available: 41489056 inode_total: 41680320 inode_used: 191264 mount: /sysroot/ostree/deploy/rhcos/var options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 54142296064 size_total: 85294297088 uuid: 68d6f3e9-64e9-44a4-a1d0-311f9c629a01 - block_available: 13218334 block_size: 4096 block_total: 20823803 block_used: 7605469 device: /dev/vda4 fstype: xfs inode_available: 41489056 inode_total: 41680320 inode_used: 191264 mount: /var options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 54142296064 size_total: 85294297088 uuid: 68d6f3e9-64e9-44a4-a1d0-311f9c629a01 - block_available: 221344 block_size: 1024 block_total: 358271 block_used: 136927 device: /dev/vda3 fstype: ext4 inode_available: 97936 inode_total: 98304 inode_used: 368 mount: /boot options: ro,seclabel,nosuid,nodev,relatime size_available: 226656256 size_total: 366869504 uuid: 6ea7ef63-bc43-49c4-9337-b3b14ffb2763 - block_available: 0 block_size: 2048 block_total: 241 block_used: 241 device: /dev/sr0 fstype: iso9660 inode_available: 0 inode_total: 0 inode_used: 0 mount: /tmp/openstack-config-drive options: ro,relatime,nojoliet,check=s,map=n,blocksize=2048 size_available: 0 size_total: 493568 uuid: 2025-10-06-20-54-17-00 nodename: crc os_family: RedHat ovn_k8s_mp0: active: false device: ovn-k8s-mp0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_lockless: on [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' 
tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: b6:dc:d9:26:03:d4 mtu: 1400 promisc: true timestamping: [] type: ether ovs_system: active: false device: ovs-system features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: on [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_lockless: on [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: 7a:ee:8d:4d:53:32 mtu: 1500 promisc: true timestamping: [] type: ether pkg_mgr: atomic_container proc_cmdline: BOOT_IMAGE: (hd0,gpt3)/boot/ostree/rhcos-8a7990dabf52ac75b58b2f3e4b0ab7fa03a563df103fbd3b4d71c823481c83ff/vmlinuz-5.14.0-427.22.1.el9_4.x86_64 boot: UUID=6ea7ef63-bc43-49c4-9337-b3b14ffb2763 cgroup_no_v1: all ignition.platform.id: metal ostree: /ostree/boot.1/rhcos/8a7990dabf52ac75b58b2f3e4b0ab7fa03a563df103fbd3b4d71c823481c83ff/0 psi: '1' root: UUID=68d6f3e9-64e9-44a4-a1d0-311f9c629a01 rootflags: prjquota rw: true systemd.unified_cgroup_hierarchy: '1' processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor - '8' - AuthenticAMD - AMD EPYC-Rome Processor - '9' - AuthenticAMD - AMD EPYC-Rome Processor - '10' - AuthenticAMD - AMD EPYC-Rome Processor - '11' - AuthenticAMD - AMD EPYC-Rome Processor processor_cores: 1 processor_count: 12 processor_nproc: 12 processor_threads_per_core: 1 processor_vcpus: 12 product_name: OpenStack Nova 
product_serial: NA product_uuid: NA product_version: 26.2.1 python: executable: /usr/bin/python3.9 has_sslcontext: true type: cpython version: major: 3 micro: 18 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 18 - final - 0 python_version: 3.9.18 real_group_id: 1000 real_user_id: 1000 selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted selinux_python_present: true service_mgr: systemd services: NetworkManager-clean-initrd-state.service: name: NetworkManager-clean-initrd-state.service source: systemd state: stopped status: enabled NetworkManager-dispatcher.service: name: NetworkManager-dispatcher.service source: systemd state: running status: enabled NetworkManager-wait-online.service: name: NetworkManager-wait-online.service source: systemd state: stopped status: enabled NetworkManager.service: name: NetworkManager.service source: systemd state: running status: enabled afterburn-checkin.service: name: afterburn-checkin.service source: systemd state: stopped status: enabled afterburn-firstboot-checkin.service: name: afterburn-firstboot-checkin.service source: systemd state: stopped status: enabled afterburn-sshkeys@.service: name: afterburn-sshkeys@.service source: systemd state: unknown status: disabled afterburn.service: name: afterburn.service source: systemd state: inactive status: disabled arp-ethers.service: name: arp-ethers.service source: systemd state: inactive status: disabled auditd.service: name: auditd.service source: systemd state: running status: enabled auth-rpcgss-module.service: name: auth-rpcgss-module.service source: systemd state: stopped status: static autovt@.service: name: autovt@.service source: systemd state: unknown status: alias blk-availability.service: name: blk-availability.service source: systemd state: stopped status: disabled bootc-fetch-apply-updates.service: name: bootc-fetch-apply-updates.service source: systemd state: inactive status: static bootkube.service: name: bootkube.service source: systemd state: inactive status: disabled bootupd.service: name: bootupd.service source: systemd state: stopped status: static chrony-wait.service: name: chrony-wait.service source: systemd state: inactive status: disabled chronyd-restricted.service: name: chronyd-restricted.service source: systemd state: inactive status: disabled chronyd.service: name: chronyd.service source: systemd state: running status: enabled clevis-luks-askpass.service: name: clevis-luks-askpass.service source: systemd state: stopped status: static cni-dhcp.service: name: cni-dhcp.service source: systemd state: inactive status: disabled configure-cloudinit-ssh.service: name: configure-cloudinit-ssh.service source: systemd state: stopped status: enabled console-getty.service: name: console-getty.service source: systemd state: inactive status: disabled console-login-helper-messages-gensnippet-ssh-keys.service: name: console-login-helper-messages-gensnippet-ssh-keys.service source: systemd state: stopped status: enabled container-getty@.service: name: container-getty@.service source: systemd state: unknown status: static coreos-generate-iscsi-initiatorname.service: name: coreos-generate-iscsi-initiatorname.service source: systemd state: stopped status: enabled coreos-ignition-delete-config.service: name: coreos-ignition-delete-config.service source: systemd state: stopped status: enabled coreos-ignition-firstboot-complete.service: name: coreos-ignition-firstboot-complete.service source: systemd state: stopped status: enabled 
coreos-ignition-write-issues.service: name: coreos-ignition-write-issues.service source: systemd state: stopped status: enabled coreos-installer-disable-device-auto-activation.service: name: coreos-installer-disable-device-auto-activation.service source: systemd state: inactive status: static coreos-installer-noreboot.service: name: coreos-installer-noreboot.service source: systemd state: inactive status: static coreos-installer-reboot.service: name: coreos-installer-reboot.service source: systemd state: inactive status: static coreos-installer-secure-ipl-reboot.service: name: coreos-installer-secure-ipl-reboot.service source: systemd state: inactive status: static coreos-installer.service: name: coreos-installer.service source: systemd state: inactive status: static coreos-liveiso-success.service: name: coreos-liveiso-success.service source: systemd state: stopped status: enabled coreos-platform-chrony-config.service: name: coreos-platform-chrony-config.service source: systemd state: stopped status: enabled coreos-populate-lvmdevices.service: name: coreos-populate-lvmdevices.service source: systemd state: stopped status: enabled coreos-printk-quiet.service: name: coreos-printk-quiet.service source: systemd state: stopped status: enabled coreos-update-ca-trust.service: name: coreos-update-ca-trust.service source: systemd state: stopped status: enabled crc-dnsmasq.service: name: crc-dnsmasq.service source: systemd state: stopped status: not-found crc-pre.service: name: crc-pre.service source: systemd state: stopped status: enabled crio-subid.service: name: crio-subid.service source: systemd state: stopped status: enabled crio-wipe.service: name: crio-wipe.service source: systemd state: stopped status: disabled crio.service: name: crio.service source: systemd state: stopped status: disabled dbus-broker.service: name: dbus-broker.service source: systemd state: running status: enabled dbus-org.freedesktop.hostname1.service: name: dbus-org.freedesktop.hostname1.service source: systemd state: active status: alias dbus-org.freedesktop.locale1.service: name: dbus-org.freedesktop.locale1.service source: systemd state: inactive status: alias dbus-org.freedesktop.login1.service: name: dbus-org.freedesktop.login1.service source: systemd state: active status: alias dbus-org.freedesktop.nm-dispatcher.service: name: dbus-org.freedesktop.nm-dispatcher.service source: systemd state: active status: alias dbus-org.freedesktop.timedate1.service: name: dbus-org.freedesktop.timedate1.service source: systemd state: inactive status: alias dbus.service: name: dbus.service source: systemd state: active status: alias debug-shell.service: name: debug-shell.service source: systemd state: inactive status: disabled disable-mglru.service: name: disable-mglru.service source: systemd state: stopped status: enabled display-manager.service: name: display-manager.service source: systemd state: stopped status: not-found dm-event.service: name: dm-event.service source: systemd state: stopped status: static dnf-makecache.service: name: dnf-makecache.service source: systemd state: inactive status: static dnsmasq.service: name: dnsmasq.service source: systemd state: running status: enabled dracut-cmdline.service: name: dracut-cmdline.service source: systemd state: stopped status: static dracut-initqueue.service: name: dracut-initqueue.service source: systemd state: stopped status: static dracut-mount.service: name: dracut-mount.service source: systemd state: stopped status: static dracut-pre-mount.service: name: 
dracut-pre-mount.service source: systemd state: stopped status: static dracut-pre-pivot.service: name: dracut-pre-pivot.service source: systemd state: stopped status: static dracut-pre-trigger.service: name: dracut-pre-trigger.service source: systemd state: stopped status: static dracut-pre-udev.service: name: dracut-pre-udev.service source: systemd state: stopped status: static dracut-shutdown-onfailure.service: name: dracut-shutdown-onfailure.service source: systemd state: stopped status: static dracut-shutdown.service: name: dracut-shutdown.service source: systemd state: stopped status: static dummy-network.service: name: dummy-network.service source: systemd state: stopped status: enabled emergency.service: name: emergency.service source: systemd state: stopped status: static fcoe.service: name: fcoe.service source: systemd state: stopped status: not-found fstrim.service: name: fstrim.service source: systemd state: inactive status: static fwupd-offline-update.service: name: fwupd-offline-update.service source: systemd state: inactive status: static fwupd-refresh.service: name: fwupd-refresh.service source: systemd state: inactive status: static fwupd.service: name: fwupd.service source: systemd state: inactive status: static gcp-routes.service: name: gcp-routes.service source: systemd state: stopped status: enabled getty@.service: name: getty@.service source: systemd state: unknown status: enabled getty@tty1.service: name: getty@tty1.service source: systemd state: running status: active gssproxy.service: name: gssproxy.service source: systemd state: stopped status: disabled gvisor-tap-vsock.service: name: gvisor-tap-vsock.service source: systemd state: running status: enabled hypervfcopyd.service: name: hypervfcopyd.service source: systemd state: inactive status: static hypervkvpd.service: name: hypervkvpd.service source: systemd state: inactive status: static hypervvssd.service: name: hypervvssd.service source: systemd state: inactive status: static ignition-delete-config.service: name: ignition-delete-config.service source: systemd state: stopped status: enabled initrd-cleanup.service: name: initrd-cleanup.service source: systemd state: stopped status: static initrd-parse-etc.service: name: initrd-parse-etc.service source: systemd state: stopped status: static initrd-switch-root.service: name: initrd-switch-root.service source: systemd state: stopped status: static initrd-udevadm-cleanup-db.service: name: initrd-udevadm-cleanup-db.service source: systemd state: stopped status: static irqbalance.service: name: irqbalance.service source: systemd state: running status: enabled iscsi-init.service: name: iscsi-init.service source: systemd state: stopped status: disabled iscsi-onboot.service: name: iscsi-onboot.service source: systemd state: stopped status: enabled iscsi-shutdown.service: name: iscsi-shutdown.service source: systemd state: stopped status: static iscsi-starter.service: name: iscsi-starter.service source: systemd state: inactive status: disabled iscsi.service: name: iscsi.service source: systemd state: stopped status: indirect iscsid.service: name: iscsid.service source: systemd state: stopped status: disabled iscsiuio.service: name: iscsiuio.service source: systemd state: stopped status: disabled kdump.service: name: kdump.service source: systemd state: stopped status: disabled kmod-static-nodes.service: name: kmod-static-nodes.service source: systemd state: stopped status: static kubelet-auto-node-size.service: name: kubelet-auto-node-size.service source: systemd state: 
stopped status: enabled kubelet-cleanup.service: name: kubelet-cleanup.service source: systemd state: stopped status: enabled kubelet.service: name: kubelet.service source: systemd state: stopped status: disabled kubens.service: name: kubens.service source: systemd state: stopped status: disabled ldconfig.service: name: ldconfig.service source: systemd state: stopped status: static logrotate.service: name: logrotate.service source: systemd state: stopped status: static lvm2-activation-early.service: name: lvm2-activation-early.service source: systemd state: stopped status: not-found lvm2-lvmpolld.service: name: lvm2-lvmpolld.service source: systemd state: stopped status: static lvm2-monitor.service: name: lvm2-monitor.service source: systemd state: stopped status: enabled machine-config-daemon-firstboot.service: name: machine-config-daemon-firstboot.service source: systemd state: stopped status: enabled machine-config-daemon-pull.service: name: machine-config-daemon-pull.service source: systemd state: stopped status: enabled mdadm-grow-continue@.service: name: mdadm-grow-continue@.service source: systemd state: unknown status: static mdadm-last-resort@.service: name: mdadm-last-resort@.service source: systemd state: unknown status: static mdcheck_continue.service: name: mdcheck_continue.service source: systemd state: inactive status: static mdcheck_start.service: name: mdcheck_start.service source: systemd state: inactive status: static mdmon@.service: name: mdmon@.service source: systemd state: unknown status: static mdmonitor-oneshot.service: name: mdmonitor-oneshot.service source: systemd state: inactive status: static mdmonitor.service: name: mdmonitor.service source: systemd state: stopped status: enabled microcode.service: name: microcode.service source: systemd state: stopped status: enabled modprobe@.service: name: modprobe@.service source: systemd state: unknown status: static modprobe@configfs.service: name: modprobe@configfs.service source: systemd state: stopped status: inactive modprobe@drm.service: name: modprobe@drm.service source: systemd state: stopped status: inactive modprobe@efi_pstore.service: name: modprobe@efi_pstore.service source: systemd state: stopped status: inactive modprobe@fuse.service: name: modprobe@fuse.service source: systemd state: stopped status: inactive multipathd.service: name: multipathd.service source: systemd state: stopped status: enabled netavark-dhcp-proxy.service: name: netavark-dhcp-proxy.service source: systemd state: inactive status: disabled netavark-firewalld-reload.service: name: netavark-firewalld-reload.service source: systemd state: inactive status: disabled network.service: name: network.service source: systemd state: stopped status: not-found nfs-blkmap.service: name: nfs-blkmap.service source: systemd state: inactive status: disabled nfs-idmapd.service: name: nfs-idmapd.service source: systemd state: stopped status: static nfs-mountd.service: name: nfs-mountd.service source: systemd state: stopped status: static nfs-server.service: name: nfs-server.service source: systemd state: stopped status: disabled nfs-utils.service: name: nfs-utils.service source: systemd state: stopped status: static nfsdcld.service: name: nfsdcld.service source: systemd state: stopped status: static nftables.service: name: nftables.service source: systemd state: inactive status: disabled nis-domainname.service: name: nis-domainname.service source: systemd state: inactive status: disabled nm-cloud-setup.service: name: nm-cloud-setup.service source: systemd 
state: inactive status: disabled nm-priv-helper.service: name: nm-priv-helper.service source: systemd state: inactive status: static nmstate.service: name: nmstate.service source: systemd state: stopped status: enabled node-valid-hostname.service: name: node-valid-hostname.service source: systemd state: stopped status: enabled nodeip-configuration.service: name: nodeip-configuration.service source: systemd state: stopped status: enabled ntpd.service: name: ntpd.service source: systemd state: stopped status: not-found ntpdate.service: name: ntpdate.service source: systemd state: stopped status: not-found nvmefc-boot-connections.service: name: nvmefc-boot-connections.service source: systemd state: stopped status: enabled nvmf-autoconnect.service: name: nvmf-autoconnect.service source: systemd state: inactive status: disabled nvmf-connect@.service: name: nvmf-connect@.service source: systemd state: unknown status: static openvswitch.service: name: openvswitch.service source: systemd state: stopped status: enabled ostree-boot-complete.service: name: ostree-boot-complete.service source: systemd state: stopped status: enabled-runtime ostree-finalize-staged-hold.service: name: ostree-finalize-staged-hold.service source: systemd state: stopped status: static ostree-finalize-staged.service: name: ostree-finalize-staged.service source: systemd state: stopped status: static ostree-prepare-root.service: name: ostree-prepare-root.service source: systemd state: inactive status: static ostree-readonly-sysroot-migration.service: name: ostree-readonly-sysroot-migration.service source: systemd state: stopped status: disabled ostree-remount.service: name: ostree-remount.service source: systemd state: stopped status: enabled ostree-state-overlay@.service: name: ostree-state-overlay@.service source: systemd state: unknown status: disabled ovs-configuration.service: name: ovs-configuration.service source: systemd state: stopped status: enabled ovs-delete-transient-ports.service: name: ovs-delete-transient-ports.service source: systemd state: stopped status: static ovs-vswitchd.service: name: ovs-vswitchd.service source: systemd state: running status: static ovsdb-server.service: name: ovsdb-server.service source: systemd state: running status: static pam_namespace.service: name: pam_namespace.service source: systemd state: inactive status: static plymouth-quit-wait.service: name: plymouth-quit-wait.service source: systemd state: stopped status: not-found plymouth-read-write.service: name: plymouth-read-write.service source: systemd state: stopped status: not-found plymouth-start.service: name: plymouth-start.service source: systemd state: stopped status: not-found podman-auto-update.service: name: podman-auto-update.service source: systemd state: inactive status: disabled podman-clean-transient.service: name: podman-clean-transient.service source: systemd state: inactive status: disabled podman-kube@.service: name: podman-kube@.service source: systemd state: unknown status: disabled podman-restart.service: name: podman-restart.service source: systemd state: inactive status: disabled podman.service: name: podman.service source: systemd state: stopped status: disabled polkit.service: name: polkit.service source: systemd state: inactive status: static qemu-guest-agent.service: name: qemu-guest-agent.service source: systemd state: stopped status: enabled quotaon.service: name: quotaon.service source: systemd state: inactive status: static raid-check.service: name: raid-check.service source: systemd state: inactive 
status: static rbdmap.service: name: rbdmap.service source: systemd state: stopped status: not-found rc-local.service: name: rc-local.service source: systemd state: stopped status: static rdisc.service: name: rdisc.service source: systemd state: inactive status: disabled rdma-load-modules@.service: name: rdma-load-modules@.service source: systemd state: unknown status: static rdma-ndd.service: name: rdma-ndd.service source: systemd state: inactive status: static rescue.service: name: rescue.service source: systemd state: stopped status: static rhcos-usrlocal-selinux-fixup.service: name: rhcos-usrlocal-selinux-fixup.service source: systemd state: stopped status: enabled rpc-gssd.service: name: rpc-gssd.service source: systemd state: stopped status: static rpc-statd-notify.service: name: rpc-statd-notify.service source: systemd state: stopped status: static rpc-statd.service: name: rpc-statd.service source: systemd state: stopped status: static rpc-svcgssd.service: name: rpc-svcgssd.service source: systemd state: stopped status: not-found rpcbind.service: name: rpcbind.service source: systemd state: stopped status: disabled rpm-ostree-bootstatus.service: name: rpm-ostree-bootstatus.service source: systemd state: inactive status: disabled rpm-ostree-countme.service: name: rpm-ostree-countme.service source: systemd state: inactive status: static rpm-ostree-fix-shadow-mode.service: name: rpm-ostree-fix-shadow-mode.service source: systemd state: stopped status: disabled rpm-ostreed-automatic.service: name: rpm-ostreed-automatic.service source: systemd state: inactive status: static rpm-ostreed.service: name: rpm-ostreed.service source: systemd state: inactive status: static rpmdb-rebuild.service: name: rpmdb-rebuild.service source: systemd state: inactive status: disabled selinux-autorelabel-mark.service: name: selinux-autorelabel-mark.service source: systemd state: stopped status: enabled selinux-autorelabel.service: name: selinux-autorelabel.service source: systemd state: inactive status: static selinux-check-proper-disable.service: name: selinux-check-proper-disable.service source: systemd state: inactive status: disabled serial-getty@.service: name: serial-getty@.service source: systemd state: unknown status: disabled sntp.service: name: sntp.service source: systemd state: stopped status: not-found sshd-keygen@.service: name: sshd-keygen@.service source: systemd state: unknown status: disabled sshd-keygen@ecdsa.service: name: sshd-keygen@ecdsa.service source: systemd state: stopped status: inactive sshd-keygen@ed25519.service: name: sshd-keygen@ed25519.service source: systemd state: stopped status: inactive sshd-keygen@rsa.service: name: sshd-keygen@rsa.service source: systemd state: stopped status: inactive sshd.service: name: sshd.service source: systemd state: running status: enabled sshd@.service: name: sshd@.service source: systemd state: unknown status: static sssd-autofs.service: name: sssd-autofs.service source: systemd state: inactive status: indirect sssd-nss.service: name: sssd-nss.service source: systemd state: inactive status: indirect sssd-pac.service: name: sssd-pac.service source: systemd state: inactive status: indirect sssd-pam.service: name: sssd-pam.service source: systemd state: inactive status: indirect sssd-ssh.service: name: sssd-ssh.service source: systemd state: inactive status: indirect sssd-sudo.service: name: sssd-sudo.service source: systemd state: inactive status: indirect sssd.service: name: sssd.service source: systemd state: stopped status: enabled 
stalld.service: name: stalld.service source: systemd state: inactive status: disabled syslog.service: name: syslog.service source: systemd state: stopped status: not-found system-update-cleanup.service: name: system-update-cleanup.service source: systemd state: inactive status: static systemd-ask-password-console.service: name: systemd-ask-password-console.service source: systemd state: stopped status: static systemd-ask-password-wall.service: name: systemd-ask-password-wall.service source: systemd state: stopped status: static systemd-backlight@.service: name: systemd-backlight@.service source: systemd state: unknown status: static systemd-binfmt.service: name: systemd-binfmt.service source: systemd state: stopped status: static systemd-bless-boot.service: name: systemd-bless-boot.service source: systemd state: inactive status: static systemd-boot-check-no-failures.service: name: systemd-boot-check-no-failures.service source: systemd state: inactive status: disabled systemd-boot-random-seed.service: name: systemd-boot-random-seed.service source: systemd state: stopped status: static systemd-boot-update.service: name: systemd-boot-update.service source: systemd state: stopped status: enabled systemd-coredump@.service: name: systemd-coredump@.service source: systemd state: unknown status: static systemd-exit.service: name: systemd-exit.service source: systemd state: inactive status: static systemd-fsck-root.service: name: systemd-fsck-root.service source: systemd state: stopped status: static systemd-fsck@.service: name: systemd-fsck@.service source: systemd state: unknown status: static systemd-fsck@dev-disk-by\x2duuid-6ea7ef63\x2dbc43\x2d49c4\x2d9337\x2db3b14ffb2763.service: name: systemd-fsck@dev-disk-by\x2duuid-6ea7ef63\x2dbc43\x2d49c4\x2d9337\x2db3b14ffb2763.service source: systemd state: stopped status: active systemd-growfs-root.service: name: systemd-growfs-root.service source: systemd state: inactive status: static systemd-growfs@.service: name: systemd-growfs@.service source: systemd state: unknown status: static systemd-halt.service: name: systemd-halt.service source: systemd state: inactive status: static systemd-hibernate-resume@.service: name: systemd-hibernate-resume@.service source: systemd state: unknown status: static systemd-hibernate.service: name: systemd-hibernate.service source: systemd state: inactive status: static systemd-hostnamed.service: name: systemd-hostnamed.service source: systemd state: running status: static systemd-hwdb-update.service: name: systemd-hwdb-update.service source: systemd state: stopped status: static systemd-hybrid-sleep.service: name: systemd-hybrid-sleep.service source: systemd state: inactive status: static systemd-initctl.service: name: systemd-initctl.service source: systemd state: stopped status: static systemd-journal-catalog-update.service: name: systemd-journal-catalog-update.service source: systemd state: stopped status: static systemd-journal-flush.service: name: systemd-journal-flush.service source: systemd state: stopped status: static systemd-journal-gatewayd.service: name: systemd-journal-gatewayd.service source: systemd state: inactive status: indirect systemd-journal-remote.service: name: systemd-journal-remote.service source: systemd state: inactive status: indirect systemd-journal-upload.service: name: systemd-journal-upload.service source: systemd state: inactive status: disabled systemd-journald.service: name: systemd-journald.service source: systemd state: running status: static systemd-journald@.service: name: 
systemd-journald@.service source: systemd state: unknown status: static systemd-kexec.service: name: systemd-kexec.service source: systemd state: inactive status: static systemd-localed.service: name: systemd-localed.service source: systemd state: inactive status: static systemd-logind.service: name: systemd-logind.service source: systemd state: running status: static systemd-machine-id-commit.service: name: systemd-machine-id-commit.service source: systemd state: stopped status: static systemd-modules-load.service: name: systemd-modules-load.service source: systemd state: stopped status: static systemd-network-generator.service: name: systemd-network-generator.service source: systemd state: stopped status: enabled systemd-pcrfs-root.service: name: systemd-pcrfs-root.service source: systemd state: inactive status: static systemd-pcrfs@.service: name: systemd-pcrfs@.service source: systemd state: unknown status: static systemd-pcrmachine.service: name: systemd-pcrmachine.service source: systemd state: stopped status: static systemd-pcrphase-initrd.service: name: systemd-pcrphase-initrd.service source: systemd state: stopped status: static systemd-pcrphase-sysinit.service: name: systemd-pcrphase-sysinit.service source: systemd state: stopped status: static systemd-pcrphase.service: name: systemd-pcrphase.service source: systemd state: stopped status: static systemd-poweroff.service: name: systemd-poweroff.service source: systemd state: inactive status: static systemd-pstore.service: name: systemd-pstore.service source: systemd state: stopped status: enabled systemd-quotacheck.service: name: systemd-quotacheck.service source: systemd state: stopped status: static systemd-random-seed.service: name: systemd-random-seed.service source: systemd state: stopped status: static systemd-reboot.service: name: systemd-reboot.service source: systemd state: inactive status: static systemd-remount-fs.service: name: systemd-remount-fs.service source: systemd state: stopped status: enabled-runtime systemd-repart.service: name: systemd-repart.service source: systemd state: stopped status: masked systemd-rfkill.service: name: systemd-rfkill.service source: systemd state: stopped status: static systemd-suspend-then-hibernate.service: name: systemd-suspend-then-hibernate.service source: systemd state: inactive status: static systemd-suspend.service: name: systemd-suspend.service source: systemd state: inactive status: static systemd-sysctl.service: name: systemd-sysctl.service source: systemd state: stopped status: static systemd-sysext.service: name: systemd-sysext.service source: systemd state: stopped status: disabled systemd-sysupdate-reboot.service: name: systemd-sysupdate-reboot.service source: systemd state: inactive status: indirect systemd-sysupdate.service: name: systemd-sysupdate.service source: systemd state: inactive status: indirect systemd-sysusers.service: name: systemd-sysusers.service source: systemd state: stopped status: static systemd-timedated.service: name: systemd-timedated.service source: systemd state: inactive status: static systemd-timesyncd.service: name: systemd-timesyncd.service source: systemd state: stopped status: not-found systemd-tmpfiles-clean.service: name: systemd-tmpfiles-clean.service source: systemd state: stopped status: static systemd-tmpfiles-setup-dev.service: name: systemd-tmpfiles-setup-dev.service source: systemd state: stopped status: static systemd-tmpfiles-setup.service: name: systemd-tmpfiles-setup.service source: systemd state: stopped status: static 
systemd-tmpfiles.service: name: systemd-tmpfiles.service source: systemd state: stopped status: not-found systemd-udev-settle.service: name: systemd-udev-settle.service source: systemd state: stopped status: static systemd-udev-trigger.service: name: systemd-udev-trigger.service source: systemd state: stopped status: static systemd-udevd.service: name: systemd-udevd.service source: systemd state: running status: static systemd-update-done.service: name: systemd-update-done.service source: systemd state: stopped status: static systemd-update-utmp-runlevel.service: name: systemd-update-utmp-runlevel.service source: systemd state: stopped status: static systemd-update-utmp.service: name: systemd-update-utmp.service source: systemd state: stopped status: static systemd-user-sessions.service: name: systemd-user-sessions.service source: systemd state: stopped status: static systemd-vconsole-setup.service: name: systemd-vconsole-setup.service source: systemd state: stopped status: static systemd-volatile-root.service: name: systemd-volatile-root.service source: systemd state: inactive status: static systemd-zram-setup@.service: name: systemd-zram-setup@.service source: systemd state: unknown status: static teamd@.service: name: teamd@.service source: systemd state: unknown status: static unbound-anchor.service: name: unbound-anchor.service source: systemd state: stopped status: static user-runtime-dir@.service: name: user-runtime-dir@.service source: systemd state: unknown status: static user-runtime-dir@0.service: name: user-runtime-dir@0.service source: systemd state: stopped status: active user-runtime-dir@1000.service: name: user-runtime-dir@1000.service source: systemd state: stopped status: active user@.service: name: user@.service source: systemd state: unknown status: static user@0.service: name: user@0.service source: systemd state: running status: active user@1000.service: name: user@1000.service source: systemd state: running status: active vgauthd.service: name: vgauthd.service source: systemd state: stopped status: enabled vmtoolsd.service: name: vmtoolsd.service source: systemd state: stopped status: enabled wait-for-primary-ip.service: name: wait-for-primary-ip.service source: systemd state: stopped status: enabled ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBDs7MQU61ADe4LfEllZo6w2h2Vo1Z9nNArIkKGmgua8bOly2nQBIoDIKgNOXqUpoIZx1528UeeHSQu9SxYL21mo= ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIDKHFhjB7ae+dVOClQLGXnCaMXGjEeLhmEhxE64Ddkhe ssh_host_key_ed25519_public_keytype: ssh-ed25519 ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQCr2rWpvTGLA5BK4eYXB55gorB9vAJK1K0iUmnm+r9AcvcXH33bR/O6ZNh9h85mHU5l1Gw9nBLRbHn42EU+6Ht6te2Z1gIiJEKpfiC0sR0aMcT4hKQWHmwYqQM/VLXhPiS4OnhO1OJuz0arj1Anr1hDcEJpVTAj3sbfkgzzbBeEWMg2V3Apr1fqDimNlyWRiDFy3TUdKfnB7nucGaGbHneeVxvwv81RGur6I9VHZe/odqEQTGRUBXdu57xybxd6Yc3863ayL5L1OhGTN/x7d8qeEJGb9zt6VvtFWlpVjIXa2l+uTZVfTvufdLwxJdBRg0kHMXH2ZJ3U8w9NRHMBHG7M6YjX0w95uCB/FnyN6s8V/KRQtSnC6Wt6YMP438rM2K9yydXdS/qUQm5hQLP7eY8/Nl4+RDQAvZOjPp+DeUxXfZOqR4qq8tCKi/5Cvd7ChYfPyymeV4RKAJf971EuO0zphyDK8knic0c2XTybK6WTM8lYcbUMYJxg1CW5o1VMjpk= ssh_host_key_rsa_public_keytype: ssh-rsa swapfree_mb: 0 swaptotal_mb: 0 system: Linux system_capabilities: - '' system_capabilities_enforced: 'True' system_vendor: OpenStack Foundation uptime_seconds: 265 user_dir: /var/home/core user_gecos: CoreOS Admin user_gid: 1000 user_id: core user_shell: /bin/bash user_uid: 1000 userspace_architecture: x86_64 
userspace_bits: '64' virtualization_role: guest virtualization_tech_guest: - openstack virtualization_tech_host: - kvm virtualization_type: openstack ansible_fibre_channel_wwn: [] ansible_fips: false ansible_forks: 5 ansible_form_factor: Other ansible_fqdn: crc ansible_host: 38.102.83.110 ansible_hostname: crc ansible_hostnqn: nqn.2014-08.org.nvmexpress:uuid:fe28b1dc-f424-4106-9c95-00604d2bcd5f ansible_interfaces: - ens3 - ovs-system - eth10 - ovn-k8s-mp0 - lo - br-int - br-ex ansible_inventory_sources: - /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/post_playbook_0/inventory.yaml ansible_is_chroot: true ansible_iscsi_iqn: iqn.1994-05.com.redhat:24fed7ce643e ansible_kernel: 5.14.0-427.22.1.el9_4.x86_64 ansible_kernel_version: '#1 SMP PREEMPT_DYNAMIC Mon Jun 10 09:23:36 EDT 2024' ansible_lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] netns_local: on [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_lockless: on [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback ansible_loadavg: 15m: 0.15 1m: 0.39 5m: 0.33 ansible_local: {} ansible_locally_reachable_ips: ipv4: - 38.102.83.110 - 127.0.0.0/8 - 127.0.0.1 - 192.168.126.11 ipv6: - ::1 - fe80::11a5:cf8e:c9b5:dc91 ansible_lsb: {} ansible_lvm: N/A ansible_machine: x86_64 ansible_machine_id: c1bd596843fb445da20eca66471ddf66 ansible_memfree_mb: 28958 ansible_memory_mb: nocache: free: 30299 used: 1796 real: free: 28958 total: 32095 used: 3137 swap: cached: 0 free: 0 total: 0 used: 0 ansible_memtotal_mb: 32095 ansible_mounts: - block_available: 13218334 
block_size: 4096 block_total: 20823803 block_used: 7605469 device: /dev/vda4 fstype: xfs inode_available: 41489056 inode_total: 41680320 inode_used: 191264 mount: /sysroot options: ro,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota size_available: 54142296064 size_total: 85294297088 uuid: 68d6f3e9-64e9-44a4-a1d0-311f9c629a01 - block_available: 13218334 block_size: 4096 block_total: 20823803 block_used: 7605469 device: /dev/vda4 fstype: xfs inode_available: 41489056 inode_total: 41680320 inode_used: 191264 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 54142296064 size_total: 85294297088 uuid: 68d6f3e9-64e9-44a4-a1d0-311f9c629a01 - block_available: 13218334 block_size: 4096 block_total: 20823803 block_used: 7605469 device: /dev/vda4 fstype: xfs inode_available: 41489056 inode_total: 41680320 inode_used: 191264 mount: /etc options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 54142296064 size_total: 85294297088 uuid: 68d6f3e9-64e9-44a4-a1d0-311f9c629a01 - block_available: 13218334 block_size: 4096 block_total: 20823803 block_used: 7605469 device: /dev/vda4 fstype: xfs inode_available: 41489056 inode_total: 41680320 inode_used: 191264 mount: /usr options: ro,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 54142296064 size_total: 85294297088 uuid: 68d6f3e9-64e9-44a4-a1d0-311f9c629a01 - block_available: 13218334 block_size: 4096 block_total: 20823803 block_used: 7605469 device: /dev/vda4 fstype: xfs inode_available: 41489056 inode_total: 41680320 inode_used: 191264 mount: /sysroot/ostree/deploy/rhcos/var options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 54142296064 size_total: 85294297088 uuid: 68d6f3e9-64e9-44a4-a1d0-311f9c629a01 - block_available: 13218334 block_size: 4096 block_total: 20823803 block_used: 7605469 device: /dev/vda4 fstype: xfs inode_available: 41489056 inode_total: 41680320 inode_used: 191264 mount: /var options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 54142296064 size_total: 85294297088 uuid: 68d6f3e9-64e9-44a4-a1d0-311f9c629a01 - block_available: 221344 block_size: 1024 block_total: 358271 block_used: 136927 device: /dev/vda3 fstype: ext4 inode_available: 97936 inode_total: 98304 inode_used: 368 mount: /boot options: ro,seclabel,nosuid,nodev,relatime size_available: 226656256 size_total: 366869504 uuid: 6ea7ef63-bc43-49c4-9337-b3b14ffb2763 - block_available: 0 block_size: 2048 block_total: 241 block_used: 241 device: /dev/sr0 fstype: iso9660 inode_available: 0 inode_total: 0 inode_used: 0 mount: /tmp/openstack-config-drive options: ro,relatime,nojoliet,check=s,map=n,blocksize=2048 size_available: 0 size_total: 493568 uuid: 2025-10-06-20-54-17-00 ansible_nodename: crc ansible_os_family: RedHat ansible_ovn_k8s_mp0: active: false device: ovn-k8s-mp0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] 
rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_lockless: on [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: b6:dc:d9:26:03:d4 mtu: 1400 promisc: true timestamping: [] type: ether ansible_ovs_system: active: false device: ovs-system features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: on [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_lockless: on [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: 7a:ee:8d:4d:53:32 mtu: 1500 promisc: true timestamping: [] type: ether ansible_pkg_mgr: atomic_container ansible_playbook_python: /usr/lib/zuul/ansible/8/bin/python ansible_port: 22 ansible_proc_cmdline: BOOT_IMAGE: 
(hd0,gpt3)/boot/ostree/rhcos-8a7990dabf52ac75b58b2f3e4b0ab7fa03a563df103fbd3b4d71c823481c83ff/vmlinuz-5.14.0-427.22.1.el9_4.x86_64 boot: UUID=6ea7ef63-bc43-49c4-9337-b3b14ffb2763 cgroup_no_v1: all ignition.platform.id: metal ostree: /ostree/boot.1/rhcos/8a7990dabf52ac75b58b2f3e4b0ab7fa03a563df103fbd3b4d71c823481c83ff/0 psi: '1' root: UUID=68d6f3e9-64e9-44a4-a1d0-311f9c629a01 rootflags: prjquota rw: true systemd.unified_cgroup_hierarchy: '1' ansible_processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor - '8' - AuthenticAMD - AMD EPYC-Rome Processor - '9' - AuthenticAMD - AMD EPYC-Rome Processor - '10' - AuthenticAMD - AMD EPYC-Rome Processor - '11' - AuthenticAMD - AMD EPYC-Rome Processor ansible_processor_cores: 1 ansible_processor_count: 12 ansible_processor_nproc: 12 ansible_processor_threads_per_core: 1 ansible_processor_vcpus: 12 ansible_product_name: OpenStack Nova ansible_product_serial: NA ansible_product_uuid: NA ansible_product_version: 26.2.1 ansible_python: executable: /usr/bin/python3.9 has_sslcontext: true type: cpython version: major: 3 micro: 18 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 18 - final - 0 ansible_python_interpreter: auto ansible_python_version: 3.9.18 ansible_real_group_id: 1000 ansible_real_user_id: 1000 ansible_run_tags: - all ansible_scp_extra_args: -o PermitLocalCommand=no ansible_selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted ansible_selinux_python_present: true ansible_service_mgr: systemd ansible_sftp_extra_args: -o PermitLocalCommand=no ansible_skip_tags: [] ansible_ssh_common_args: -o PermitLocalCommand=no ansible_ssh_executable: ssh ansible_ssh_extra_args: -o PermitLocalCommand=no ansible_ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBDs7MQU61ADe4LfEllZo6w2h2Vo1Z9nNArIkKGmgua8bOly2nQBIoDIKgNOXqUpoIZx1528UeeHSQu9SxYL21mo= ansible_ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ansible_ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIDKHFhjB7ae+dVOClQLGXnCaMXGjEeLhmEhxE64Ddkhe ansible_ssh_host_key_ed25519_public_keytype: ssh-ed25519 ansible_ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQCr2rWpvTGLA5BK4eYXB55gorB9vAJK1K0iUmnm+r9AcvcXH33bR/O6ZNh9h85mHU5l1Gw9nBLRbHn42EU+6Ht6te2Z1gIiJEKpfiC0sR0aMcT4hKQWHmwYqQM/VLXhPiS4OnhO1OJuz0arj1Anr1hDcEJpVTAj3sbfkgzzbBeEWMg2V3Apr1fqDimNlyWRiDFy3TUdKfnB7nucGaGbHneeVxvwv81RGur6I9VHZe/odqEQTGRUBXdu57xybxd6Yc3863ayL5L1OhGTN/x7d8qeEJGb9zt6VvtFWlpVjIXa2l+uTZVfTvufdLwxJdBRg0kHMXH2ZJ3U8w9NRHMBHG7M6YjX0w95uCB/FnyN6s8V/KRQtSnC6Wt6YMP438rM2K9yydXdS/qUQm5hQLP7eY8/Nl4+RDQAvZOjPp+DeUxXfZOqR4qq8tCKi/5Cvd7ChYfPyymeV4RKAJf971EuO0zphyDK8knic0c2XTybK6WTM8lYcbUMYJxg1CW5o1VMjpk= ansible_ssh_host_key_rsa_public_keytype: ssh-rsa ansible_swapfree_mb: 0 ansible_swaptotal_mb: 0 ansible_system: Linux ansible_system_capabilities: - '' ansible_system_capabilities_enforced: 'True' ansible_system_vendor: OpenStack Foundation ansible_uptime_seconds: 265 ansible_user: core ansible_user_dir: /var/home/core ansible_user_gecos: CoreOS Admin ansible_user_gid: 1000 ansible_user_id: core ansible_user_shell: /bin/bash ansible_user_uid: 1000 ansible_userspace_architecture: x86_64 ansible_userspace_bits: '64' 
ansible_verbosity: 1 ansible_version: full: 2.15.12 major: 2 minor: 15 revision: 12 string: 2.15.12 ansible_virtualization_role: guest ansible_virtualization_tech_guest: - openstack ansible_virtualization_tech_host: - kvm ansible_virtualization_type: openstack cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_build_images_output: {} cifmw_dlrn_report_result: false cifmw_edpm_telemetry_enabled_exporters: - podman_exporter - openstack_network_exporter cifmw_extras: - '@/var/home/core/src/github.com/openstack-k8s-operators/ci-framework/scenarios/centos-9/multinode-ci.yml' - '@/var/home/core/src/github.com/openstack-k8s-operators/ci-framework/scenarios/centos-9/horizon.yml' - '@/var/home/core/src/github.com/openstack-k8s-operators/watcher-operator/ci/scenarios/edpm-no-notifications.yml' - '@/var/home/core/src/github.com/openstack-k8s-operators/watcher-operator/ci/tests/watcher-tempest.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: /var/home/core/.crc/machines/crc/kubeconfig cifmw_openshift_password: '123456789' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_operator_build_output: operators: openstack-operator: git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9 git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator image: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9 image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9 image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9 watcher-operator: git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d cifmw_test_operator_tempest_external_plugin: - changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin repository: https://opendev.org/openstack/watcher-tempest-plugin.git cifmw_test_operator_tempest_image_tag: watcher_latest cifmw_test_operator_tempest_namespace: podified-epoxy-centos9 cifmw_test_operator_tempest_registry: 38.102.83.53:5001 cifmw_update_containers_openstack: false cifmw_update_containers_org: podified-epoxy-centos9 cifmw_update_containers_registry: 38.102.83.53:5001 cifmw_update_containers_tag: watcher_latest cifmw_update_containers_watcher: true cifmw_use_libvirt: false cifmw_zuul_target_host: controller content_provider_dlrn_md5_hash: '' content_provider_os_registry_url: 38.102.83.53:5001/podified-epoxy-centos9 content_provider_registry_ip: 38.102.83.53 crc_ci_bootstrap_cloud_name: vexxhost crc_ci_bootstrap_networking: instances: compute-0: networks: default: ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 tenant: config_nm: false ip: 172.19.0.100 compute-1: networks: default: ip: 192.168.122.101 internal-api: config_nm: false ip: 172.17.0.101 storage: config_nm: false ip: 172.18.0.101 tenant: config_nm: false ip: 172.19.0.101 controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 
192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: '1500' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 discovered_interpreter_python: /usr/bin/python3.9 enable_ramdisk: true fetch_dlrn_hash: false gather_subset: - all group_names: - ocps groups: all: - compute-0 - compute-1 - controller - crc computes: - compute-0 - compute-1 ocps: - crc ungrouped: *id001 zuul_unreachable: [] inventory_dir: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/post_playbook_0 inventory_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/post_playbook_0/inventory.yaml inventory_hostname: crc inventory_hostname_short: crc module_setup: true nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: bfd057b4-b43d-4dc5-bc10-e91bf10a649b host_id: 7be6eb536a89b1266edff7cfa16e93a8ea0da5df2cfadeeb194a3ffc interface_ip: 38.102.83.110 label: coreos-crc-extracted-2-39-0-3xl private_ipv4: 38.102.83.110 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.110 public_ipv6: '' region: RegionOne slot: null omit: __omit_place_holder__7c6be8c090d39d526a4b5005be5e95dd82bdf54e playbook_dir: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true services: NetworkManager-clean-initrd-state.service: name: NetworkManager-clean-initrd-state.service source: systemd state: stopped status: enabled NetworkManager-dispatcher.service: name: NetworkManager-dispatcher.service source: systemd state: running status: enabled NetworkManager-wait-online.service: name: NetworkManager-wait-online.service source: systemd state: stopped status: enabled NetworkManager.service: name: NetworkManager.service source: systemd state: running status: enabled afterburn-checkin.service: name: afterburn-checkin.service source: systemd state: stopped status: enabled afterburn-firstboot-checkin.service: name: afterburn-firstboot-checkin.service source: systemd state: stopped status: enabled afterburn-sshkeys@.service: name: afterburn-sshkeys@.service source: systemd state: unknown status: disabled afterburn.service: name: afterburn.service source: systemd state: inactive status: disabled arp-ethers.service: name: arp-ethers.service source: systemd state: inactive status: disabled auditd.service: name: auditd.service source: systemd state: running status: enabled auth-rpcgss-module.service: name: auth-rpcgss-module.service source: systemd state: stopped status: static autovt@.service: name: autovt@.service source: systemd state: unknown status: alias blk-availability.service: name: blk-availability.service source: systemd state: stopped status: disabled bootc-fetch-apply-updates.service: name: bootc-fetch-apply-updates.service source: systemd state: inactive status: static bootkube.service: name: bootkube.service source: systemd state: inactive status: disabled bootupd.service: name: bootupd.service source: systemd state: stopped status: static chrony-wait.service: name: chrony-wait.service source: systemd state: inactive status: disabled chronyd-restricted.service: name: chronyd-restricted.service source: systemd state: inactive status: disabled chronyd.service: name: chronyd.service source: systemd state: running status: 
enabled clevis-luks-askpass.service: name: clevis-luks-askpass.service source: systemd state: stopped status: static cni-dhcp.service: name: cni-dhcp.service source: systemd state: inactive status: disabled configure-cloudinit-ssh.service: name: configure-cloudinit-ssh.service source: systemd state: stopped status: enabled console-getty.service: name: console-getty.service source: systemd state: inactive status: disabled console-login-helper-messages-gensnippet-ssh-keys.service: name: console-login-helper-messages-gensnippet-ssh-keys.service source: systemd state: stopped status: enabled container-getty@.service: name: container-getty@.service source: systemd state: unknown status: static coreos-generate-iscsi-initiatorname.service: name: coreos-generate-iscsi-initiatorname.service source: systemd state: stopped status: enabled coreos-ignition-delete-config.service: name: coreos-ignition-delete-config.service source: systemd state: stopped status: enabled coreos-ignition-firstboot-complete.service: name: coreos-ignition-firstboot-complete.service source: systemd state: stopped status: enabled coreos-ignition-write-issues.service: name: coreos-ignition-write-issues.service source: systemd state: stopped status: enabled coreos-installer-disable-device-auto-activation.service: name: coreos-installer-disable-device-auto-activation.service source: systemd state: inactive status: static coreos-installer-noreboot.service: name: coreos-installer-noreboot.service source: systemd state: inactive status: static coreos-installer-reboot.service: name: coreos-installer-reboot.service source: systemd state: inactive status: static coreos-installer-secure-ipl-reboot.service: name: coreos-installer-secure-ipl-reboot.service source: systemd state: inactive status: static coreos-installer.service: name: coreos-installer.service source: systemd state: inactive status: static coreos-liveiso-success.service: name: coreos-liveiso-success.service source: systemd state: stopped status: enabled coreos-platform-chrony-config.service: name: coreos-platform-chrony-config.service source: systemd state: stopped status: enabled coreos-populate-lvmdevices.service: name: coreos-populate-lvmdevices.service source: systemd state: stopped status: enabled coreos-printk-quiet.service: name: coreos-printk-quiet.service source: systemd state: stopped status: enabled coreos-update-ca-trust.service: name: coreos-update-ca-trust.service source: systemd state: stopped status: enabled crc-dnsmasq.service: name: crc-dnsmasq.service source: systemd state: stopped status: not-found crc-pre.service: name: crc-pre.service source: systemd state: stopped status: enabled crio-subid.service: name: crio-subid.service source: systemd state: stopped status: enabled crio-wipe.service: name: crio-wipe.service source: systemd state: stopped status: disabled crio.service: name: crio.service source: systemd state: stopped status: disabled dbus-broker.service: name: dbus-broker.service source: systemd state: running status: enabled dbus-org.freedesktop.hostname1.service: name: dbus-org.freedesktop.hostname1.service source: systemd state: active status: alias dbus-org.freedesktop.locale1.service: name: dbus-org.freedesktop.locale1.service source: systemd state: inactive status: alias dbus-org.freedesktop.login1.service: name: dbus-org.freedesktop.login1.service source: systemd state: active status: alias dbus-org.freedesktop.nm-dispatcher.service: name: dbus-org.freedesktop.nm-dispatcher.service source: systemd state: active status: alias 
dbus-org.freedesktop.timedate1.service: name: dbus-org.freedesktop.timedate1.service source: systemd state: inactive status: alias dbus.service: name: dbus.service source: systemd state: active status: alias debug-shell.service: name: debug-shell.service source: systemd state: inactive status: disabled disable-mglru.service: name: disable-mglru.service source: systemd state: stopped status: enabled display-manager.service: name: display-manager.service source: systemd state: stopped status: not-found dm-event.service: name: dm-event.service source: systemd state: stopped status: static dnf-makecache.service: name: dnf-makecache.service source: systemd state: inactive status: static dnsmasq.service: name: dnsmasq.service source: systemd state: running status: enabled dracut-cmdline.service: name: dracut-cmdline.service source: systemd state: stopped status: static dracut-initqueue.service: name: dracut-initqueue.service source: systemd state: stopped status: static dracut-mount.service: name: dracut-mount.service source: systemd state: stopped status: static dracut-pre-mount.service: name: dracut-pre-mount.service source: systemd state: stopped status: static dracut-pre-pivot.service: name: dracut-pre-pivot.service source: systemd state: stopped status: static dracut-pre-trigger.service: name: dracut-pre-trigger.service source: systemd state: stopped status: static dracut-pre-udev.service: name: dracut-pre-udev.service source: systemd state: stopped status: static dracut-shutdown-onfailure.service: name: dracut-shutdown-onfailure.service source: systemd state: stopped status: static dracut-shutdown.service: name: dracut-shutdown.service source: systemd state: stopped status: static dummy-network.service: name: dummy-network.service source: systemd state: stopped status: enabled emergency.service: name: emergency.service source: systemd state: stopped status: static fcoe.service: name: fcoe.service source: systemd state: stopped status: not-found fstrim.service: name: fstrim.service source: systemd state: inactive status: static fwupd-offline-update.service: name: fwupd-offline-update.service source: systemd state: inactive status: static fwupd-refresh.service: name: fwupd-refresh.service source: systemd state: inactive status: static fwupd.service: name: fwupd.service source: systemd state: inactive status: static gcp-routes.service: name: gcp-routes.service source: systemd state: stopped status: enabled getty@.service: name: getty@.service source: systemd state: unknown status: enabled getty@tty1.service: name: getty@tty1.service source: systemd state: running status: active gssproxy.service: name: gssproxy.service source: systemd state: stopped status: disabled gvisor-tap-vsock.service: name: gvisor-tap-vsock.service source: systemd state: running status: enabled hypervfcopyd.service: name: hypervfcopyd.service source: systemd state: inactive status: static hypervkvpd.service: name: hypervkvpd.service source: systemd state: inactive status: static hypervvssd.service: name: hypervvssd.service source: systemd state: inactive status: static ignition-delete-config.service: name: ignition-delete-config.service source: systemd state: stopped status: enabled initrd-cleanup.service: name: initrd-cleanup.service source: systemd state: stopped status: static initrd-parse-etc.service: name: initrd-parse-etc.service source: systemd state: stopped status: static initrd-switch-root.service: name: initrd-switch-root.service source: systemd state: stopped status: static initrd-udevadm-cleanup-db.service: 
name: initrd-udevadm-cleanup-db.service source: systemd state: stopped status: static irqbalance.service: name: irqbalance.service source: systemd state: running status: enabled iscsi-init.service: name: iscsi-init.service source: systemd state: stopped status: disabled iscsi-onboot.service: name: iscsi-onboot.service source: systemd state: stopped status: enabled iscsi-shutdown.service: name: iscsi-shutdown.service source: systemd state: stopped status: static iscsi-starter.service: name: iscsi-starter.service source: systemd state: inactive status: disabled iscsi.service: name: iscsi.service source: systemd state: stopped status: indirect iscsid.service: name: iscsid.service source: systemd state: stopped status: disabled iscsiuio.service: name: iscsiuio.service source: systemd state: stopped status: disabled kdump.service: name: kdump.service source: systemd state: stopped status: disabled kmod-static-nodes.service: name: kmod-static-nodes.service source: systemd state: stopped status: static kubelet-auto-node-size.service: name: kubelet-auto-node-size.service source: systemd state: stopped status: enabled kubelet-cleanup.service: name: kubelet-cleanup.service source: systemd state: stopped status: enabled kubelet.service: name: kubelet.service source: systemd state: stopped status: disabled kubens.service: name: kubens.service source: systemd state: stopped status: disabled ldconfig.service: name: ldconfig.service source: systemd state: stopped status: static logrotate.service: name: logrotate.service source: systemd state: stopped status: static lvm2-activation-early.service: name: lvm2-activation-early.service source: systemd state: stopped status: not-found lvm2-lvmpolld.service: name: lvm2-lvmpolld.service source: systemd state: stopped status: static lvm2-monitor.service: name: lvm2-monitor.service source: systemd state: stopped status: enabled machine-config-daemon-firstboot.service: name: machine-config-daemon-firstboot.service source: systemd state: stopped status: enabled machine-config-daemon-pull.service: name: machine-config-daemon-pull.service source: systemd state: stopped status: enabled mdadm-grow-continue@.service: name: mdadm-grow-continue@.service source: systemd state: unknown status: static mdadm-last-resort@.service: name: mdadm-last-resort@.service source: systemd state: unknown status: static mdcheck_continue.service: name: mdcheck_continue.service source: systemd state: inactive status: static mdcheck_start.service: name: mdcheck_start.service source: systemd state: inactive status: static mdmon@.service: name: mdmon@.service source: systemd state: unknown status: static mdmonitor-oneshot.service: name: mdmonitor-oneshot.service source: systemd state: inactive status: static mdmonitor.service: name: mdmonitor.service source: systemd state: stopped status: enabled microcode.service: name: microcode.service source: systemd state: stopped status: enabled modprobe@.service: name: modprobe@.service source: systemd state: unknown status: static modprobe@configfs.service: name: modprobe@configfs.service source: systemd state: stopped status: inactive modprobe@drm.service: name: modprobe@drm.service source: systemd state: stopped status: inactive modprobe@efi_pstore.service: name: modprobe@efi_pstore.service source: systemd state: stopped status: inactive modprobe@fuse.service: name: modprobe@fuse.service source: systemd state: stopped status: inactive multipathd.service: name: multipathd.service source: systemd state: stopped status: enabled 
netavark-dhcp-proxy.service: name: netavark-dhcp-proxy.service source: systemd state: inactive status: disabled netavark-firewalld-reload.service: name: netavark-firewalld-reload.service source: systemd state: inactive status: disabled network.service: name: network.service source: systemd state: stopped status: not-found nfs-blkmap.service: name: nfs-blkmap.service source: systemd state: inactive status: disabled nfs-idmapd.service: name: nfs-idmapd.service source: systemd state: stopped status: static nfs-mountd.service: name: nfs-mountd.service source: systemd state: stopped status: static nfs-server.service: name: nfs-server.service source: systemd state: stopped status: disabled nfs-utils.service: name: nfs-utils.service source: systemd state: stopped status: static nfsdcld.service: name: nfsdcld.service source: systemd state: stopped status: static nftables.service: name: nftables.service source: systemd state: inactive status: disabled nis-domainname.service: name: nis-domainname.service source: systemd state: inactive status: disabled nm-cloud-setup.service: name: nm-cloud-setup.service source: systemd state: inactive status: disabled nm-priv-helper.service: name: nm-priv-helper.service source: systemd state: inactive status: static nmstate.service: name: nmstate.service source: systemd state: stopped status: enabled node-valid-hostname.service: name: node-valid-hostname.service source: systemd state: stopped status: enabled nodeip-configuration.service: name: nodeip-configuration.service source: systemd state: stopped status: enabled ntpd.service: name: ntpd.service source: systemd state: stopped status: not-found ntpdate.service: name: ntpdate.service source: systemd state: stopped status: not-found nvmefc-boot-connections.service: name: nvmefc-boot-connections.service source: systemd state: stopped status: enabled nvmf-autoconnect.service: name: nvmf-autoconnect.service source: systemd state: inactive status: disabled nvmf-connect@.service: name: nvmf-connect@.service source: systemd state: unknown status: static openvswitch.service: name: openvswitch.service source: systemd state: stopped status: enabled ostree-boot-complete.service: name: ostree-boot-complete.service source: systemd state: stopped status: enabled-runtime ostree-finalize-staged-hold.service: name: ostree-finalize-staged-hold.service source: systemd state: stopped status: static ostree-finalize-staged.service: name: ostree-finalize-staged.service source: systemd state: stopped status: static ostree-prepare-root.service: name: ostree-prepare-root.service source: systemd state: inactive status: static ostree-readonly-sysroot-migration.service: name: ostree-readonly-sysroot-migration.service source: systemd state: stopped status: disabled ostree-remount.service: name: ostree-remount.service source: systemd state: stopped status: enabled ostree-state-overlay@.service: name: ostree-state-overlay@.service source: systemd state: unknown status: disabled ovs-configuration.service: name: ovs-configuration.service source: systemd state: stopped status: enabled ovs-delete-transient-ports.service: name: ovs-delete-transient-ports.service source: systemd state: stopped status: static ovs-vswitchd.service: name: ovs-vswitchd.service source: systemd state: running status: static ovsdb-server.service: name: ovsdb-server.service source: systemd state: running status: static pam_namespace.service: name: pam_namespace.service source: systemd state: inactive status: static plymouth-quit-wait.service: name: 
plymouth-quit-wait.service source: systemd state: stopped status: not-found plymouth-read-write.service: name: plymouth-read-write.service source: systemd state: stopped status: not-found plymouth-start.service: name: plymouth-start.service source: systemd state: stopped status: not-found podman-auto-update.service: name: podman-auto-update.service source: systemd state: inactive status: disabled podman-clean-transient.service: name: podman-clean-transient.service source: systemd state: inactive status: disabled podman-kube@.service: name: podman-kube@.service source: systemd state: unknown status: disabled podman-restart.service: name: podman-restart.service source: systemd state: inactive status: disabled podman.service: name: podman.service source: systemd state: stopped status: disabled polkit.service: name: polkit.service source: systemd state: inactive status: static qemu-guest-agent.service: name: qemu-guest-agent.service source: systemd state: stopped status: enabled quotaon.service: name: quotaon.service source: systemd state: inactive status: static raid-check.service: name: raid-check.service source: systemd state: inactive status: static rbdmap.service: name: rbdmap.service source: systemd state: stopped status: not-found rc-local.service: name: rc-local.service source: systemd state: stopped status: static rdisc.service: name: rdisc.service source: systemd state: inactive status: disabled rdma-load-modules@.service: name: rdma-load-modules@.service source: systemd state: unknown status: static rdma-ndd.service: name: rdma-ndd.service source: systemd state: inactive status: static rescue.service: name: rescue.service source: systemd state: stopped status: static rhcos-usrlocal-selinux-fixup.service: name: rhcos-usrlocal-selinux-fixup.service source: systemd state: stopped status: enabled rpc-gssd.service: name: rpc-gssd.service source: systemd state: stopped status: static rpc-statd-notify.service: name: rpc-statd-notify.service source: systemd state: stopped status: static rpc-statd.service: name: rpc-statd.service source: systemd state: stopped status: static rpc-svcgssd.service: name: rpc-svcgssd.service source: systemd state: stopped status: not-found rpcbind.service: name: rpcbind.service source: systemd state: stopped status: disabled rpm-ostree-bootstatus.service: name: rpm-ostree-bootstatus.service source: systemd state: inactive status: disabled rpm-ostree-countme.service: name: rpm-ostree-countme.service source: systemd state: inactive status: static rpm-ostree-fix-shadow-mode.service: name: rpm-ostree-fix-shadow-mode.service source: systemd state: stopped status: disabled rpm-ostreed-automatic.service: name: rpm-ostreed-automatic.service source: systemd state: inactive status: static rpm-ostreed.service: name: rpm-ostreed.service source: systemd state: inactive status: static rpmdb-rebuild.service: name: rpmdb-rebuild.service source: systemd state: inactive status: disabled selinux-autorelabel-mark.service: name: selinux-autorelabel-mark.service source: systemd state: stopped status: enabled selinux-autorelabel.service: name: selinux-autorelabel.service source: systemd state: inactive status: static selinux-check-proper-disable.service: name: selinux-check-proper-disable.service source: systemd state: inactive status: disabled serial-getty@.service: name: serial-getty@.service source: systemd state: unknown status: disabled sntp.service: name: sntp.service source: systemd state: stopped status: not-found sshd-keygen@.service: name: sshd-keygen@.service source: 
systemd state: unknown status: disabled sshd-keygen@ecdsa.service: name: sshd-keygen@ecdsa.service source: systemd state: stopped status: inactive sshd-keygen@ed25519.service: name: sshd-keygen@ed25519.service source: systemd state: stopped status: inactive sshd-keygen@rsa.service: name: sshd-keygen@rsa.service source: systemd state: stopped status: inactive sshd.service: name: sshd.service source: systemd state: running status: enabled sshd@.service: name: sshd@.service source: systemd state: unknown status: static sssd-autofs.service: name: sssd-autofs.service source: systemd state: inactive status: indirect sssd-nss.service: name: sssd-nss.service source: systemd state: inactive status: indirect sssd-pac.service: name: sssd-pac.service source: systemd state: inactive status: indirect sssd-pam.service: name: sssd-pam.service source: systemd state: inactive status: indirect sssd-ssh.service: name: sssd-ssh.service source: systemd state: inactive status: indirect sssd-sudo.service: name: sssd-sudo.service source: systemd state: inactive status: indirect sssd.service: name: sssd.service source: systemd state: stopped status: enabled stalld.service: name: stalld.service source: systemd state: inactive status: disabled syslog.service: name: syslog.service source: systemd state: stopped status: not-found system-update-cleanup.service: name: system-update-cleanup.service source: systemd state: inactive status: static systemd-ask-password-console.service: name: systemd-ask-password-console.service source: systemd state: stopped status: static systemd-ask-password-wall.service: name: systemd-ask-password-wall.service source: systemd state: stopped status: static systemd-backlight@.service: name: systemd-backlight@.service source: systemd state: unknown status: static systemd-binfmt.service: name: systemd-binfmt.service source: systemd state: stopped status: static systemd-bless-boot.service: name: systemd-bless-boot.service source: systemd state: inactive status: static systemd-boot-check-no-failures.service: name: systemd-boot-check-no-failures.service source: systemd state: inactive status: disabled systemd-boot-random-seed.service: name: systemd-boot-random-seed.service source: systemd state: stopped status: static systemd-boot-update.service: name: systemd-boot-update.service source: systemd state: stopped status: enabled systemd-coredump@.service: name: systemd-coredump@.service source: systemd state: unknown status: static systemd-exit.service: name: systemd-exit.service source: systemd state: inactive status: static systemd-fsck-root.service: name: systemd-fsck-root.service source: systemd state: stopped status: static systemd-fsck@.service: name: systemd-fsck@.service source: systemd state: unknown status: static systemd-fsck@dev-disk-by\x2duuid-6ea7ef63\x2dbc43\x2d49c4\x2d9337\x2db3b14ffb2763.service: name: systemd-fsck@dev-disk-by\x2duuid-6ea7ef63\x2dbc43\x2d49c4\x2d9337\x2db3b14ffb2763.service source: systemd state: stopped status: active systemd-growfs-root.service: name: systemd-growfs-root.service source: systemd state: inactive status: static systemd-growfs@.service: name: systemd-growfs@.service source: systemd state: unknown status: static systemd-halt.service: name: systemd-halt.service source: systemd state: inactive status: static systemd-hibernate-resume@.service: name: systemd-hibernate-resume@.service source: systemd state: unknown status: static systemd-hibernate.service: name: systemd-hibernate.service source: systemd state: inactive status: static 
systemd-hostnamed.service: name: systemd-hostnamed.service source: systemd state: running status: static systemd-hwdb-update.service: name: systemd-hwdb-update.service source: systemd state: stopped status: static systemd-hybrid-sleep.service: name: systemd-hybrid-sleep.service source: systemd state: inactive status: static systemd-initctl.service: name: systemd-initctl.service source: systemd state: stopped status: static systemd-journal-catalog-update.service: name: systemd-journal-catalog-update.service source: systemd state: stopped status: static systemd-journal-flush.service: name: systemd-journal-flush.service source: systemd state: stopped status: static systemd-journal-gatewayd.service: name: systemd-journal-gatewayd.service source: systemd state: inactive status: indirect systemd-journal-remote.service: name: systemd-journal-remote.service source: systemd state: inactive status: indirect systemd-journal-upload.service: name: systemd-journal-upload.service source: systemd state: inactive status: disabled systemd-journald.service: name: systemd-journald.service source: systemd state: running status: static systemd-journald@.service: name: systemd-journald@.service source: systemd state: unknown status: static systemd-kexec.service: name: systemd-kexec.service source: systemd state: inactive status: static systemd-localed.service: name: systemd-localed.service source: systemd state: inactive status: static systemd-logind.service: name: systemd-logind.service source: systemd state: running status: static systemd-machine-id-commit.service: name: systemd-machine-id-commit.service source: systemd state: stopped status: static systemd-modules-load.service: name: systemd-modules-load.service source: systemd state: stopped status: static systemd-network-generator.service: name: systemd-network-generator.service source: systemd state: stopped status: enabled systemd-pcrfs-root.service: name: systemd-pcrfs-root.service source: systemd state: inactive status: static systemd-pcrfs@.service: name: systemd-pcrfs@.service source: systemd state: unknown status: static systemd-pcrmachine.service: name: systemd-pcrmachine.service source: systemd state: stopped status: static systemd-pcrphase-initrd.service: name: systemd-pcrphase-initrd.service source: systemd state: stopped status: static systemd-pcrphase-sysinit.service: name: systemd-pcrphase-sysinit.service source: systemd state: stopped status: static systemd-pcrphase.service: name: systemd-pcrphase.service source: systemd state: stopped status: static systemd-poweroff.service: name: systemd-poweroff.service source: systemd state: inactive status: static systemd-pstore.service: name: systemd-pstore.service source: systemd state: stopped status: enabled systemd-quotacheck.service: name: systemd-quotacheck.service source: systemd state: stopped status: static systemd-random-seed.service: name: systemd-random-seed.service source: systemd state: stopped status: static systemd-reboot.service: name: systemd-reboot.service source: systemd state: inactive status: static systemd-remount-fs.service: name: systemd-remount-fs.service source: systemd state: stopped status: enabled-runtime systemd-repart.service: name: systemd-repart.service source: systemd state: stopped status: masked systemd-rfkill.service: name: systemd-rfkill.service source: systemd state: stopped status: static systemd-suspend-then-hibernate.service: name: systemd-suspend-then-hibernate.service source: systemd state: inactive status: static systemd-suspend.service: name: 
systemd-suspend.service source: systemd state: inactive status: static systemd-sysctl.service: name: systemd-sysctl.service source: systemd state: stopped status: static systemd-sysext.service: name: systemd-sysext.service source: systemd state: stopped status: disabled systemd-sysupdate-reboot.service: name: systemd-sysupdate-reboot.service source: systemd state: inactive status: indirect systemd-sysupdate.service: name: systemd-sysupdate.service source: systemd state: inactive status: indirect systemd-sysusers.service: name: systemd-sysusers.service source: systemd state: stopped status: static systemd-timedated.service: name: systemd-timedated.service source: systemd state: inactive status: static systemd-timesyncd.service: name: systemd-timesyncd.service source: systemd state: stopped status: not-found systemd-tmpfiles-clean.service: name: systemd-tmpfiles-clean.service source: systemd state: stopped status: static systemd-tmpfiles-setup-dev.service: name: systemd-tmpfiles-setup-dev.service source: systemd state: stopped status: static systemd-tmpfiles-setup.service: name: systemd-tmpfiles-setup.service source: systemd state: stopped status: static systemd-tmpfiles.service: name: systemd-tmpfiles.service source: systemd state: stopped status: not-found systemd-udev-settle.service: name: systemd-udev-settle.service source: systemd state: stopped status: static systemd-udev-trigger.service: name: systemd-udev-trigger.service source: systemd state: stopped status: static systemd-udevd.service: name: systemd-udevd.service source: systemd state: running status: static systemd-update-done.service: name: systemd-update-done.service source: systemd state: stopped status: static systemd-update-utmp-runlevel.service: name: systemd-update-utmp-runlevel.service source: systemd state: stopped status: static systemd-update-utmp.service: name: systemd-update-utmp.service source: systemd state: stopped status: static systemd-user-sessions.service: name: systemd-user-sessions.service source: systemd state: stopped status: static systemd-vconsole-setup.service: name: systemd-vconsole-setup.service source: systemd state: stopped status: static systemd-volatile-root.service: name: systemd-volatile-root.service source: systemd state: inactive status: static systemd-zram-setup@.service: name: systemd-zram-setup@.service source: systemd state: unknown status: static teamd@.service: name: teamd@.service source: systemd state: unknown status: static unbound-anchor.service: name: unbound-anchor.service source: systemd state: stopped status: static user-runtime-dir@.service: name: user-runtime-dir@.service source: systemd state: unknown status: static user-runtime-dir@0.service: name: user-runtime-dir@0.service source: systemd state: stopped status: active user-runtime-dir@1000.service: name: user-runtime-dir@1000.service source: systemd state: stopped status: active user@.service: name: user@.service source: systemd state: unknown status: static user@0.service: name: user@0.service source: systemd state: running status: active user@1000.service: name: user@1000.service source: systemd state: running status: active vgauthd.service: name: vgauthd.service source: systemd state: stopped status: enabled vmtoolsd.service: name: vmtoolsd.service source: systemd state: stopped status: enabled wait-for-primary-ip.service: name: wait-for-primary-ip.service source: systemd state: stopped status: enabled unsafe_vars: ansible_connection: ssh ansible_host: 38.102.83.110 ansible_port: 22 ansible_python_interpreter: auto 
ansible_user: core cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_build_images_output: {} cifmw_dlrn_report_result: false cifmw_edpm_telemetry_enabled_exporters: - podman_exporter - openstack_network_exporter cifmw_extras: - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/multinode-ci.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/horizon.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. src_dir }}/ci/scenarios/{{ watcher_scenario }}.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. src_dir }}/ci/tests/watcher-tempest.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '123456789' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_operator_build_output: operators: openstack-operator: git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9 git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator image: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9 image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9 image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9 watcher-operator: git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d cifmw_test_operator_tempest_external_plugin: - changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin repository: https://opendev.org/openstack/watcher-tempest-plugin.git cifmw_test_operator_tempest_image_tag: watcher_latest cifmw_test_operator_tempest_namespace: '{{ content_provider_os_registry_url | split(''/'') | last }}' cifmw_test_operator_tempest_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_openstack: false cifmw_update_containers_org: podified-epoxy-centos9 cifmw_update_containers_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_tag: watcher_latest cifmw_update_containers_watcher: true cifmw_use_libvirt: false cifmw_zuul_target_host: controller content_provider_dlrn_md5_hash: '' content_provider_os_registry_url: 38.102.83.53:5001/podified-epoxy-centos9 content_provider_registry_ip: 38.102.83.53 crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: compute-0: networks: default: ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 tenant: config_nm: false ip: 172.19.0.100 compute-1: networks: default: ip: 192.168.122.101 internal-api: config_nm: false ip: 172.17.0.101 storage: config_nm: false ip: 172.18.0.101 tenant: 
config_nm: false ip: 172.19.0.101 controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 enable_ramdisk: true fetch_dlrn_hash: false nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: bfd057b4-b43d-4dc5-bc10-e91bf10a649b host_id: 7be6eb536a89b1266edff7cfa16e93a8ea0da5df2cfadeeb194a3ffc interface_ip: 38.102.83.110 label: coreos-crc-extracted-2-39-0-3xl private_ipv4: 38.102.83.110 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.110 public_ipv6: '' region: RegionOne slot: null push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true watcher_scenario: edpm-no-notifications watcher_services_tag: watcher_latest watcher_tempest_max_microversion: '1.4' zuul_log_collection: false watcher_scenario: edpm-no-notifications watcher_services_tag: watcher_latest watcher_tempest_max_microversion: '1.4' zuul: _inheritance_path: - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' ansible_version: '8' attempts: 1 branch: main build: 9ce4c11f9f6a4904bf6148a8276a3232 build_refs: - branch: main change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null buildset: f9416ac601264548b137ce1f44fe627c buildset_refs: - branch: main change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 child_jobs: [] commit_id: 14377136e67c9cd67507a059bfde2f19f140387d event_id: 
7dde6e80-a2f2-11f0-83f1-b4af7183f5ac executor: hostname: ze01.softwarefactory-project.io inventory_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/inventory.yaml log_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/logs result_data_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/results.json src_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/src work_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work items: - branch: main change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null job: watcher-operator-validation-epoxy-ocp4-16 jobtags: [] max_attempts: 1 message: W1dJUF0gTW92ZSByYWJiaXRtcSBub3RpZmljYXRpb25zIHF1ZXVlIHRvIGNvbnRyb2xwbGFuZSBsZXZlbAoKTW92ZSByYWJiaXRtcSBub3RpZmljYXRpb25zIHF1ZXVlIGZyb20gZW5hYmxpbmcgYXQgbm92YS9jaW5kZXIvd2F0Y2hlciBsZXZlbCB0byBvcGVuc3RhY2sgY29udHJvbHBsYW5lIGxldmVsIGFmdGVyIHRoYXQgdXNhZ2UgaXMgYXZhaWxhYmxlIHNpbmNlIGh0dHBzOi8vZ2l0aHViLmNvbS9vcGVuc3RhY2stazhzLW9wZXJhdG9ycy9vcGVuc3RhY2stb3BlcmF0b3IvcHVsbC8xNTkx patchset: 14377136e67c9cd67507a059bfde2f19f140387d pipeline: github-check playbook_context: playbook_projects: trusted/project_0/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 941f6f7666fdff0145523beb29ceda8db25c234c trusted/project_1/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df trusted/project_2/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d207d5ad1c5824d6db58c2eb5935a8b36674cbe4 trusted/project_3/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 untrusted/project_0/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 untrusted/project_1/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 941f6f7666fdff0145523beb29ceda8db25c234c untrusted/project_2/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df untrusted/project_3/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d207d5ad1c5824d6db58c2eb5935a8b36674cbe4 playbooks: - path: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks/edpm/run.yml roles: - checkout: main checkout_description: playbook branch link_name: ansible/playbook_0/role_0/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_0/role_0/ci-framework/roles - 
checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_1/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_0/role_1/config/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_2/zuul-jobs link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_0/role_2/zuul-jobs/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_3/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_0/role_3/rdo-jobs/roles post_review: false project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator projects: github.com/crc-org/crc-cloud: canonical_hostname: github.com canonical_name: github.com/crc-org/crc-cloud checkout: main checkout_description: project override ref commit: f6ed2f2d118884a075895bbf954ff6000e540430 name: crc-org/crc-cloud required: true short_name: crc-cloud src_dir: src/github.com/crc-org/crc-cloud github.com/openstack-k8s-operators/ci-framework: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main checkout_description: zuul branch commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 name: openstack-k8s-operators/ci-framework required: true short_name: ci-framework src_dir: src/github.com/openstack-k8s-operators/ci-framework github.com/openstack-k8s-operators/edpm-ansible: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/edpm-ansible checkout: main checkout_description: zuul branch commit: 95aa63de3182faad63a69301d101debad3efc936 name: openstack-k8s-operators/edpm-ansible required: true short_name: edpm-ansible src_dir: src/github.com/openstack-k8s-operators/edpm-ansible github.com/openstack-k8s-operators/infra-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/infra-operator checkout: main checkout_description: zuul branch commit: 2b5048bbcae44dfeaacbb43830318ca45c13f182 name: openstack-k8s-operators/infra-operator required: true short_name: infra-operator src_dir: src/github.com/openstack-k8s-operators/infra-operator github.com/openstack-k8s-operators/install_yamls: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/install_yamls checkout: main checkout_description: zuul branch commit: bb26118ddc70016cbd2118a0b0a35d5f6ab9c343 name: openstack-k8s-operators/install_yamls required: true short_name: install_yamls src_dir: src/github.com/openstack-k8s-operators/install_yamls github.com/openstack-k8s-operators/openstack-baremetal-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-baremetal-operator checkout: main checkout_description: zuul branch commit: 3bf7652f010ead15ac2d2fec7e3b71c442b8fb8d name: openstack-k8s-operators/openstack-baremetal-operator required: true short_name: openstack-baremetal-operator src_dir: src/github.com/openstack-k8s-operators/openstack-baremetal-operator github.com/openstack-k8s-operators/openstack-must-gather: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-must-gather checkout: main checkout_description: zuul branch commit: 748dff8508cbb49e00426d46a4487b9f4c0b0096 name: 
openstack-k8s-operators/openstack-must-gather required: true short_name: openstack-must-gather src_dir: src/github.com/openstack-k8s-operators/openstack-must-gather github.com/openstack-k8s-operators/openstack-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-operator checkout: main checkout_description: zuul branch commit: 245af87e94976809f2023f59c19dffb95df97ed9 name: openstack-k8s-operators/openstack-operator required: true short_name: openstack-operator src_dir: src/github.com/openstack-k8s-operators/openstack-operator github.com/openstack-k8s-operators/repo-setup: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/repo-setup checkout: main checkout_description: zuul branch commit: 37b10946c6a10f9fa26c13305f06bfd6867e723f name: openstack-k8s-operators/repo-setup required: true short_name: repo-setup src_dir: src/github.com/openstack-k8s-operators/repo-setup github.com/openstack-k8s-operators/watcher-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator checkout: main checkout_description: zuul branch commit: 14377136e67c9cd67507a059bfde2f19f140387d name: openstack-k8s-operators/watcher-operator required: false short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator opendev.org/zuul/zuul-jobs: canonical_hostname: opendev.org canonical_name: opendev.org/zuul/zuul-jobs checkout: master checkout_description: project default branch commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df name: zuul/zuul-jobs required: true short_name: zuul-jobs src_dir: src/opendev.org/zuul/zuul-jobs review.rdoproject.org/config: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/config checkout: master checkout_description: project default branch commit: 941f6f7666fdff0145523beb29ceda8db25c234c name: config required: true short_name: config src_dir: src/review.rdoproject.org/config ref: refs/pull/287/head resources: {} tenant: rdoproject.org timeout: 10800 topic: null voting: true zuul_execution_branch: main zuul_execution_canonical_name_and_path: github.com/openstack-k8s-operators/ci-framework/ci/playbooks/e2e-collect-logs.yml zuul_execution_phase: post zuul_execution_phase_index: '0' zuul_execution_trusted: 'False' zuul_log_collection: false zuul_success: 'False' zuul_will_retry: 'False' inventory_dir: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/post_playbook_0 inventory_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/post_playbook_0/inventory.yaml inventory_hostname: controller inventory_hostname_short: controller logfiles_dest_dir: /home/zuul/ci-framework-data/logs/2025-10-06_21-12 module_setup: true nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: e795da60-c8d6-4446-ba72-4fddfe4bf7ea host_id: 5519e7a0ee5dc826795d295efc9c908d171b61deb9bf71b1016f861f interface_ip: 38.102.83.51 label: cloud-centos-9-stream-tripleo-medium private_ipv4: 38.102.83.51 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.51 public_ipv6: '' region: RegionOne slot: null omit: __omit_place_holder__7c6be8c090d39d526a4b5005be5e95dd82bdf54e openstack_namespace: openstack param_dir: changed: false failed: false stat: atime: 1759785108.2661083 attr_flags: '' attributes: [] block_size: 4096 blocks: 0 charset: binary ctime: 1759785082.7693696 dev: 64513 device_type: 0 executable: true exists: true gid: 1000 gr_name: zuul inode: 67177381 isblk: false ischr: false 
isdir: true isfifo: false isgid: false islnk: false isreg: false issock: false isuid: false mimetype: inode/directory mode: '0755' mtime: 1759785082.7693696 nlink: 2 path: /home/zuul/ci-framework-data/artifacts/parameters pw_name: zuul readable: true rgrp: true roth: true rusr: true size: 120 uid: 1000 version: '2370786047' wgrp: false woth: false writeable: true wusr: true xgrp: true xoth: true xusr: true play_hosts: *id002 playbook_dir: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true role_name: artifacts role_names: *id003 role_path: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/roles/artifacts role_uuid: fa163ec2-ffbe-e6cf-bc15-00000000001c unsafe_vars: ansible_connection: ssh ansible_host: 38.102.83.51 ansible_port: 22 ansible_python_interpreter: auto ansible_user: zuul cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_build_images_output: {} cifmw_dlrn_report_result: false cifmw_edpm_telemetry_enabled_exporters: - podman_exporter - openstack_network_exporter cifmw_extras: - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/multinode-ci.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/horizon.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. src_dir }}/ci/scenarios/{{ watcher_scenario }}.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. 
src_dir }}/ci/tests/watcher-tempest.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '123456789' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_operator_build_output: operators: openstack-operator: git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9 git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator image: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9 image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9 image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9 watcher-operator: git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d cifmw_test_operator_tempest_external_plugin: - changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin repository: https://opendev.org/openstack/watcher-tempest-plugin.git cifmw_test_operator_tempest_image_tag: watcher_latest cifmw_test_operator_tempest_namespace: '{{ content_provider_os_registry_url | split(''/'') | last }}' cifmw_test_operator_tempest_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_openstack: false cifmw_update_containers_org: podified-epoxy-centos9 cifmw_update_containers_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_tag: watcher_latest cifmw_update_containers_watcher: true cifmw_use_libvirt: false cifmw_zuul_target_host: controller content_provider_dlrn_md5_hash: '' content_provider_os_registry_url: 38.102.83.53:5001/podified-epoxy-centos9 content_provider_registry_ip: 38.102.83.53 crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: compute-0: networks: default: ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 tenant: config_nm: false ip: 172.19.0.100 compute-1: networks: default: ip: 192.168.122.101 internal-api: config_nm: false ip: 172.17.0.101 storage: config_nm: false ip: 172.18.0.101 tenant: config_nm: false ip: 172.19.0.101 controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 enable_ramdisk: true fetch_dlrn_hash: false nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: e795da60-c8d6-4446-ba72-4fddfe4bf7ea host_id: 5519e7a0ee5dc826795d295efc9c908d171b61deb9bf71b1016f861f interface_ip: 38.102.83.51 label: cloud-centos-9-stream-tripleo-medium private_ipv4: 
38.102.83.51 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.51 public_ipv6: '' region: RegionOne slot: null push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true watcher_scenario: edpm-no-notifications watcher_services_tag: watcher_latest watcher_tempest_max_microversion: '1.4' zuul_log_collection: false watcher_scenario: edpm-no-notifications watcher_services_tag: watcher_latest watcher_tempest_max_microversion: '1.4' zuul: _inheritance_path: - '' - '' - '' - '' - '' - '' - '' - '' - '' - '' ansible_version: '8' attempts: 1 branch: main build: 9ce4c11f9f6a4904bf6148a8276a3232 build_refs: - branch: main change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null buildset: f9416ac601264548b137ce1f44fe627c buildset_refs: - branch: main change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 child_jobs: [] commit_id: 14377136e67c9cd67507a059bfde2f19f140387d event_id: 7dde6e80-a2f2-11f0-83f1-b4af7183f5ac executor: hostname: ze01.softwarefactory-project.io inventory_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/inventory.yaml log_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/logs result_data_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/results.json src_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/src work_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work items: - branch: main change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since 
https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null job: watcher-operator-validation-epoxy-ocp4-16 jobtags: [] max_attempts: 1 message: W1dJUF0gTW92ZSByYWJiaXRtcSBub3RpZmljYXRpb25zIHF1ZXVlIHRvIGNvbnRyb2xwbGFuZSBsZXZlbAoKTW92ZSByYWJiaXRtcSBub3RpZmljYXRpb25zIHF1ZXVlIGZyb20gZW5hYmxpbmcgYXQgbm92YS9jaW5kZXIvd2F0Y2hlciBsZXZlbCB0byBvcGVuc3RhY2sgY29udHJvbHBsYW5lIGxldmVsIGFmdGVyIHRoYXQgdXNhZ2UgaXMgYXZhaWxhYmxlIHNpbmNlIGh0dHBzOi8vZ2l0aHViLmNvbS9vcGVuc3RhY2stazhzLW9wZXJhdG9ycy9vcGVuc3RhY2stb3BlcmF0b3IvcHVsbC8xNTkx patchset: 14377136e67c9cd67507a059bfde2f19f140387d pipeline: github-check playbook_context: playbook_projects: trusted/project_0/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 941f6f7666fdff0145523beb29ceda8db25c234c trusted/project_1/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df trusted/project_2/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d207d5ad1c5824d6db58c2eb5935a8b36674cbe4 trusted/project_3/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 untrusted/project_0/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 untrusted/project_1/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 941f6f7666fdff0145523beb29ceda8db25c234c untrusted/project_2/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df untrusted/project_3/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d207d5ad1c5824d6db58c2eb5935a8b36674cbe4 playbooks: - path: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks/edpm/run.yml roles: - checkout: main checkout_description: playbook branch link_name: ansible/playbook_0/role_0/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_0/role_0/ci-framework/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_1/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_0/role_1/config/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_2/zuul-jobs link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_0/role_2/zuul-jobs/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_3/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_0/role_3/rdo-jobs/roles post_review: false project: canonical_hostname: github.com canonical_name: 
github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator projects: github.com/crc-org/crc-cloud: canonical_hostname: github.com canonical_name: github.com/crc-org/crc-cloud checkout: main checkout_description: project override ref commit: f6ed2f2d118884a075895bbf954ff6000e540430 name: crc-org/crc-cloud required: true short_name: crc-cloud src_dir: src/github.com/crc-org/crc-cloud github.com/openstack-k8s-operators/ci-framework: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main checkout_description: zuul branch commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 name: openstack-k8s-operators/ci-framework required: true short_name: ci-framework src_dir: src/github.com/openstack-k8s-operators/ci-framework github.com/openstack-k8s-operators/edpm-ansible: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/edpm-ansible checkout: main checkout_description: zuul branch commit: 95aa63de3182faad63a69301d101debad3efc936 name: openstack-k8s-operators/edpm-ansible required: true short_name: edpm-ansible src_dir: src/github.com/openstack-k8s-operators/edpm-ansible github.com/openstack-k8s-operators/infra-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/infra-operator checkout: main checkout_description: zuul branch commit: 2b5048bbcae44dfeaacbb43830318ca45c13f182 name: openstack-k8s-operators/infra-operator required: true short_name: infra-operator src_dir: src/github.com/openstack-k8s-operators/infra-operator github.com/openstack-k8s-operators/install_yamls: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/install_yamls checkout: main checkout_description: zuul branch commit: bb26118ddc70016cbd2118a0b0a35d5f6ab9c343 name: openstack-k8s-operators/install_yamls required: true short_name: install_yamls src_dir: src/github.com/openstack-k8s-operators/install_yamls github.com/openstack-k8s-operators/openstack-baremetal-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-baremetal-operator checkout: main checkout_description: zuul branch commit: 3bf7652f010ead15ac2d2fec7e3b71c442b8fb8d name: openstack-k8s-operators/openstack-baremetal-operator required: true short_name: openstack-baremetal-operator src_dir: src/github.com/openstack-k8s-operators/openstack-baremetal-operator github.com/openstack-k8s-operators/openstack-must-gather: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-must-gather checkout: main checkout_description: zuul branch commit: 748dff8508cbb49e00426d46a4487b9f4c0b0096 name: openstack-k8s-operators/openstack-must-gather required: true short_name: openstack-must-gather src_dir: src/github.com/openstack-k8s-operators/openstack-must-gather github.com/openstack-k8s-operators/openstack-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-operator checkout: main checkout_description: zuul branch commit: 245af87e94976809f2023f59c19dffb95df97ed9 name: openstack-k8s-operators/openstack-operator required: true short_name: openstack-operator src_dir: src/github.com/openstack-k8s-operators/openstack-operator github.com/openstack-k8s-operators/repo-setup: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/repo-setup checkout: main 
checkout_description: zuul branch commit: 37b10946c6a10f9fa26c13305f06bfd6867e723f name: openstack-k8s-operators/repo-setup required: true short_name: repo-setup src_dir: src/github.com/openstack-k8s-operators/repo-setup github.com/openstack-k8s-operators/watcher-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator checkout: main checkout_description: zuul branch commit: 14377136e67c9cd67507a059bfde2f19f140387d name: openstack-k8s-operators/watcher-operator required: false short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator opendev.org/zuul/zuul-jobs: canonical_hostname: opendev.org canonical_name: opendev.org/zuul/zuul-jobs checkout: master checkout_description: project default branch commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df name: zuul/zuul-jobs required: true short_name: zuul-jobs src_dir: src/opendev.org/zuul/zuul-jobs review.rdoproject.org/config: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/config checkout: master checkout_description: project default branch commit: 941f6f7666fdff0145523beb29ceda8db25c234c name: config required: true short_name: config src_dir: src/review.rdoproject.org/config ref: refs/pull/287/head resources: {} tenant: rdoproject.org timeout: 10800 topic: null voting: true zuul_change_list: - watcher-operator zuul_execution_branch: main zuul_execution_canonical_name_and_path: github.com/openstack-k8s-operators/ci-framework/ci/playbooks/e2e-collect-logs.yml zuul_execution_phase: post zuul_execution_phase_index: '0' zuul_execution_trusted: 'False' zuul_log_collection: false zuul_success: 'False' zuul_will_retry: 'False' home/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible-facts.yml0000644000175000017500000005110115071030272025574 0ustar zuulzuul_ansible_facts_gathered: true all_ipv4_addresses: - 38.102.83.51 all_ipv6_addresses: - fe80::f816:3eff:fe6f:820 ansible_local: {} apparmor: status: disabled architecture: x86_64 bios_date: 04/01/2014 bios_vendor: SeaBIOS bios_version: 1.15.0-1 board_asset_tag: NA board_name: NA board_serial: NA board_vendor: NA board_version: NA chassis_asset_tag: NA chassis_serial: NA chassis_vendor: QEMU chassis_version: pc-i440fx-6.2 cifmw_path: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 crc_ci_bootstrap_instance_default_net_config: mtu: '1500' range: 192.168.122.0/24 router_net: '' transparent: true crc_ci_bootstrap_instance_nm_vlan_networks: - key: internal-api value: ip: 172.17.0.5 - key: storage value: ip: 172.18.0.5 - key: tenant value: ip: 172.19.0.5 crc_ci_bootstrap_instance_parent_port_create_yaml: admin_state_up: true allowed_address_pairs: [] binding_host_id: null binding_profile: {} binding_vif_details: {} binding_vif_type: null binding_vnic_type: normal created_at: '2025-10-06T21:00:58Z' data_plane_status: null description: '' device_id: '' device_owner: '' device_profile: null dns_assignment: - fqdn: host-192-168-122-10.openstacklocal. 
hostname: host-192-168-122-10 ip_address: 192.168.122.10 dns_domain: '' dns_name: '' extra_dhcp_opts: [] fixed_ips: - ip_address: 192.168.122.10 subnet_id: 139175d8-a9e6-4e3b-931b-a0af30583742 hardware_offload_type: null hints: '' id: 81f69e80-a4c7-43d8-ad59-1b24fcb3acbf ip_allocation: immediate mac_address: fa:16:3e:36:76:9b name: crc-bfd057b4-b43d-4dc5-bc10-e91bf10a649b network_id: febb7485-9e12-4711-8dc7-e207293e25de numa_affinity_policy: null port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 propagate_uplink_status: null qos_network_policy_id: null qos_policy_id: null resource_request: null revision_number: 1 security_group_ids: [] status: DOWN tags: [] trunk_details: null trusted: null updated_at: '2025-10-06T21:00:58Z' crc_ci_bootstrap_network_name: zuul-ci-net-9ce4c11f crc_ci_bootstrap_networks_out: compute-0: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.100/24 mac: fa:16:3e:1a:6b:7b mtu: '1500' internal-api: iface: eth1.20 ip: 172.17.0.100/24 mac: 52:54:00:ec:df:aa mtu: '1496' parent_iface: eth1 vlan: 20 storage: iface: eth1.21 ip: 172.18.0.100/24 mac: 52:54:00:e1:d1:4a mtu: '1496' parent_iface: eth1 vlan: 21 tenant: iface: eth1.22 ip: 172.19.0.100/24 mac: 52:54:00:6e:fd:3e mtu: '1496' parent_iface: eth1 vlan: 22 compute-1: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.101/24 mac: fa:16:3e:cb:47:1e mtu: '1500' internal-api: iface: eth1.20 ip: 172.17.0.101/24 mac: 52:54:00:69:15:f1 mtu: '1496' parent_iface: eth1 vlan: 20 storage: iface: eth1.21 ip: 172.18.0.101/24 mac: 52:54:00:c3:9b:d0 mtu: '1496' parent_iface: eth1 vlan: 21 tenant: iface: eth1.22 ip: 172.19.0.101/24 mac: 52:54:00:1d:8d:c8 mtu: '1496' parent_iface: eth1 vlan: 22 controller: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.11/24 mac: fa:16:3e:fc:47:4f mtu: '1500' crc: default: connection: ci-private-network gw: 192.168.122.1 iface: ens7 ip: 192.168.122.10/24 mac: fa:16:3e:36:76:9b mtu: '1500' internal-api: connection: ci-private-network-20 iface: ens7.20 ip: 172.17.0.5/24 mac: 52:54:00:aa:79:c3 mtu: '1496' parent_iface: ens7 vlan: 20 storage: connection: ci-private-network-21 iface: ens7.21 ip: 172.18.0.5/24 mac: 52:54:00:bd:b2:92 mtu: '1496' parent_iface: ens7 vlan: 21 tenant: connection: ci-private-network-22 iface: ens7.22 ip: 172.19.0.5/24 mac: 52:54:00:86:1f:43 mtu: '1496' parent_iface: ens7 vlan: 22 crc_ci_bootstrap_private_net_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2025-10-06T20:59:40Z' description: '' dns_domain: '' id: febb7485-9e12-4711-8dc7-e207293e25de ipv4_address_scope: null ipv6_address_scope: null is_default: false is_vlan_qinq: null is_vlan_transparent: true l2_adjacency: true mtu: 1500 name: zuul-ci-net-9ce4c11f port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 provider:network_type: null provider:physical_network: null provider:segmentation_id: null qos_policy_id: null revision_number: 1 router:external: false segments: null shared: false status: ACTIVE subnets: [] tags: [] updated_at: '2025-10-06T20:59:40Z' crc_ci_bootstrap_private_router_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2025-10-06T20:59:45Z' description: '' enable_ndp_proxy: null external_gateway_info: null flavor_id: null id: 4c5c07da-6180-4e43-8bfc-7faf50c6c9a5 name: zuul-ci-subnet-router-9ce4c11f project_id: 4b633c451ac74233be3721a3635275e5 
revision_number: 1 routes: [] status: ACTIVE tags: [] tenant_id: 4b633c451ac74233be3721a3635275e5 updated_at: '2025-10-06T20:59:45Z' crc_ci_bootstrap_private_subnet_create_yaml: allocation_pools: - end: 192.168.122.254 start: 192.168.122.2 cidr: 192.168.122.0/24 created_at: '2025-10-06T20:59:43Z' description: '' dns_nameservers: [] dns_publish_fixed_ip: null enable_dhcp: false gateway_ip: 192.168.122.1 host_routes: [] id: 139175d8-a9e6-4e3b-931b-a0af30583742 ip_version: 4 ipv6_address_mode: null ipv6_ra_mode: null name: zuul-ci-subnet-9ce4c11f network_id: febb7485-9e12-4711-8dc7-e207293e25de project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 0 segment_id: null service_types: [] subnetpool_id: null tags: [] updated_at: '2025-10-06T20:59:43Z' crc_ci_bootstrap_provider_dns: - 199.204.44.24 - 199.204.47.54 crc_ci_bootstrap_router_name: zuul-ci-subnet-router-9ce4c11f crc_ci_bootstrap_subnet_name: zuul-ci-subnet-9ce4c11f date_time: date: '2025-10-06' day: '06' epoch: '1759785143' epoch_int: '1759785143' hour: '21' iso8601: '2025-10-06T21:12:23Z' iso8601_basic: 20251006T211223407560 iso8601_basic_short: 20251006T211223 iso8601_micro: '2025-10-06T21:12:23.407560Z' minute: '12' month: '10' second: '23' time: '21:12:23' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Monday weekday_number: '1' weeknumber: '40' year: '2025' default_ipv4: address: 38.102.83.51 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: fa:16:3e:6f:08:20 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether default_ipv6: {} device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 masters: {} uuids: sr0: - 2025-10-06-20-56-29-00 vda1: - 1631a6ad-43b8-436d-ae76-16fa14b94458 devices: sr0: holders: [] host: '' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2025-10-06-20-56-29-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '0' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: - 1631a6ad-43b8-436d-ae76-16fa14b94458 sectors: '83883999' sectorsize: 512 size: 40.00 GB start: '2048' uuid: 1631a6ad-43b8-436d-ae76-16fa14b94458 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '83886080' sectorsize: '512' size: 40.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 discovered_interpreter_python: /usr/bin/python3 distribution: CentOS distribution_file_parsed: true distribution_file_path: /etc/centos-release distribution_file_variety: CentOS distribution_major_version: '9' distribution_release: Stream distribution_version: '9' dns: nameservers: - 192.168.122.10 - 199.204.44.24 - 199.204.47.54 domain: '' effective_group_id: 1000 effective_user_id: 1000 env: ANSIBLE_LOG_PATH: /home/zuul/ci-framework-data/logs/e2e-collect-logs-must-gather.log BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' DEBUGINFOD_URLS: 'https://debuginfod.centos.org/ ' HOME: /home/zuul LANG: en_US.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: 
/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 35354 22 SSH_CONNECTION: 38.102.83.114 35354 38.102.83.51 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '12' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.51 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::f816:3eff:fe6f:820 prefix: '64' scope: link macaddress: fa:16:3e:6f:08:20 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether fibre_channel_wwn: [] fips: false form_factor: Other fqdn: controller gather_subset: - min hostname: controller hostnqn: nqn.2014-08.org.nvmexpress:uuid:2f7d2450-18ac-43a6-80ee-9caa4a7736e0 interfaces: - eth0 - lo is_chroot: false iscsi_iqn: '' kernel: 5.14.0-620.el9.x86_64 kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Sep 26 01:13:23 UTC 2025' lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] 
rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback loadavg: 15m: 0.03 1m: 0.14 5m: 0.09 locally_reachable_ips: ipv4: - 38.102.83.51 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:fe6f:820 lsb: {} lvm: N/A machine: x86_64 machine_id: 42833e1b511a402df82cb9cb2fc36491 memfree_mb: 3293 memory_mb: nocache: free: 3444 used: 211 real: free: 3293 total: 3655 used: 362 swap: cached: 0 free: 0 total: 0 used: 0 memtotal_mb: 3655 module_setup: true mounts: - block_available: 9966449 block_size: 4096 block_total: 10469115 block_used: 502666 device: /dev/vda1 fstype: xfs inode_available: 20916775 inode_total: 20970992 inode_used: 54217 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 40822575104 size_total: 42881495040 uuid: 1631a6ad-43b8-436d-ae76-16fa14b94458 nodename: controller os_family: RedHat pkg_mgr: dnf proc_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor processor_cores: 1 processor_count: 2 processor_nproc: 2 processor_threads_per_core: 1 processor_vcpus: 2 product_name: OpenStack Nova product_serial: NA product_uuid: NA product_version: 26.2.1 python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 23 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 23 - final - 0 python_version: 3.9.23 real_group_id: 1000 real_user_id: 1000 selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted selinux_python_present: true service_mgr: systemd ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBDpEwzeDGLwNlfP3Up6vCxCw7kSSu0AiDUvDH/J+EepxMPGLLpzT0wX+lEXL9GArqfNU/UBUmiiwh9dZO9tQ5bk= 
ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIPrlPStzDnFCaI6YFfPj0aQKsBPAAZFkT8awb2RrAe7g ssh_host_key_ed25519_public_keytype: ssh-ed25519 ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQCsnSYzB9ciMqmgm0l3UC4GvkzqKIRU4HQjm2Wmmz4ONumnWKBZyfZPUd4C1zWgntSd7/HfwyQS5GOnhobA5K/1o855yq/Qr6a2M0JVvxnLdxB089mymIDZ9Z5iXDsVHJNPHKuz3pjoZDbA5XzpQPsDbEeMHpBd0Yz5DQaAPYYN1wg4Wtq6PK86i2jV8qtVH7OnCkn06futt/HtJ4eADwKZV6cutqDHmuTqXwagLJ7PWTm0H9xAYR/Tsgd28krH/EIdcyHBACqdSrk6FWPOdZ1Q5PjVC0ZOHemQeiRhmYW5NgxnnEgSmoTMCyMKRYbVcMYeHKRBg/rXhLSbymoU+eF+Kza486CELgT9KG4Z0NTOmyzNu1ee8G0ZOaowjIQ8Gr6e15WUMLbskShDGqXlAnaRHOAQhBSGCkt0N9KMyGaBdFYVzJOgqi1erPoCN1pLe7Ljr44blAH6Yvp9H8Ji4mLuVYB7PmDHL0Mb4zkjqi/MU9Okx1escBZI4ASrkaXkp18= ssh_host_key_rsa_public_keytype: ssh-rsa swapfree_mb: 0 swaptotal_mb: 0 system: Linux system_capabilities: - '' system_capabilities_enforced: 'True' system_vendor: OpenStack Foundation uptime_seconds: 135 user_dir: /home/zuul user_gecos: '' user_gid: 1000 user_id: zuul user_shell: /bin/bash user_uid: 1000 userspace_architecture: x86_64 userspace_bits: '64' virtualization_role: guest virtualization_tech_guest: - openstack virtualization_tech_host: - kvm virtualization_type: openstack zuul_change_list: - watcher-operator home/zuul/zuul-output/logs/ci-framework-data/artifacts/install_yamls.sh0000644000175000017500000000055215071030123025544 0ustar zuulzuulexport BMO_SETUP=False export INSTALL_CERT_MANAGER=False export OUT=/home/zuul/ci-framework-data/artifacts/manifests export OUTPUT_DIR=/home/zuul/ci-framework-data/artifacts/edpm export CHECKOUT_FROM_OPENSTACK_REF=true export OPENSTACK_K8S_BRANCH=main export WATCHER_REPO=/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator export WATCHER_BRANCH= home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/0000755000175000017500000000000015071030235024330 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/cert-manager/0000755000175000017500000000000015071030352026675 5ustar zuulzuul././@LongLink0000644000000000000000000000015000000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/cert-manager/cert_manager_manifest.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/cert-manager/cert_manager_manifest.0000644000175000017500000163311415071030202033216 0ustar zuulzuul# Copyright 2022 The cert-manager Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
apiVersion: v1 kind: Namespace metadata: name: cert-manager --- # Source: cert-manager/templates/crds.yaml apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition metadata: name: certificaterequests.cert-manager.io labels: app: 'cert-manager' app.kubernetes.io/name: 'cert-manager' app.kubernetes.io/instance: 'cert-manager' # Generated labels app.kubernetes.io/version: "v1.14.4" spec: group: cert-manager.io names: kind: CertificateRequest listKind: CertificateRequestList plural: certificaterequests shortNames: - cr - crs singular: certificaterequest categories: - cert-manager scope: Namespaced versions: - name: v1 subresources: status: {} additionalPrinterColumns: - jsonPath: .status.conditions[?(@.type=="Approved")].status name: Approved type: string - jsonPath: .status.conditions[?(@.type=="Denied")].status name: Denied type: string - jsonPath: .status.conditions[?(@.type=="Ready")].status name: Ready type: string - jsonPath: .spec.issuerRef.name name: Issuer type: string - jsonPath: .spec.username name: Requestor type: string - jsonPath: .status.conditions[?(@.type=="Ready")].message name: Status priority: 1 type: string - jsonPath: .metadata.creationTimestamp description: CreationTimestamp is a timestamp representing the server time when this object was created. It is not guaranteed to be set in happens-before order across separate operations. Clients may not set this value. It is represented in RFC3339 form and is in UTC. name: Age type: date schema: openAPIV3Schema: description: "A CertificateRequest is used to request a signed certificate from one of the configured issuers. \n All fields within the CertificateRequest's `spec` are immutable after creation. A CertificateRequest will either succeed or fail, as denoted by its `Ready` status condition and its `status.failureTime` field. \n A CertificateRequest is a one-shot resource, meaning it represents a single point in time request for a certificate and cannot be re-used." type: object properties: apiVersion: description: 'APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources' type: string kind: description: 'Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds' type: string metadata: type: object spec: description: Specification of the desired state of the CertificateRequest resource. https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status type: object required: - issuerRef - request properties: duration: description: Requested 'duration' (i.e. lifetime) of the Certificate. Note that the issuer may choose to ignore the requested duration, just like any other requested attribute. type: string extra: description: Extra contains extra attributes of the user that created the CertificateRequest. Populated by the cert-manager webhook on creation and immutable. type: object additionalProperties: type: array items: type: string groups: description: Groups contains group membership of the user that created the CertificateRequest. Populated by the cert-manager webhook on creation and immutable. 
type: array items: type: string x-kubernetes-list-type: atomic isCA: description: "Requested basic constraints isCA value. Note that the issuer may choose to ignore the requested isCA value, just like any other requested attribute. \n NOTE: If the CSR in the `Request` field has a BasicConstraints extension, it must have the same isCA value as specified here. \n If true, this will automatically add the `cert sign` usage to the list of requested `usages`." type: boolean issuerRef: description: "Reference to the issuer responsible for issuing the certificate. If the issuer is namespace-scoped, it must be in the same namespace as the Certificate. If the issuer is cluster-scoped, it can be used from any namespace. \n The `name` field of the reference must always be specified." type: object required: - name properties: group: description: Group of the resource being referred to. type: string kind: description: Kind of the resource being referred to. type: string name: description: Name of the resource being referred to. type: string request: description: "The PEM-encoded X.509 certificate signing request to be submitted to the issuer for signing. \n If the CSR has a BasicConstraints extension, its isCA attribute must match the `isCA` value of this CertificateRequest. If the CSR has a KeyUsage extension, its key usages must match the key usages in the `usages` field of this CertificateRequest. If the CSR has a ExtKeyUsage extension, its extended key usages must match the extended key usages in the `usages` field of this CertificateRequest." type: string format: byte uid: description: UID contains the uid of the user that created the CertificateRequest. Populated by the cert-manager webhook on creation and immutable. type: string usages: description: "Requested key usages and extended key usages. \n NOTE: If the CSR in the `Request` field has uses the KeyUsage or ExtKeyUsage extension, these extensions must have the same values as specified here without any additional values. \n If unset, defaults to `digital signature` and `key encipherment`." type: array items: description: "KeyUsage specifies valid usage contexts for keys. See: https://tools.ietf.org/html/rfc5280#section-4.2.1.3 https://tools.ietf.org/html/rfc5280#section-4.2.1.12 \n Valid KeyUsage values are as follows: \"signing\", \"digital signature\", \"content commitment\", \"key encipherment\", \"key agreement\", \"data encipherment\", \"cert sign\", \"crl sign\", \"encipher only\", \"decipher only\", \"any\", \"server auth\", \"client auth\", \"code signing\", \"email protection\", \"s/mime\", \"ipsec end system\", \"ipsec tunnel\", \"ipsec user\", \"timestamping\", \"ocsp signing\", \"microsoft sgc\", \"netscape sgc\"" type: string enum: - signing - digital signature - content commitment - key encipherment - key agreement - data encipherment - cert sign - crl sign - encipher only - decipher only - any - server auth - client auth - code signing - email protection - s/mime - ipsec end system - ipsec tunnel - ipsec user - timestamping - ocsp signing - microsoft sgc - netscape sgc username: description: Username contains the name of the user that created the CertificateRequest. Populated by the cert-manager webhook on creation and immutable. type: string status: description: 'Status of the CertificateRequest. This is set and managed automatically. Read-only. 
More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status' type: object properties: ca: description: The PEM encoded X.509 certificate of the signer, also known as the CA (Certificate Authority). This is set on a best-effort basis by different issuers. If not set, the CA is assumed to be unknown/not available. type: string format: byte certificate: description: The PEM encoded X.509 certificate resulting from the certificate signing request. If not set, the CertificateRequest has either not been completed or has failed. More information on failure can be found by checking the `conditions` field. type: string format: byte conditions: description: List of status conditions to indicate the status of a CertificateRequest. Known condition types are `Ready`, `InvalidRequest`, `Approved` and `Denied`. type: array items: description: CertificateRequestCondition contains condition information for a CertificateRequest. type: object required: - status - type properties: lastTransitionTime: description: LastTransitionTime is the timestamp corresponding to the last status change of this condition. type: string format: date-time message: description: Message is a human readable description of the details of the last transition, complementing reason. type: string reason: description: Reason is a brief machine readable explanation for the condition's last transition. type: string status: description: Status of the condition, one of (`True`, `False`, `Unknown`). type: string enum: - "True" - "False" - Unknown type: description: Type of the condition, known values are (`Ready`, `InvalidRequest`, `Approved`, `Denied`). type: string x-kubernetes-list-map-keys: - type x-kubernetes-list-type: map failureTime: description: FailureTime stores the time that this CertificateRequest failed. This is used to influence garbage collection and back-off. type: string format: date-time served: true storage: true --- # Source: cert-manager/templates/crds.yaml apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition metadata: name: certificates.cert-manager.io labels: app: 'cert-manager' app.kubernetes.io/name: 'cert-manager' app.kubernetes.io/instance: 'cert-manager' # Generated labels app.kubernetes.io/version: "v1.14.4" spec: group: cert-manager.io names: kind: Certificate listKind: CertificateList plural: certificates shortNames: - cert - certs singular: certificate categories: - cert-manager scope: Namespaced versions: - name: v1 subresources: status: {} additionalPrinterColumns: - jsonPath: .status.conditions[?(@.type=="Ready")].status name: Ready type: string - jsonPath: .spec.secretName name: Secret type: string - jsonPath: .spec.issuerRef.name name: Issuer priority: 1 type: string - jsonPath: .status.conditions[?(@.type=="Ready")].message name: Status priority: 1 type: string - jsonPath: .metadata.creationTimestamp description: CreationTimestamp is a timestamp representing the server time when this object was created. It is not guaranteed to be set in happens-before order across separate operations. Clients may not set this value. It is represented in RFC3339 form and is in UTC. name: Age type: date schema: openAPIV3Schema: description: "A Certificate resource should be created to ensure an up to date and signed X.509 certificate is stored in the Kubernetes Secret resource named in `spec.secretName`. \n The stored certificate will be renewed before it expires (as configured by `spec.renewBefore`)." 
type: object properties: apiVersion: description: 'APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources' type: string kind: description: 'Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds' type: string metadata: type: object spec: description: Specification of the desired state of the Certificate resource. https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status type: object required: - issuerRef - secretName properties: additionalOutputFormats: description: "Defines extra output formats of the private key and signed certificate chain to be written to this Certificate's target Secret. \n This is an Alpha Feature and is only enabled with the `--feature-gates=AdditionalCertificateOutputFormats=true` option set on both the controller and webhook components." type: array items: description: CertificateAdditionalOutputFormat defines an additional output format of a Certificate resource. These contain supplementary data formats of the signed certificate chain and paired private key. type: object required: - type properties: type: description: Type is the name of the format type that should be written to the Certificate's target Secret. type: string enum: - DER - CombinedPEM commonName: description: "Requested common name X509 certificate subject attribute. More info: https://datatracker.ietf.org/doc/html/rfc5280#section-4.1.2.6 NOTE: TLS clients will ignore this value when any subject alternative name is set (see https://tools.ietf.org/html/rfc6125#section-6.4.4). \n Should have a length of 64 characters or fewer to avoid generating invalid CSRs. Cannot be set if the `literalSubject` field is set." type: string dnsNames: description: Requested DNS subject alternative names. type: array items: type: string duration: description: "Requested 'duration' (i.e. lifetime) of the Certificate. Note that the issuer may choose to ignore the requested duration, just like any other requested attribute. \n If unset, this defaults to 90 days. Minimum accepted duration is 1 hour. Value must be in units accepted by Go time.ParseDuration https://golang.org/pkg/time/#ParseDuration." type: string emailAddresses: description: Requested email subject alternative names. type: array items: type: string encodeUsagesInRequest: description: "Whether the KeyUsage and ExtKeyUsage extensions should be set in the encoded CSR. \n This option defaults to true, and should only be disabled if the target issuer does not support CSRs with these X509 KeyUsage/ ExtKeyUsage extensions." type: boolean ipAddresses: description: Requested IP address subject alternative names. type: array items: type: string isCA: description: "Requested basic constraints isCA value. The isCA value is used to set the `isCA` field on the created CertificateRequest resources. Note that the issuer may choose to ignore the requested isCA value, just like any other requested attribute. \n If true, this will automatically add the `cert sign` usage to the list of requested `usages`." 
type: boolean issuerRef: description: "Reference to the issuer responsible for issuing the certificate. If the issuer is namespace-scoped, it must be in the same namespace as the Certificate. If the issuer is cluster-scoped, it can be used from any namespace. \n The `name` field of the reference must always be specified." type: object required: - name properties: group: description: Group of the resource being referred to. type: string kind: description: Kind of the resource being referred to. type: string name: description: Name of the resource being referred to. type: string keystores: description: Additional keystore output formats to be stored in the Certificate's Secret. type: object properties: jks: description: JKS configures options for storing a JKS keystore in the `spec.secretName` Secret resource. type: object required: - create - passwordSecretRef properties: create: description: Create enables JKS keystore creation for the Certificate. If true, a file named `keystore.jks` will be created in the target Secret resource, encrypted using the password stored in `passwordSecretRef`. The keystore file will be updated immediately. If the issuer provided a CA certificate, a file named `truststore.jks` will also be created in the target Secret resource, encrypted using the password stored in `passwordSecretRef` containing the issuing Certificate Authority type: boolean passwordSecretRef: description: PasswordSecretRef is a reference to a key in a Secret resource containing the password used to encrypt the JKS keystore. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string pkcs12: description: PKCS12 configures options for storing a PKCS12 keystore in the `spec.secretName` Secret resource. type: object required: - create - passwordSecretRef properties: create: description: Create enables PKCS12 keystore creation for the Certificate. If true, a file named `keystore.p12` will be created in the target Secret resource, encrypted using the password stored in `passwordSecretRef`. The keystore file will be updated immediately. If the issuer provided a CA certificate, a file named `truststore.p12` will also be created in the target Secret resource, encrypted using the password stored in `passwordSecretRef` containing the issuing Certificate Authority type: boolean passwordSecretRef: description: PasswordSecretRef is a reference to a key in a Secret resource containing the password used to encrypt the PKCS12 keystore. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string profile: description: "Profile specifies the key and certificate encryption algorithms and the HMAC algorithm used to create the PKCS12 keystore. Default value is `LegacyRC2` for backward compatibility. \n If provided, allowed values are: `LegacyRC2`: Deprecated. Not supported by default in OpenSSL 3 or Java 20. `LegacyDES`: Less secure algorithm. 
Use this option for maximal compatibility. `Modern2023`: Secure algorithm. Use this option in case you have to always use secure algorithms (eg. because of company policy). Please note that the security of the algorithm is not that important in reality, because the unencrypted certificate and private key are also stored in the Secret." type: string enum: - LegacyRC2 - LegacyDES - Modern2023 literalSubject: description: "Requested X.509 certificate subject, represented using the LDAP \"String Representation of a Distinguished Name\" [1]. Important: the LDAP string format also specifies the order of the attributes in the subject, this is important when issuing certs for LDAP authentication. Example: `CN=foo,DC=corp,DC=example,DC=com` More info [1]: https://datatracker.ietf.org/doc/html/rfc4514 More info: https://github.com/cert-manager/cert-manager/issues/3203 More info: https://github.com/cert-manager/cert-manager/issues/4424 \n Cannot be set if the `subject` or `commonName` field is set. This is an Alpha Feature and is only enabled with the `--feature-gates=LiteralCertificateSubject=true` option set on both the controller and webhook components." type: string nameConstraints: description: "x.509 certificate NameConstraint extension which MUST NOT be used in a non-CA certificate. More Info: https://datatracker.ietf.org/doc/html/rfc5280#section-4.2.1.10 \n This is an Alpha Feature and is only enabled with the `--feature-gates=NameConstraints=true` option set on both the controller and webhook components." type: object properties: critical: description: if true then the name constraints are marked critical. type: boolean excluded: description: Excluded contains the constraints which must be disallowed. Any name matching a restriction in the excluded field is invalid regardless of information appearing in the permitted type: object properties: dnsDomains: description: DNSDomains is a list of DNS domains that are permitted or excluded. type: array items: type: string emailAddresses: description: EmailAddresses is a list of Email Addresses that are permitted or excluded. type: array items: type: string ipRanges: description: IPRanges is a list of IP Ranges that are permitted or excluded. This should be a valid CIDR notation. type: array items: type: string uriDomains: description: URIDomains is a list of URI domains that are permitted or excluded. type: array items: type: string permitted: description: Permitted contains the constraints in which the names must be located. type: object properties: dnsDomains: description: DNSDomains is a list of DNS domains that are permitted or excluded. type: array items: type: string emailAddresses: description: EmailAddresses is a list of Email Addresses that are permitted or excluded. type: array items: type: string ipRanges: description: IPRanges is a list of IP Ranges that are permitted or excluded. This should be a valid CIDR notation. type: array items: type: string uriDomains: description: URIDomains is a list of URI domains that are permitted or excluded. type: array items: type: string otherNames: description: '`otherNames` is an escape hatch for SAN that allows any type. We currently restrict the support to string like otherNames, cf RFC 5280 p 37 Any UTF8 String valued otherName can be passed with by setting the keys oid: x.x.x.x and UTF8Value: somevalue for `otherName`. Most commonly this would be UPN set with oid: 1.3.6.1.4.1.311.20.2.3 You should ensure that any OID passed is valid for the UTF8String type as we do not explicitly validate this.' 
type: array items: type: object properties: oid: description: OID is the object identifier for the otherName SAN. The object identifier must be expressed as a dotted string, for example, "1.2.840.113556.1.4.221". type: string utf8Value: description: utf8Value is the string value of the otherName SAN. The utf8Value accepts any valid UTF8 string to set as value for the otherName SAN. type: string privateKey: description: Private key options. These include the key algorithm and size, the used encoding and the rotation policy. type: object properties: algorithm: description: "Algorithm is the private key algorithm of the corresponding private key for this certificate. \n If provided, allowed values are either `RSA`, `ECDSA` or `Ed25519`. If `algorithm` is specified and `size` is not provided, key size of 2048 will be used for `RSA` key algorithm and key size of 256 will be used for `ECDSA` key algorithm. key size is ignored when using the `Ed25519` key algorithm." type: string enum: - RSA - ECDSA - Ed25519 encoding: description: "The private key cryptography standards (PKCS) encoding for this certificate's private key to be encoded in. \n If provided, allowed values are `PKCS1` and `PKCS8` standing for PKCS#1 and PKCS#8, respectively. Defaults to `PKCS1` if not specified." type: string enum: - PKCS1 - PKCS8 rotationPolicy: description: "RotationPolicy controls how private keys should be regenerated when a re-issuance is being processed. \n If set to `Never`, a private key will only be generated if one does not already exist in the target `spec.secretName`. If one does exists but it does not have the correct algorithm or size, a warning will be raised to await user intervention. If set to `Always`, a private key matching the specified requirements will be generated whenever a re-issuance occurs. Default is `Never` for backward compatibility." type: string enum: - Never - Always size: description: "Size is the key bit size of the corresponding private key for this certificate. \n If `algorithm` is set to `RSA`, valid values are `2048`, `4096` or `8192`, and will default to `2048` if not specified. If `algorithm` is set to `ECDSA`, valid values are `256`, `384` or `521`, and will default to `256` if not specified. If `algorithm` is set to `Ed25519`, Size is ignored. No other values are allowed." type: integer renewBefore: description: "How long before the currently issued certificate's expiry cert-manager should renew the certificate. For example, if a certificate is valid for 60 minutes, and `renewBefore=10m`, cert-manager will begin to attempt to renew the certificate 50 minutes after it was issued (i.e. when there are 10 minutes remaining until the certificate is no longer valid). \n NOTE: The actual lifetime of the issued certificate is used to determine the renewal time. If an issuer returns a certificate with a different lifetime than the one requested, cert-manager will use the lifetime of the issued certificate. \n If unset, this defaults to 1/3 of the issued certificate's lifetime. Minimum accepted value is 5 minutes. Value must be in units accepted by Go time.ParseDuration https://golang.org/pkg/time/#ParseDuration." type: string revisionHistoryLimit: description: "The maximum number of CertificateRequest revisions that are maintained in the Certificate's history. Each revision represents a single `CertificateRequest` created by this Certificate, either when it was created, renewed, or Spec was changed. 
Revisions will be removed by oldest first if the number of revisions exceeds this number. \n If set, revisionHistoryLimit must be a value of `1` or greater. If unset (`nil`), revisions will not be garbage collected. Default value is `nil`." type: integer format: int32 secretName: description: Name of the Secret resource that will be automatically created and managed by this Certificate resource. It will be populated with a private key and certificate, signed by the denoted issuer. The Secret resource lives in the same namespace as the Certificate resource. type: string secretTemplate: description: Defines annotations and labels to be copied to the Certificate's Secret. Labels and annotations on the Secret will be changed as they appear on the SecretTemplate when added or removed. SecretTemplate annotations are added in conjunction with, and cannot overwrite, the base set of annotations cert-manager sets on the Certificate's Secret. type: object properties: annotations: description: Annotations is a key value map to be copied to the target Kubernetes Secret. type: object additionalProperties: type: string labels: description: Labels is a key value map to be copied to the target Kubernetes Secret. type: object additionalProperties: type: string subject: description: "Requested set of X509 certificate subject attributes. More info: https://datatracker.ietf.org/doc/html/rfc5280#section-4.1.2.6 \n The common name attribute is specified separately in the `commonName` field. Cannot be set if the `literalSubject` field is set." type: object properties: countries: description: Countries to be used on the Certificate. type: array items: type: string localities: description: Cities to be used on the Certificate. type: array items: type: string organizationalUnits: description: Organizational Units to be used on the Certificate. type: array items: type: string organizations: description: Organizations to be used on the Certificate. type: array items: type: string postalCodes: description: Postal codes to be used on the Certificate. type: array items: type: string provinces: description: State/Provinces to be used on the Certificate. type: array items: type: string serialNumber: description: Serial number to be used on the Certificate. type: string streetAddresses: description: Street addresses to be used on the Certificate. type: array items: type: string uris: description: Requested URI subject alternative names. type: array items: type: string usages: description: "Requested key usages and extended key usages. These usages are used to set the `usages` field on the created CertificateRequest resources. If `encodeUsagesInRequest` is unset or set to `true`, the usages will additionally be encoded in the `request` field which contains the CSR blob. \n If unset, defaults to `digital signature` and `key encipherment`." type: array items: description: "KeyUsage specifies valid usage contexts for keys. 
See: https://tools.ietf.org/html/rfc5280#section-4.2.1.3 https://tools.ietf.org/html/rfc5280#section-4.2.1.12 \n Valid KeyUsage values are as follows: \"signing\", \"digital signature\", \"content commitment\", \"key encipherment\", \"key agreement\", \"data encipherment\", \"cert sign\", \"crl sign\", \"encipher only\", \"decipher only\", \"any\", \"server auth\", \"client auth\", \"code signing\", \"email protection\", \"s/mime\", \"ipsec end system\", \"ipsec tunnel\", \"ipsec user\", \"timestamping\", \"ocsp signing\", \"microsoft sgc\", \"netscape sgc\"" type: string enum: - signing - digital signature - content commitment - key encipherment - key agreement - data encipherment - cert sign - crl sign - encipher only - decipher only - any - server auth - client auth - code signing - email protection - s/mime - ipsec end system - ipsec tunnel - ipsec user - timestamping - ocsp signing - microsoft sgc - netscape sgc status: description: 'Status of the Certificate. This is set and managed automatically. Read-only. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status' type: object properties: conditions: description: List of status conditions to indicate the status of certificates. Known condition types are `Ready` and `Issuing`. type: array items: description: CertificateCondition contains condition information for an Certificate. type: object required: - status - type properties: lastTransitionTime: description: LastTransitionTime is the timestamp corresponding to the last status change of this condition. type: string format: date-time message: description: Message is a human readable description of the details of the last transition, complementing reason. type: string observedGeneration: description: If set, this represents the .metadata.generation that the condition was set based upon. For instance, if .metadata.generation is currently 12, but the .status.condition[x].observedGeneration is 9, the condition is out of date with respect to the current state of the Certificate. type: integer format: int64 reason: description: Reason is a brief machine readable explanation for the condition's last transition. type: string status: description: Status of the condition, one of (`True`, `False`, `Unknown`). type: string enum: - "True" - "False" - Unknown type: description: Type of the condition, known values are (`Ready`, `Issuing`). type: string x-kubernetes-list-map-keys: - type x-kubernetes-list-type: map failedIssuanceAttempts: description: The number of continuous failed issuance attempts up till now. This field gets removed (if set) on a successful issuance and gets set to 1 if unset and an issuance has failed. If an issuance has failed, the delay till the next issuance will be calculated using formula time.Hour * 2 ^ (failedIssuanceAttempts - 1). type: integer lastFailureTime: description: LastFailureTime is set only if the lastest issuance for this Certificate failed and contains the time of the failure. If an issuance has failed, the delay till the next issuance will be calculated using formula time.Hour * 2 ^ (failedIssuanceAttempts - 1). If the latest issuance has succeeded this field will be unset. type: string format: date-time nextPrivateKeySecretName: description: The name of the Secret resource containing the private key to be used for the next certificate iteration. The keymanager controller will automatically set this field if the `Issuing` condition is set to `True`. 
It will automatically unset this field when the Issuing condition is not set or False. type: string notAfter: description: The expiration time of the certificate stored in the secret named by this resource in `spec.secretName`. type: string format: date-time notBefore: description: The time after which the certificate stored in the secret named by this resource in `spec.secretName` is valid. type: string format: date-time renewalTime: description: RenewalTime is the time at which the certificate will be next renewed. If not set, no upcoming renewal is scheduled. type: string format: date-time revision: description: "The current 'revision' of the certificate as issued. \n When a CertificateRequest resource is created, it will have the `cert-manager.io/certificate-revision` set to one greater than the current value of this field. \n Upon issuance, this field will be set to the value of the annotation on the CertificateRequest resource used to issue the certificate. \n Persisting the value on the CertificateRequest resource allows the certificates controller to know whether a request is part of an old issuance or if it is part of the ongoing revision's issuance by checking if the revision value in the annotation is greater than this field." type: integer served: true storage: true --- # Source: cert-manager/templates/crds.yaml apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition metadata: name: challenges.acme.cert-manager.io labels: app: 'cert-manager' app.kubernetes.io/name: 'cert-manager' app.kubernetes.io/instance: 'cert-manager' # Generated labels app.kubernetes.io/version: "v1.14.4" spec: group: acme.cert-manager.io names: kind: Challenge listKind: ChallengeList plural: challenges singular: challenge categories: - cert-manager - cert-manager-acme scope: Namespaced versions: - additionalPrinterColumns: - jsonPath: .status.state name: State type: string - jsonPath: .spec.dnsName name: Domain type: string - jsonPath: .status.reason name: Reason priority: 1 type: string - description: CreationTimestamp is a timestamp representing the server time when this object was created. It is not guaranteed to be set in happens-before order across separate operations. Clients may not set this value. It is represented in RFC3339 form and is in UTC. jsonPath: .metadata.creationTimestamp name: Age type: date name: v1 schema: openAPIV3Schema: description: Challenge is a type to represent a Challenge request with an ACME server type: object required: - metadata - spec properties: apiVersion: description: 'APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources' type: string kind: description: 'Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds' type: string metadata: type: object spec: type: object required: - authorizationURL - dnsName - issuerRef - key - solver - token - type - url properties: authorizationURL: description: The URL to the ACME Authorization resource that this challenge is a part of. type: string dnsName: description: dnsName is the identifier that this challenge is for, e.g. example.com. 
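# Illustrative sketch, not part of the collected CRD dump: a minimal Certificate
# exercising the spec fields documented in the schema above (secretName,
# renewBefore, revisionHistoryLimit, privateKey, secretTemplate, subject, usages).
# All names ("web-tls", "letsencrypt-prod") are hypothetical placeholders;
# issuerRef and dnsNames are standard Certificate spec fields defined earlier
# in this CRD's schema.
apiVersion: cert-manager.io/v1
kind: Certificate
metadata:
  name: web-tls
  namespace: default
spec:
  secretName: web-tls-secret        # Secret created and managed by cert-manager
  issuerRef:
    name: letsencrypt-prod
    kind: ClusterIssuer
  dnsNames:
    - www.example.com
  renewBefore: 360h                 # begin renewal 15 days before expiry
  revisionHistoryLimit: 3           # keep at most 3 CertificateRequest revisions
  privateKey:
    algorithm: ECDSA                # RSA, ECDSA or Ed25519
    size: 256
    rotationPolicy: Always          # regenerate the key on every re-issuance
  secretTemplate:
    labels:
      app: web
  subject:
    organizations:
      - Example Org
  usages:
    - digital signature
    - key encipherment
    - server auth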
If the requested DNSName is a 'wildcard', this field MUST be set to the non-wildcard domain, e.g. for `*.example.com`, it must be `example.com`. type: string issuerRef: description: References a properly configured ACME-type Issuer which should be used to create this Challenge. If the Issuer does not exist, processing will be retried. If the Issuer is not an 'ACME' Issuer, an error will be returned and the Challenge will be marked as failed. type: object required: - name properties: group: description: Group of the resource being referred to. type: string kind: description: Kind of the resource being referred to. type: string name: description: Name of the resource being referred to. type: string key: description: 'The ACME challenge key for this challenge For HTTP01 challenges, this is the value that must be responded with to complete the HTTP01 challenge in the format: `.`. For DNS01 challenges, this is the base64 encoded SHA256 sum of the `.` text that must be set as the TXT record content.' type: string solver: description: Contains the domain solving configuration that should be used to solve this challenge resource. type: object properties: dns01: description: Configures cert-manager to attempt to complete authorizations by performing the DNS01 challenge flow. type: object properties: acmeDNS: description: Use the 'ACME DNS' (https://github.com/joohoi/acme-dns) API to manage DNS01 challenge records. type: object required: - accountSecretRef - host properties: accountSecretRef: description: A reference to a specific 'key' within a Secret resource. In some instances, `key` is a required field. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string host: type: string akamai: description: Use the Akamai DNS zone management API to manage DNS01 challenge records. type: object required: - accessTokenSecretRef - clientSecretSecretRef - clientTokenSecretRef - serviceConsumerDomain properties: accessTokenSecretRef: description: A reference to a specific 'key' within a Secret resource. In some instances, `key` is a required field. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string clientSecretSecretRef: description: A reference to a specific 'key' within a Secret resource. In some instances, `key` is a required field. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string clientTokenSecretRef: description: A reference to a specific 'key' within a Secret resource. In some instances, `key` is a required field. 
type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string serviceConsumerDomain: type: string azureDNS: description: Use the Microsoft Azure DNS API to manage DNS01 challenge records. type: object required: - resourceGroupName - subscriptionID properties: clientID: description: 'Auth: Azure Service Principal: The ClientID of the Azure Service Principal used to authenticate with Azure DNS. If set, ClientSecret and TenantID must also be set.' type: string clientSecretSecretRef: description: 'Auth: Azure Service Principal: A reference to a Secret containing the password associated with the Service Principal. If set, ClientID and TenantID must also be set.' type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string environment: description: name of the Azure environment (default AzurePublicCloud) type: string enum: - AzurePublicCloud - AzureChinaCloud - AzureGermanCloud - AzureUSGovernmentCloud hostedZoneName: description: name of the DNS zone that should be used type: string managedIdentity: description: 'Auth: Azure Workload Identity or Azure Managed Service Identity: Settings to enable Azure Workload Identity or Azure Managed Service Identity If set, ClientID, ClientSecret and TenantID must not be set.' type: object properties: clientID: description: client ID of the managed identity, can not be used at the same time as resourceID type: string resourceID: description: resource ID of the managed identity, can not be used at the same time as clientID Cannot be used for Azure Managed Service Identity type: string resourceGroupName: description: resource group the DNS zone is located in type: string subscriptionID: description: ID of the Azure subscription type: string tenantID: description: 'Auth: Azure Service Principal: The TenantID of the Azure Service Principal used to authenticate with Azure DNS. If set, ClientID and ClientSecret must also be set.' type: string cloudDNS: description: Use the Google Cloud DNS API to manage DNS01 challenge records. type: object required: - project properties: hostedZoneName: description: HostedZoneName is an optional field that tells cert-manager in which Cloud DNS zone the challenge record has to be created. If left empty cert-manager will automatically choose a zone. type: string project: type: string serviceAccountSecretRef: description: A reference to a specific 'key' within a Secret resource. In some instances, `key` is a required field. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string cloudflare: description: Use the Cloudflare API to manage DNS01 challenge records. 
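# Illustrative sketch, not part of the collected CRD dump: one way the azureDNS
# and cloudDNS DNS01 fields above are typically consumed, as solver entries on
# an ACME Issuer. The spec.acme wrapper comes from the cert-manager Issuer CRD
# (not reproduced here); every name, ID, project and zone below is a
# hypothetical placeholder.
apiVersion: cert-manager.io/v1
kind: Issuer
metadata:
  name: acme-azure-dns
spec:
  acme:
    server: https://acme-v02.api.letsencrypt.org/directory
    privateKeySecretRef:
      name: acme-account-key
    solvers:
      - dns01:
          azureDNS:
            subscriptionID: 00000000-0000-0000-0000-000000000000   # required
            resourceGroupName: dns-zone-rg                          # required
            hostedZoneName: example.com
            environment: AzurePublicCloud
            managedIdentity:
              clientID: 11111111-1111-1111-1111-111111111111        # cannot be combined with resourceID
      - dns01:
          cloudDNS:
            project: my-gcp-project                                 # required
            hostedZoneName: example.org
            serviceAccountSecretRef:
              name: clouddns-service-account
              key: key.json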
type: object properties: apiKeySecretRef: description: 'API key to use to authenticate with Cloudflare. Note: using an API token to authenticate is now the recommended method as it allows greater control of permissions.' type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string apiTokenSecretRef: description: API token used to authenticate with Cloudflare. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string email: description: Email of the account, only required when using API key based authentication. type: string cnameStrategy: description: CNAMEStrategy configures how the DNS01 provider should handle CNAME records when found in DNS zones. type: string enum: - None - Follow digitalocean: description: Use the DigitalOcean DNS API to manage DNS01 challenge records. type: object required: - tokenSecretRef properties: tokenSecretRef: description: A reference to a specific 'key' within a Secret resource. In some instances, `key` is a required field. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string rfc2136: description: Use RFC2136 ("Dynamic Updates in the Domain Name System") (https://datatracker.ietf.org/doc/rfc2136/) to manage DNS01 challenge records. type: object required: - nameserver properties: nameserver: description: The IP address or hostname of an authoritative DNS server supporting RFC2136 in the form host:port. If the host is an IPv6 address it must be enclosed in square brackets (e.g [2001:db8::1]) ; port is optional. This field is required. type: string tsigAlgorithm: description: 'The TSIG Algorithm configured in the DNS supporting RFC2136. Used only when ``tsigSecretSecretRef`` and ``tsigKeyName`` are defined. Supported values are (case-insensitive): ``HMACMD5`` (default), ``HMACSHA1``, ``HMACSHA256`` or ``HMACSHA512``.' type: string tsigKeyName: description: The TSIG Key name configured in the DNS. If ``tsigSecretSecretRef`` is defined, this field is required. type: string tsigSecretSecretRef: description: The name of the secret containing the TSIG value. If ``tsigKeyName`` is defined, this field is required. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string route53: description: Use the AWS Route53 API to manage DNS01 challenge records. 
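# Illustrative sketch, not part of the collected CRD dump: an ACME Issuer using
# the cloudflare and rfc2136 DNS01 solvers described above. Secret names, the
# TSIG key name and the nameserver address are hypothetical placeholders; the
# spec.acme wrapper comes from the cert-manager Issuer CRD (not reproduced here).
apiVersion: cert-manager.io/v1
kind: Issuer
metadata:
  name: acme-cloudflare-rfc2136
spec:
  acme:
    server: https://acme-v02.api.letsencrypt.org/directory
    privateKeySecretRef:
      name: acme-account-key
    solvers:
      - dns01:
          cloudflare:
            apiTokenSecretRef:           # API token auth is the recommended method
              name: cloudflare-api-token
              key: api-token
          cnameStrategy: Follow          # follow CNAME records found in the zone
      - dns01:
          rfc2136:
            nameserver: 203.0.113.10:53  # required; host:port of the authoritative server
            tsigKeyName: acme-key
            tsigAlgorithm: HMACSHA256
            tsigSecretSecretRef:
              name: tsig-secret
              key: tsig-secret-key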
type: object required: - region properties: accessKeyID: description: 'The AccessKeyID is used for authentication. Cannot be set when SecretAccessKeyID is set. If neither the Access Key nor Key ID are set, we fall-back to using env vars, shared credentials file or AWS Instance metadata, see: https://docs.aws.amazon.com/sdk-for-go/v1/developer-guide/configuring-sdk.html#specifying-credentials' type: string accessKeyIDSecretRef: description: 'The SecretAccessKey is used for authentication. If set, pull the AWS access key ID from a key within a Kubernetes Secret. Cannot be set when AccessKeyID is set. If neither the Access Key nor Key ID are set, we fall-back to using env vars, shared credentials file or AWS Instance metadata, see: https://docs.aws.amazon.com/sdk-for-go/v1/developer-guide/configuring-sdk.html#specifying-credentials' type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string hostedZoneID: description: If set, the provider will manage only this zone in Route53 and will not do an lookup using the route53:ListHostedZonesByName api call. type: string region: description: Always set the region when using AccessKeyID and SecretAccessKey type: string role: description: Role is a Role ARN which the Route53 provider will assume using either the explicit credentials AccessKeyID/SecretAccessKey or the inferred credentials from environment variables, shared credentials file or AWS Instance metadata type: string secretAccessKeySecretRef: description: 'The SecretAccessKey is used for authentication. If neither the Access Key nor Key ID are set, we fall-back to using env vars, shared credentials file or AWS Instance metadata, see: https://docs.aws.amazon.com/sdk-for-go/v1/developer-guide/configuring-sdk.html#specifying-credentials' type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string webhook: description: Configure an external webhook based DNS01 challenge solver to manage DNS01 challenge records. type: object required: - groupName - solverName properties: config: description: Additional configuration that should be passed to the webhook apiserver when challenges are processed. This can contain arbitrary JSON data. Secret values should not be specified in this stanza. If secret values are needed (e.g. credentials for a DNS service), you should use a SecretKeySelector to reference a Secret resource. For details on the schema of this field, consult the webhook provider implementation's documentation. x-kubernetes-preserve-unknown-fields: true groupName: description: The API group name that should be used when POSTing ChallengePayload resources to the webhook apiserver. This should be the same as the GroupName specified in the webhook provider implementation. type: string solverName: description: The name of the solver to use, as defined in the webhook provider implementation. This will typically be the name of the provider, e.g. 'cloudflare'. 
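# Illustrative sketch, not part of the collected CRD dump: an ACME Issuer using
# the route53 and webhook DNS01 solvers described above. The AWS region, zone
# ID, role ARN, group name and secret names are hypothetical placeholders; the
# spec.acme wrapper comes from the cert-manager Issuer CRD (not reproduced here).
apiVersion: cert-manager.io/v1
kind: Issuer
metadata:
  name: acme-route53
spec:
  acme:
    server: https://acme-v02.api.letsencrypt.org/directory
    privateKeySecretRef:
      name: acme-account-key
    solvers:
      - dns01:
          route53:
            region: us-east-1                       # required
            hostedZoneID: Z0000000EXAMPLE           # skips the ListHostedZonesByName lookup
            role: arn:aws:iam::123456789012:role/cert-manager-dns
            accessKeyIDSecretRef:
              name: route53-credentials
              key: access-key-id
            secretAccessKeySecretRef:
              name: route53-credentials
              key: secret-access-key
      - dns01:
          webhook:
            groupName: acme.example.com             # must match the provider's GroupName
            solverName: my-provider
            config:                                 # provider-specific JSON; no secret values here
              ttl: 120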
type: string http01: description: Configures cert-manager to attempt to complete authorizations by performing the HTTP01 challenge flow. It is not possible to obtain certificates for wildcard domain names (e.g. `*.example.com`) using the HTTP01 challenge mechanism. type: object properties: gatewayHTTPRoute: description: The Gateway API is a sig-network community API that models service networking in Kubernetes (https://gateway-api.sigs.k8s.io/). The Gateway solver will create HTTPRoutes with the specified labels in the same namespace as the challenge. This solver is experimental, and fields / behaviour may change in the future. type: object properties: labels: description: Custom labels that will be applied to HTTPRoutes created by cert-manager while solving HTTP-01 challenges. type: object additionalProperties: type: string parentRefs: description: 'When solving an HTTP-01 challenge, cert-manager creates an HTTPRoute. cert-manager needs to know which parentRefs should be used when creating the HTTPRoute. Usually, the parentRef references a Gateway. See: https://gateway-api.sigs.k8s.io/api-types/httproute/#attaching-to-gateways' type: array items: description: "ParentReference identifies an API object (usually a Gateway) that can be considered a parent of this resource (usually a route). There are two kinds of parent resources with \"Core\" support: \n * Gateway (Gateway conformance profile) * Service (Mesh conformance profile, experimental, ClusterIP Services only) \n This API may be extended in the future to support additional kinds of parent resources. \n The API object must be valid in the cluster; the Group and Kind must be registered in the cluster for this reference to be valid." type: object required: - name properties: group: description: "Group is the group of the referent. When unspecified, \"gateway.networking.k8s.io\" is inferred. To set the core API group (such as for a \"Service\" kind referent), Group must be explicitly set to \"\" (empty string). \n Support: Core" type: string default: gateway.networking.k8s.io maxLength: 253 pattern: ^$|^[a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*$ kind: description: "Kind is kind of the referent. \n There are two kinds of parent resources with \"Core\" support: \n * Gateway (Gateway conformance profile) * Service (Mesh conformance profile, experimental, ClusterIP Services only) \n Support for other resources is Implementation-Specific." type: string default: Gateway maxLength: 63 minLength: 1 pattern: ^[a-zA-Z]([-a-zA-Z0-9]*[a-zA-Z0-9])?$ name: description: "Name is the name of the referent. \n Support: Core" type: string maxLength: 253 minLength: 1 namespace: description: "Namespace is the namespace of the referent. When unspecified, this refers to the local namespace of the Route. \n Note that there are specific rules for ParentRefs which cross namespace boundaries. Cross-namespace references are only valid if they are explicitly allowed by something in the namespace they are referring to. For example: Gateway has the AllowedRoutes field, and ReferenceGrant provides a generic way to enable any other kind of cross-namespace reference. \n ParentRefs from a Route to a Service in the same namespace are \"producer\" routes, which apply default routing rules to inbound connections from any namespace to the Service. 
\n ParentRefs from a Route to a Service in a different namespace are \"consumer\" routes, and these routing rules are only applied to outbound connections originating from the same namespace as the Route, for which the intended destination of the connections are a Service targeted as a ParentRef of the Route. \n Support: Core" type: string maxLength: 63 minLength: 1 pattern: ^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ port: description: "Port is the network port this Route targets. It can be interpreted differently based on the type of parent resource. \n When the parent resource is a Gateway, this targets all listeners listening on the specified port that also support this kind of Route(and select this Route). It's not recommended to set `Port` unless the networking behaviors specified in a Route must apply to a specific port as opposed to a listener(s) whose port(s) may be changed. When both Port and SectionName are specified, the name and port of the selected listener must match both specified values. \n When the parent resource is a Service, this targets a specific port in the Service spec. When both Port (experimental) and SectionName are specified, the name and port of the selected port must match both specified values. \n Implementations MAY choose to support other parent resources. Implementations supporting other types of parent resources MUST clearly document how/if Port is interpreted. \n For the purpose of status, an attachment is considered successful as long as the parent resource accepts it partially. For example, Gateway listeners can restrict which Routes can attach to them by Route kind, namespace, or hostname. If 1 of 2 Gateway listeners accept attachment from the referencing Route, the Route MUST be considered successfully attached. If no Gateway listeners accept attachment from this Route, the Route MUST be considered detached from the Gateway. \n Support: Extended \n " type: integer format: int32 maximum: 65535 minimum: 1 sectionName: description: "SectionName is the name of a section within the target resource. In the following resources, SectionName is interpreted as the following: \n * Gateway: Listener Name. When both Port (experimental) and SectionName are specified, the name and port of the selected listener must match both specified values. * Service: Port Name. When both Port (experimental) and SectionName are specified, the name and port of the selected listener must match both specified values. Note that attaching Routes to Services as Parents is part of experimental Mesh support and is not supported for any other purpose. \n Implementations MAY choose to support attaching Routes to other resources. If that is the case, they MUST clearly document how SectionName is interpreted. \n When unspecified (empty string), this will reference the entire resource. For the purpose of status, an attachment is considered successful if at least one section in the parent resource accepts it. For example, Gateway listeners can restrict which Routes can attach to them by Route kind, namespace, or hostname. If 1 of 2 Gateway listeners accept attachment from the referencing Route, the Route MUST be considered successfully attached. If no Gateway listeners accept attachment from this Route, the Route MUST be considered detached from the Gateway. \n Support: Core" type: string maxLength: 253 minLength: 1 pattern: ^[a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*$ serviceType: description: Optional service type for Kubernetes solver service. 
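# Illustrative sketch, not part of the collected CRD dump: an ACME Issuer solver
# using the experimental gatewayHTTPRoute HTTP01 solver described above. The
# Gateway name, namespace, listener name and labels are hypothetical
# placeholders; the spec.acme wrapper comes from the cert-manager Issuer CRD
# (not reproduced here).
apiVersion: cert-manager.io/v1
kind: Issuer
metadata:
  name: acme-gateway-http01
spec:
  acme:
    server: https://acme-v02.api.letsencrypt.org/directory
    privateKeySecretRef:
      name: acme-account-key
    solvers:
      - http01:
          gatewayHTTPRoute:
            serviceType: ClusterIP            # defaults to NodePort if unset
            labels:
              app: cert-manager-solver        # applied to the generated HTTPRoutes
            parentRefs:                       # Gateway the solver HTTPRoute attaches to
              - kind: Gateway                 # default kind
                name: public-gateway
                namespace: gateway-system
                sectionName: http             # listener name on the Gateway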
Supported values are NodePort or ClusterIP. If unset, defaults to NodePort. type: string ingress: description: The ingress based HTTP01 challenge solver will solve challenges by creating or modifying Ingress resources in order to route requests for '/.well-known/acme-challenge/XYZ' to 'challenge solver' pods that are provisioned by cert-manager for each Challenge to be completed. type: object properties: class: description: This field configures the annotation `kubernetes.io/ingress.class` when creating Ingress resources to solve ACME challenges that use this challenge solver. Only one of `class`, `name` or `ingressClassName` may be specified. type: string ingressClassName: description: This field configures the field `ingressClassName` on the created Ingress resources used to solve ACME challenges that use this challenge solver. This is the recommended way of configuring the ingress class. Only one of `class`, `name` or `ingressClassName` may be specified. type: string ingressTemplate: description: Optional ingress template used to configure the ACME challenge solver ingress used for HTTP01 challenges. type: object properties: metadata: description: ObjectMeta overrides for the ingress used to solve HTTP01 challenges. Only the 'labels' and 'annotations' fields may be set. If labels or annotations overlap with in-built values, the values here will override the in-built values. type: object properties: annotations: description: Annotations that should be added to the created ACME HTTP01 solver ingress. type: object additionalProperties: type: string labels: description: Labels that should be added to the created ACME HTTP01 solver ingress. type: object additionalProperties: type: string name: description: The name of the ingress resource that should have ACME challenge solving routes inserted into it in order to solve HTTP01 challenges. This is typically used in conjunction with ingress controllers like ingress-gce, which maintains a 1:1 mapping between external IPs and ingress resources. Only one of `class`, `name` or `ingressClassName` may be specified. type: string podTemplate: description: Optional pod template used to configure the ACME challenge solver pods used for HTTP01 challenges. type: object properties: metadata: description: ObjectMeta overrides for the pod used to solve HTTP01 challenges. Only the 'labels' and 'annotations' fields may be set. If labels or annotations overlap with in-built values, the values here will override the in-built values. type: object properties: annotations: description: Annotations that should be added to the create ACME HTTP01 solver pods. type: object additionalProperties: type: string labels: description: Labels that should be added to the created ACME HTTP01 solver pods. type: object additionalProperties: type: string spec: description: PodSpec defines overrides for the HTTP01 challenge solver pod. Check ACMEChallengeSolverHTTP01IngressPodSpec to find out currently supported fields. All other fields will be ignored. type: object properties: affinity: description: If specified, the pod's scheduling constraints type: object properties: nodeAffinity: description: Describes node affinity scheduling rules for the pod. type: object properties: preferredDuringSchedulingIgnoredDuringExecution: description: The scheduler will prefer to schedule pods to nodes that satisfy the affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. 
The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding "weight" to the sum if the node matches the corresponding matchExpressions; the node(s) with the highest sum are the most preferred. type: array items: description: An empty preferred scheduling term matches all objects with implicit weight 0 (i.e. it's a no-op). A null preferred scheduling term matches no objects (i.e. is also a no-op). type: object required: - preference - weight properties: preference: description: A node selector term, associated with the corresponding weight. type: object properties: matchExpressions: description: A list of node selector requirements by node's labels. type: array items: description: A node selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: The label key that the selector applies to. type: string operator: description: Represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. type: string values: description: An array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. If the operator is Gt or Lt, the values array must have a single element, which will be interpreted as an integer. This array is replaced during a strategic merge patch. type: array items: type: string matchFields: description: A list of node selector requirements by node's fields. type: array items: description: A node selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: The label key that the selector applies to. type: string operator: description: Represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. type: string values: description: An array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. If the operator is Gt or Lt, the values array must have a single element, which will be interpreted as an integer. This array is replaced during a strategic merge patch. type: array items: type: string x-kubernetes-map-type: atomic weight: description: Weight associated with matching the corresponding nodeSelectorTerm, in the range 1-100. type: integer format: int32 requiredDuringSchedulingIgnoredDuringExecution: description: If the affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to an update), the system may or may not try to eventually evict the pod from its node. type: object required: - nodeSelectorTerms properties: nodeSelectorTerms: description: Required. A list of node selector terms. The terms are ORed. type: array items: description: A null or empty node selector term matches no objects. The requirements of them are ANDed. The TopologySelectorTerm type implements a subset of the NodeSelectorTerm. 
type: object properties: matchExpressions: description: A list of node selector requirements by node's labels. type: array items: description: A node selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: The label key that the selector applies to. type: string operator: description: Represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. type: string values: description: An array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. If the operator is Gt or Lt, the values array must have a single element, which will be interpreted as an integer. This array is replaced during a strategic merge patch. type: array items: type: string matchFields: description: A list of node selector requirements by node's fields. type: array items: description: A node selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: The label key that the selector applies to. type: string operator: description: Represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. type: string values: description: An array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. If the operator is Gt or Lt, the values array must have a single element, which will be interpreted as an integer. This array is replaced during a strategic merge patch. type: array items: type: string x-kubernetes-map-type: atomic x-kubernetes-map-type: atomic podAffinity: description: Describes pod affinity scheduling rules (e.g. co-locate this pod in the same node, zone, etc. as some other pod(s)). type: object properties: preferredDuringSchedulingIgnoredDuringExecution: description: The scheduler will prefer to schedule pods to nodes that satisfy the affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding "weight" to the sum if the node has pods which matches the corresponding podAffinityTerm; the node(s) with the highest sum are the most preferred. type: array items: description: The weights of all of the matched WeightedPodAffinityTerm fields are added per-node to find the most preferred node(s) type: object required: - podAffinityTerm - weight properties: podAffinityTerm: description: Required. A pod affinity term, associated with the corresponding weight. type: object required: - topologyKey properties: labelSelector: description: A label query over a set of resources, in this case pods. If it's null, this PodAffinityTerm matches with no Pods. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. 
type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic matchLabelKeys: description: MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MatchLabelKeys and LabelSelector. Also, MatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic mismatchLabelKeys: description: MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MismatchLabelKeys and LabelSelector. Also, MismatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic namespaceSelector: description: A label query over the set of namespaces that the term applies to. The term is applied to the union of the namespaces selected by this field and the ones listed in the namespaces field. null selector and null or empty namespaces list means "this pod's namespace". An empty selector ({}) matches all namespaces. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. 
Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic namespaces: description: namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means "this pod's namespace". type: array items: type: string topologyKey: description: This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed. type: string weight: description: weight associated with matching the corresponding podAffinityTerm, in the range 1-100. type: integer format: int32 requiredDuringSchedulingIgnoredDuringExecution: description: If the affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to a pod label update), the system may or may not try to eventually evict the pod from its node. When there are multiple elements, the lists of nodes corresponding to each podAffinityTerm are intersected, i.e. all terms must be satisfied. type: array items: description: Defines a set of pods (namely those matching the labelSelector relative to the given namespace(s)) that this pod should be co-located (affinity) or not co-located (anti-affinity) with, where co-located is defined as running on a node whose value of the label with key matches that of any node on which a pod of the set of pods is running type: object required: - topologyKey properties: labelSelector: description: A label query over a set of resources, in this case pods. If it's null, this PodAffinityTerm matches with no Pods. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. 
type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic matchLabelKeys: description: MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MatchLabelKeys and LabelSelector. Also, MatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic mismatchLabelKeys: description: MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MismatchLabelKeys and LabelSelector. Also, MismatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic namespaceSelector: description: A label query over the set of namespaces that the term applies to. The term is applied to the union of the namespaces selected by this field and the ones listed in the namespaces field. null selector and null or empty namespaces list means "this pod's namespace". An empty selector ({}) matches all namespaces. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. 
type: object additionalProperties: type: string x-kubernetes-map-type: atomic namespaces: description: namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means "this pod's namespace". type: array items: type: string topologyKey: description: This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed. type: string podAntiAffinity: description: Describes pod anti-affinity scheduling rules (e.g. avoid putting this pod in the same node, zone, etc. as some other pod(s)). type: object properties: preferredDuringSchedulingIgnoredDuringExecution: description: The scheduler will prefer to schedule pods to nodes that satisfy the anti-affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling anti-affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding "weight" to the sum if the node has pods which matches the corresponding podAffinityTerm; the node(s) with the highest sum are the most preferred. type: array items: description: The weights of all of the matched WeightedPodAffinityTerm fields are added per-node to find the most preferred node(s) type: object required: - podAffinityTerm - weight properties: podAffinityTerm: description: Required. A pod affinity term, associated with the corresponding weight. type: object required: - topologyKey properties: labelSelector: description: A label query over a set of resources, in this case pods. If it's null, this PodAffinityTerm matches with no Pods. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic matchLabelKeys: description: MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. 
The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MatchLabelKeys and LabelSelector. Also, MatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic mismatchLabelKeys: description: MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MismatchLabelKeys and LabelSelector. Also, MismatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic namespaceSelector: description: A label query over the set of namespaces that the term applies to. The term is applied to the union of the namespaces selected by this field and the ones listed in the namespaces field. null selector and null or empty namespaces list means "this pod's namespace". An empty selector ({}) matches all namespaces. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic namespaces: description: namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means "this pod's namespace". 
type: array items: type: string topologyKey: description: This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed. type: string weight: description: weight associated with matching the corresponding podAffinityTerm, in the range 1-100. type: integer format: int32 requiredDuringSchedulingIgnoredDuringExecution: description: If the anti-affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the anti-affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to a pod label update), the system may or may not try to eventually evict the pod from its node. When there are multiple elements, the lists of nodes corresponding to each podAffinityTerm are intersected, i.e. all terms must be satisfied. type: array items: description: Defines a set of pods (namely those matching the labelSelector relative to the given namespace(s)) that this pod should be co-located (affinity) or not co-located (anti-affinity) with, where co-located is defined as running on a node whose value of the label with key matches that of any node on which a pod of the set of pods is running type: object required: - topologyKey properties: labelSelector: description: A label query over a set of resources, in this case pods. If it's null, this PodAffinityTerm matches with no Pods. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic matchLabelKeys: description: MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MatchLabelKeys and LabelSelector. Also, MatchLabelKeys cannot be set when LabelSelector isn't set. 
This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic mismatchLabelKeys: description: MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MismatchLabelKeys and LabelSelector. Also, MismatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic namespaceSelector: description: A label query over the set of namespaces that the term applies to. The term is applied to the union of the namespaces selected by this field and the ones listed in the namespaces field. null selector and null or empty namespaces list means "this pod's namespace". An empty selector ({}) matches all namespaces. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic namespaces: description: namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means "this pod's namespace". type: array items: type: string topologyKey: description: This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed. type: string imagePullSecrets: description: If specified, the pod's imagePullSecrets type: array items: description: LocalObjectReference contains enough information to let you locate the referenced object inside the same namespace. type: object properties: name: description: 'Name of the referent. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names TODO: Add other useful fields. apiVersion, kind, uid?' type: string x-kubernetes-map-type: atomic nodeSelector: description: 'NodeSelector is a selector which must be true for the pod to fit on a node. Selector which must match a node''s labels for the pod to be scheduled on that node. More info: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/' type: object additionalProperties: type: string priorityClassName: description: If specified, the pod's priorityClassName. type: string serviceAccountName: description: If specified, the pod's service account type: string tolerations: description: If specified, the pod's tolerations. type: array items: description: The pod this Toleration is attached to tolerates any taint that matches the triple using the matching operator . type: object properties: effect: description: Effect indicates the taint effect to match. Empty means match all taint effects. When specified, allowed values are NoSchedule, PreferNoSchedule and NoExecute. type: string key: description: Key is the taint key that the toleration applies to. Empty means match all taint keys. If the key is empty, operator must be Exists; this combination means to match all values and all keys. type: string operator: description: Operator represents a key's relationship to the value. Valid operators are Exists and Equal. Defaults to Equal. Exists is equivalent to wildcard for value, so that a pod can tolerate all taints of a particular category. type: string tolerationSeconds: description: TolerationSeconds represents the period of time the toleration (which must be of effect NoExecute, otherwise this field is ignored) tolerates the taint. By default, it is not set, which means tolerate the taint forever (do not evict). Zero and negative values will be treated as 0 (evict immediately) by the system. type: integer format: int64 value: description: Value is the taint value the toleration matches to. If the operator is Exists, the value should be empty, otherwise just a regular string. type: string serviceType: description: Optional service type for Kubernetes solver service. Supported values are NodePort or ClusterIP. If unset, defaults to NodePort. type: string selector: description: Selector selects a set of DNSNames on the Certificate resource that should be solved using this challenge solver. If not specified, the solver will be treated as the 'default' solver with the lowest priority, i.e. if any other solver has a more specific match, it will be used instead. type: object properties: dnsNames: description: List of DNSNames that this solver will be used to solve. If specified and a match is found, a dnsNames selector will take precedence over a dnsZones selector. If multiple solvers match with the same dnsNames value, the solver with the most matching labels in matchLabels will be selected. If neither has more matches, the solver defined earlier in the list will be selected. type: array items: type: string dnsZones: description: List of DNSZones that this solver will be used to solve. The most specific DNS zone match specified here will take precedence over other DNS zone matches, so a solver specifying sys.example.com will be selected over one specifying example.com for the domain www.sys.example.com. If multiple solvers match with the same dnsZones value, the solver with the most matching labels in matchLabels will be selected. 
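# Illustrative aside (not part of the rendered CRDs): a sketch combining the
# selector, serviceType and podTemplate scheduling fields described above on
# one solver entry. The DNS name, priority class and taint key are assumptions.
#
#   solvers:
#     - selector:
#         dnsNames:
#           - www.example.com          # dnsNames takes precedence over dnsZones
#       http01:
#         ingress:
#           serviceType: ClusterIP
#           podTemplate:
#             spec:
#               priorityClassName: acme-solver-priority   # hypothetical class
#               nodeSelector:
#                 kubernetes.io/os: linux
#               tolerations:
#                 - key: node-role.kubernetes.io/infra
#                   operator: Exists
#                   effect: NoSchedule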
If neither has more matches, the solver defined earlier in the list will be selected. type: array items: type: string matchLabels: description: A label selector that is used to refine the set of certificate's that this challenge solver will apply to. type: object additionalProperties: type: string token: description: The ACME challenge token for this challenge. This is the raw value returned from the ACME server. type: string type: description: The type of ACME challenge this resource represents. One of "HTTP-01" or "DNS-01". type: string enum: - HTTP-01 - DNS-01 url: description: The URL of the ACME Challenge resource for this challenge. This can be used to lookup details about the status of this challenge. type: string wildcard: description: wildcard will be true if this challenge is for a wildcard identifier, for example '*.example.com'. type: boolean status: type: object properties: presented: description: presented will be set to true if the challenge values for this challenge are currently 'presented'. This *does not* imply the self check is passing. Only that the values have been 'submitted' for the appropriate challenge mechanism (i.e. the DNS01 TXT record has been presented, or the HTTP01 configuration has been configured). type: boolean processing: description: Used to denote whether this challenge should be processed or not. This field will only be set to true by the 'scheduling' component. It will only be set to false by the 'challenges' controller, after the challenge has reached a final state or timed out. If this field is set to false, the challenge controller will not take any more action. type: boolean reason: description: Contains human readable information on why the Challenge is in the current state. type: string state: description: Contains the current 'state' of the challenge. If not set, the state of the challenge is unknown. type: string enum: - valid - ready - pending - processing - invalid - expired - errored served: true storage: true subresources: status: {} --- # Source: cert-manager/templates/crds.yaml apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition metadata: name: clusterissuers.cert-manager.io labels: app: 'cert-manager' app.kubernetes.io/name: 'cert-manager' app.kubernetes.io/instance: "cert-manager" # Generated labels app.kubernetes.io/version: "v1.14.4" spec: group: cert-manager.io names: kind: ClusterIssuer listKind: ClusterIssuerList plural: clusterissuers singular: clusterissuer categories: - cert-manager scope: Cluster versions: - name: v1 subresources: status: {} additionalPrinterColumns: - jsonPath: .status.conditions[?(@.type=="Ready")].status name: Ready type: string - jsonPath: .status.conditions[?(@.type=="Ready")].message name: Status priority: 1 type: string - jsonPath: .metadata.creationTimestamp description: CreationTimestamp is a timestamp representing the server time when this object was created. It is not guaranteed to be set in happens-before order across separate operations. Clients may not set this value. It is represented in RFC3339 form and is in UTC. name: Age type: date schema: openAPIV3Schema: description: A ClusterIssuer represents a certificate issuing authority which can be referenced as part of `issuerRef` fields. It is similar to an Issuer, however it is cluster-scoped and therefore can be referenced by resources that exist in *any* namespace, not just the same namespace as the referent. 
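# Illustrative aside (not part of the rendered CRDs): the smallest useful
# ClusterIssuer accepted by this CRD is sketched below, assuming the Let's
# Encrypt staging endpoint and a cluster-wide nginx ingress class.
#
#   apiVersion: cert-manager.io/v1
#   kind: ClusterIssuer
#   metadata:
#     name: letsencrypt-staging
#   spec:
#     acme:
#       server: https://acme-staging-v02.api.letsencrypt.org/directory
#       email: ops@example.com                       # optional but recommended
#       privateKeySecretRef:
#         name: letsencrypt-staging-account-key
#       solvers:
#         - http01:
#             ingress:
#               ingressClassName: nginx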
type: object required: - spec properties: apiVersion: description: 'APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources' type: string kind: description: 'Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds' type: string metadata: type: object spec: description: Desired state of the ClusterIssuer resource. type: object properties: acme: description: ACME configures this issuer to communicate with a RFC8555 (ACME) server to obtain signed x509 certificates. type: object required: - privateKeySecretRef - server properties: caBundle: description: Base64-encoded bundle of PEM CAs which can be used to validate the certificate chain presented by the ACME server. Mutually exclusive with SkipTLSVerify; prefer using CABundle to prevent various kinds of security vulnerabilities. If CABundle and SkipTLSVerify are unset, the system certificate bundle inside the container is used to validate the TLS connection. type: string format: byte disableAccountKeyGeneration: description: Enables or disables generating a new ACME account key. If true, the Issuer resource will *not* request a new account but will expect the account key to be supplied via an existing secret. If false, the cert-manager system will generate a new ACME account key for the Issuer. Defaults to false. type: boolean email: description: Email is the email address to be associated with the ACME account. This field is optional, but it is strongly recommended to be set. It will be used to contact you in case of issues with your account or certificates, including expiry notification emails. This field may be updated after the account is initially registered. type: string enableDurationFeature: description: Enables requesting a Not After date on certificates that matches the duration of the certificate. This is not supported by all ACME servers like Let's Encrypt. If set to true when the ACME server does not support it it will create an error on the Order. Defaults to false. type: boolean externalAccountBinding: description: ExternalAccountBinding is a reference to a CA external account of the ACME server. If set, upon registration cert-manager will attempt to associate the given external account credentials with the registered ACME account. type: object required: - keyID - keySecretRef properties: keyAlgorithm: description: 'Deprecated: keyAlgorithm field exists for historical compatibility reasons and should not be used. The algorithm is now hardcoded to HS256 in golang/x/crypto/acme.' type: string enum: - HS256 - HS384 - HS512 keyID: description: keyID is the ID of the CA key that the External Account is bound to. type: string keySecretRef: description: keySecretRef is a Secret Key Selector referencing a data item in a Kubernetes Secret which holds the symmetric MAC key of the External Account Binding. The `key` is the index string that is paired with the key data in the Secret and should not be confused with the key data itself, or indeed with the External Account Binding keyID above. The secret key stored in the Secret **must** be un-padded, base64 URL encoded data. 
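# Illustrative aside (not part of the rendered CRDs): a sketch of the
# externalAccountBinding stanza described above, for an ACME CA that hands out
# EAB credentials. The key ID, server URL and Secret name are hypothetical;
# the referenced Secret value must be the un-padded, base64 URL encoded MAC key.
#
#   acme:
#     server: https://acme.example.com/directory     # EAB-enabled ACME CA
#     privateKeySecretRef:
#       name: example-acme-account-key
#     externalAccountBinding:
#       keyID: "0123abcd"
#       keySecretRef:
#         name: example-eab-credentials
#         key: secret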
type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string preferredChain: description: 'PreferredChain is the chain to use if the ACME server outputs multiple. PreferredChain is no guarantee that this one gets delivered by the ACME endpoint. For example, for Let''s Encrypt''s DST crosssign you would use: "DST Root CA X3" or "ISRG Root X1" for the newer Let''s Encrypt root CA. This value picks the first certificate bundle in the ACME alternative chains that has a certificate with this value as its issuer''s CN' type: string maxLength: 64 privateKeySecretRef: description: PrivateKey is the name of a Kubernetes Secret resource that will be used to store the automatically generated ACME account private key. Optionally, a `key` may be specified to select a specific entry within the named Secret resource. If `key` is not specified, a default of `tls.key` will be used. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string server: description: 'Server is the URL used to access the ACME server''s ''directory'' endpoint. For example, for Let''s Encrypt''s staging endpoint, you would use: "https://acme-staging-v02.api.letsencrypt.org/directory". Only ACME v2 endpoints (i.e. RFC 8555) are supported.' type: string skipTLSVerify: description: 'INSECURE: Enables or disables validation of the ACME server TLS certificate. If true, requests to the ACME server will not have the TLS certificate chain validated. Mutually exclusive with CABundle; prefer using CABundle to prevent various kinds of security vulnerabilities. Only enable this option in development environments. If CABundle and SkipTLSVerify are unset, the system certificate bundle inside the container is used to validate the TLS connection. Defaults to false.' type: boolean solvers: description: 'Solvers is a list of challenge solvers that will be used to solve ACME challenges for the matching domains. Solver configurations must be provided in order to obtain certificates from an ACME server. For more information, see: https://cert-manager.io/docs/configuration/acme/' type: array items: description: An ACMEChallengeSolver describes how to solve ACME challenges for the issuer it is part of. A selector may be provided to use different solving strategies for different DNS names. Only one of HTTP01 or DNS01 must be provided. type: object properties: dns01: description: Configures cert-manager to attempt to complete authorizations by performing the DNS01 challenge flow. type: object properties: acmeDNS: description: Use the 'ACME DNS' (https://github.com/joohoi/acme-dns) API to manage DNS01 challenge records. type: object required: - accountSecretRef - host properties: accountSecretRef: description: A reference to a specific 'key' within a Secret resource. In some instances, `key` is a required field. 
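# Illustrative aside (not part of the rendered CRDs): preferredChain and the
# caBundle/skipTLSVerify pair described above sit on the same acme stanza.
# A sketch against a hypothetical private ACME server with an internal CA:
#
#   acme:
#     server: https://acme.internal.example.com/directory
#     preferredChain: "ISRG Root X1"          # only relevant when the CA offers alternate chains
#     caBundle: <base64-encoded PEM bundle>   # preferred over skipTLSVerify: true
#     privateKeySecretRef:
#       name: internal-acme-account-key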
type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string host: type: string akamai: description: Use the Akamai DNS zone management API to manage DNS01 challenge records. type: object required: - accessTokenSecretRef - clientSecretSecretRef - clientTokenSecretRef - serviceConsumerDomain properties: accessTokenSecretRef: description: A reference to a specific 'key' within a Secret resource. In some instances, `key` is a required field. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string clientSecretSecretRef: description: A reference to a specific 'key' within a Secret resource. In some instances, `key` is a required field. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string clientTokenSecretRef: description: A reference to a specific 'key' within a Secret resource. In some instances, `key` is a required field. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string serviceConsumerDomain: type: string azureDNS: description: Use the Microsoft Azure DNS API to manage DNS01 challenge records. type: object required: - resourceGroupName - subscriptionID properties: clientID: description: 'Auth: Azure Service Principal: The ClientID of the Azure Service Principal used to authenticate with Azure DNS. If set, ClientSecret and TenantID must also be set.' type: string clientSecretSecretRef: description: 'Auth: Azure Service Principal: A reference to a Secret containing the password associated with the Service Principal. If set, ClientID and TenantID must also be set.' type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string environment: description: name of the Azure environment (default AzurePublicCloud) type: string enum: - AzurePublicCloud - AzureChinaCloud - AzureGermanCloud - AzureUSGovernmentCloud hostedZoneName: description: name of the DNS zone that should be used type: string managedIdentity: description: 'Auth: Azure Workload Identity or Azure Managed Service Identity: Settings to enable Azure Workload Identity or Azure Managed Service Identity If set, ClientID, ClientSecret and TenantID must not be set.' type: object properties: clientID: description: client ID of the managed identity, can not be used at the same time as resourceID type: string resourceID: description: resource ID of the managed identity, can not be used at the same time as clientID Cannot be used for Azure Managed Service Identity type: string resourceGroupName: description: resource group the DNS zone is located in type: string subscriptionID: description: ID of the Azure subscription type: string tenantID: description: 'Auth: Azure Service Principal: The TenantID of the Azure Service Principal used to authenticate with Azure DNS. If set, ClientID and ClientSecret must also be set.' type: string cloudDNS: description: Use the Google Cloud DNS API to manage DNS01 challenge records. type: object required: - project properties: hostedZoneName: description: HostedZoneName is an optional field that tells cert-manager in which Cloud DNS zone the challenge record has to be created. If left empty cert-manager will automatically choose a zone. type: string project: type: string serviceAccountSecretRef: description: A reference to a specific 'key' within a Secret resource. In some instances, `key` is a required field. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string cloudflare: description: Use the Cloudflare API to manage DNS01 challenge records. type: object properties: apiKeySecretRef: description: 'API key to use to authenticate with Cloudflare. Note: using an API token to authenticate is now the recommended method as it allows greater control of permissions.' type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string apiTokenSecretRef: description: API token used to authenticate with Cloudflare. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string email: description: Email of the account, only required when using API key based authentication. 
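# Illustrative aside (not part of the rendered CRDs): two DNS01 provider
# entries from the schema above, with hypothetical Secret names and Azure
# identifiers. API-token auth is the recommended Cloudflare method.
#
#   solvers:
#     - dns01:
#         cloudflare:
#           apiTokenSecretRef:
#             name: cloudflare-api-token
#             key: api-token
#     - dns01:
#         azureDNS:
#           subscriptionID: 00000000-0000-0000-0000-000000000000
#           resourceGroupName: dns-rg
#           hostedZoneName: example.com
#           environment: AzurePublicCloud
#           managedIdentity:
#             clientID: 11111111-1111-1111-1111-111111111111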
type: string cnameStrategy: description: CNAMEStrategy configures how the DNS01 provider should handle CNAME records when found in DNS zones. type: string enum: - None - Follow digitalocean: description: Use the DigitalOcean DNS API to manage DNS01 challenge records. type: object required: - tokenSecretRef properties: tokenSecretRef: description: A reference to a specific 'key' within a Secret resource. In some instances, `key` is a required field. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string rfc2136: description: Use RFC2136 ("Dynamic Updates in the Domain Name System") (https://datatracker.ietf.org/doc/rfc2136/) to manage DNS01 challenge records. type: object required: - nameserver properties: nameserver: description: The IP address or hostname of an authoritative DNS server supporting RFC2136 in the form host:port. If the host is an IPv6 address it must be enclosed in square brackets (e.g [2001:db8::1]) ; port is optional. This field is required. type: string tsigAlgorithm: description: 'The TSIG Algorithm configured in the DNS supporting RFC2136. Used only when ``tsigSecretSecretRef`` and ``tsigKeyName`` are defined. Supported values are (case-insensitive): ``HMACMD5`` (default), ``HMACSHA1``, ``HMACSHA256`` or ``HMACSHA512``.' type: string tsigKeyName: description: The TSIG Key name configured in the DNS. If ``tsigSecretSecretRef`` is defined, this field is required. type: string tsigSecretSecretRef: description: The name of the secret containing the TSIG value. If ``tsigKeyName`` is defined, this field is required. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string route53: description: Use the AWS Route53 API to manage DNS01 challenge records. type: object required: - region properties: accessKeyID: description: 'The AccessKeyID is used for authentication. Cannot be set when SecretAccessKeyID is set. If neither the Access Key nor Key ID are set, we fall-back to using env vars, shared credentials file or AWS Instance metadata, see: https://docs.aws.amazon.com/sdk-for-go/v1/developer-guide/configuring-sdk.html#specifying-credentials' type: string accessKeyIDSecretRef: description: 'The SecretAccessKey is used for authentication. If set, pull the AWS access key ID from a key within a Kubernetes Secret. Cannot be set when AccessKeyID is set. If neither the Access Key nor Key ID are set, we fall-back to using env vars, shared credentials file or AWS Instance metadata, see: https://docs.aws.amazon.com/sdk-for-go/v1/developer-guide/configuring-sdk.html#specifying-credentials' type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string hostedZoneID: description: If set, the provider will manage only this zone in Route53 and will not do an lookup using the route53:ListHostedZonesByName api call. type: string region: description: Always set the region when using AccessKeyID and SecretAccessKey type: string role: description: Role is a Role ARN which the Route53 provider will assume using either the explicit credentials AccessKeyID/SecretAccessKey or the inferred credentials from environment variables, shared credentials file or AWS Instance metadata type: string secretAccessKeySecretRef: description: 'The SecretAccessKey is used for authentication. If neither the Access Key nor Key ID are set, we fall-back to using env vars, shared credentials file or AWS Instance metadata, see: https://docs.aws.amazon.com/sdk-for-go/v1/developer-guide/configuring-sdk.html#specifying-credentials' type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string webhook: description: Configure an external webhook based DNS01 challenge solver to manage DNS01 challenge records. type: object required: - groupName - solverName properties: config: description: Additional configuration that should be passed to the webhook apiserver when challenges are processed. This can contain arbitrary JSON data. Secret values should not be specified in this stanza. If secret values are needed (e.g. credentials for a DNS service), you should use a SecretKeySelector to reference a Secret resource. For details on the schema of this field, consult the webhook provider implementation's documentation. x-kubernetes-preserve-unknown-fields: true groupName: description: The API group name that should be used when POSTing ChallengePayload resources to the webhook apiserver. This should be the same as the GroupName specified in the webhook provider implementation. type: string solverName: description: The name of the solver to use, as defined in the webhook provider implementation. This will typically be the name of the provider, e.g. 'cloudflare'. type: string http01: description: Configures cert-manager to attempt to complete authorizations by performing the HTTP01 challenge flow. It is not possible to obtain certificates for wildcard domain names (e.g. `*.example.com`) using the HTTP01 challenge mechanism. type: object properties: gatewayHTTPRoute: description: The Gateway API is a sig-network community API that models service networking in Kubernetes (https://gateway-api.sigs.k8s.io/). The Gateway solver will create HTTPRoutes with the specified labels in the same namespace as the challenge. This solver is experimental, and fields / behaviour may change in the future. type: object properties: labels: description: Custom labels that will be applied to HTTPRoutes created by cert-manager while solving HTTP-01 challenges. type: object additionalProperties: type: string parentRefs: description: 'When solving an HTTP-01 challenge, cert-manager creates an HTTPRoute. cert-manager needs to know which parentRefs should be used when creating the HTTPRoute. Usually, the parentRef references a Gateway. 
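# Illustrative aside (not part of the rendered CRDs): a Route53 DNS01 entry
# per the schema above. The zone ID, role ARN and Secret are hypothetical;
# if the credential fields are omitted the provider falls back to env vars,
# the shared credentials file or instance metadata.
#
#   solvers:
#     - dns01:
#         route53:
#           region: us-east-1
#           hostedZoneID: Z0000000EXAMPLE
#           role: arn:aws:iam::123456789012:role/cert-manager-dns01
#           accessKeyIDSecretRef:
#             name: route53-credentials
#             key: access-key-id
#           secretAccessKeySecretRef:
#             name: route53-credentials
#             key: secret-access-key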
See: https://gateway-api.sigs.k8s.io/api-types/httproute/#attaching-to-gateways' type: array items: description: "ParentReference identifies an API object (usually a Gateway) that can be considered a parent of this resource (usually a route). There are two kinds of parent resources with \"Core\" support: \n * Gateway (Gateway conformance profile) * Service (Mesh conformance profile, experimental, ClusterIP Services only) \n This API may be extended in the future to support additional kinds of parent resources. \n The API object must be valid in the cluster; the Group and Kind must be registered in the cluster for this reference to be valid." type: object required: - name properties: group: description: "Group is the group of the referent. When unspecified, \"gateway.networking.k8s.io\" is inferred. To set the core API group (such as for a \"Service\" kind referent), Group must be explicitly set to \"\" (empty string). \n Support: Core" type: string default: gateway.networking.k8s.io maxLength: 253 pattern: ^$|^[a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*$ kind: description: "Kind is kind of the referent. \n There are two kinds of parent resources with \"Core\" support: \n * Gateway (Gateway conformance profile) * Service (Mesh conformance profile, experimental, ClusterIP Services only) \n Support for other resources is Implementation-Specific." type: string default: Gateway maxLength: 63 minLength: 1 pattern: ^[a-zA-Z]([-a-zA-Z0-9]*[a-zA-Z0-9])?$ name: description: "Name is the name of the referent. \n Support: Core" type: string maxLength: 253 minLength: 1 namespace: description: "Namespace is the namespace of the referent. When unspecified, this refers to the local namespace of the Route. \n Note that there are specific rules for ParentRefs which cross namespace boundaries. Cross-namespace references are only valid if they are explicitly allowed by something in the namespace they are referring to. For example: Gateway has the AllowedRoutes field, and ReferenceGrant provides a generic way to enable any other kind of cross-namespace reference. \n ParentRefs from a Route to a Service in the same namespace are \"producer\" routes, which apply default routing rules to inbound connections from any namespace to the Service. \n ParentRefs from a Route to a Service in a different namespace are \"consumer\" routes, and these routing rules are only applied to outbound connections originating from the same namespace as the Route, for which the intended destination of the connections are a Service targeted as a ParentRef of the Route. \n Support: Core" type: string maxLength: 63 minLength: 1 pattern: ^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ port: description: "Port is the network port this Route targets. It can be interpreted differently based on the type of parent resource. \n When the parent resource is a Gateway, this targets all listeners listening on the specified port that also support this kind of Route(and select this Route). It's not recommended to set `Port` unless the networking behaviors specified in a Route must apply to a specific port as opposed to a listener(s) whose port(s) may be changed. When both Port and SectionName are specified, the name and port of the selected listener must match both specified values. \n When the parent resource is a Service, this targets a specific port in the Service spec. When both Port (experimental) and SectionName are specified, the name and port of the selected port must match both specified values. 
\n Implementations MAY choose to support other parent resources. Implementations supporting other types of parent resources MUST clearly document how/if Port is interpreted. \n For the purpose of status, an attachment is considered successful as long as the parent resource accepts it partially. For example, Gateway listeners can restrict which Routes can attach to them by Route kind, namespace, or hostname. If 1 of 2 Gateway listeners accept attachment from the referencing Route, the Route MUST be considered successfully attached. If no Gateway listeners accept attachment from this Route, the Route MUST be considered detached from the Gateway. \n Support: Extended \n " type: integer format: int32 maximum: 65535 minimum: 1 sectionName: description: "SectionName is the name of a section within the target resource. In the following resources, SectionName is interpreted as the following: \n * Gateway: Listener Name. When both Port (experimental) and SectionName are specified, the name and port of the selected listener must match both specified values. * Service: Port Name. When both Port (experimental) and SectionName are specified, the name and port of the selected listener must match both specified values. Note that attaching Routes to Services as Parents is part of experimental Mesh support and is not supported for any other purpose. \n Implementations MAY choose to support attaching Routes to other resources. If that is the case, they MUST clearly document how SectionName is interpreted. \n When unspecified (empty string), this will reference the entire resource. For the purpose of status, an attachment is considered successful if at least one section in the parent resource accepts it. For example, Gateway listeners can restrict which Routes can attach to them by Route kind, namespace, or hostname. If 1 of 2 Gateway listeners accept attachment from the referencing Route, the Route MUST be considered successfully attached. If no Gateway listeners accept attachment from this Route, the Route MUST be considered detached from the Gateway. \n Support: Core" type: string maxLength: 253 minLength: 1 pattern: ^[a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*$ serviceType: description: Optional service type for Kubernetes solver service. Supported values are NodePort or ClusterIP. If unset, defaults to NodePort. type: string ingress: description: The ingress based HTTP01 challenge solver will solve challenges by creating or modifying Ingress resources in order to route requests for '/.well-known/acme-challenge/XYZ' to 'challenge solver' pods that are provisioned by cert-manager for each Challenge to be completed. type: object properties: class: description: This field configures the annotation `kubernetes.io/ingress.class` when creating Ingress resources to solve ACME challenges that use this challenge solver. Only one of `class`, `name` or `ingressClassName` may be specified. type: string ingressClassName: description: This field configures the field `ingressClassName` on the created Ingress resources used to solve ACME challenges that use this challenge solver. This is the recommended way of configuring the ingress class. Only one of `class`, `name` or `ingressClassName` may be specified. type: string ingressTemplate: description: Optional ingress template used to configure the ACME challenge solver ingress used for HTTP01 challenges. type: object properties: metadata: description: ObjectMeta overrides for the ingress used to solve HTTP01 challenges. 
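# Illustrative aside (not part of the rendered CRDs): a gatewayHTTPRoute HTTP01
# solver per the (experimental) schema above, attaching the generated HTTPRoute
# to a hypothetical Gateway in another namespace (the cross-namespace reference
# must be allowed by that Gateway's AllowedRoutes or a ReferenceGrant).
#
#   http01:
#     gatewayHTTPRoute:
#       parentRefs:
#         - kind: Gateway
#           name: public-gateway
#           namespace: gateway-system
#           sectionName: http            # optional: target a specific listener
#       labels:
#         acme-solver: "true"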
Only the 'labels' and 'annotations' fields may be set. If labels or annotations overlap with in-built values, the values here will override the in-built values. type: object properties: annotations: description: Annotations that should be added to the created ACME HTTP01 solver ingress. type: object additionalProperties: type: string labels: description: Labels that should be added to the created ACME HTTP01 solver ingress. type: object additionalProperties: type: string name: description: The name of the ingress resource that should have ACME challenge solving routes inserted into it in order to solve HTTP01 challenges. This is typically used in conjunction with ingress controllers like ingress-gce, which maintains a 1:1 mapping between external IPs and ingress resources. Only one of `class`, `name` or `ingressClassName` may be specified. type: string podTemplate: description: Optional pod template used to configure the ACME challenge solver pods used for HTTP01 challenges. type: object properties: metadata: description: ObjectMeta overrides for the pod used to solve HTTP01 challenges. Only the 'labels' and 'annotations' fields may be set. If labels or annotations overlap with in-built values, the values here will override the in-built values. type: object properties: annotations: description: Annotations that should be added to the create ACME HTTP01 solver pods. type: object additionalProperties: type: string labels: description: Labels that should be added to the created ACME HTTP01 solver pods. type: object additionalProperties: type: string spec: description: PodSpec defines overrides for the HTTP01 challenge solver pod. Check ACMEChallengeSolverHTTP01IngressPodSpec to find out currently supported fields. All other fields will be ignored. type: object properties: affinity: description: If specified, the pod's scheduling constraints type: object properties: nodeAffinity: description: Describes node affinity scheduling rules for the pod. type: object properties: preferredDuringSchedulingIgnoredDuringExecution: description: The scheduler will prefer to schedule pods to nodes that satisfy the affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding "weight" to the sum if the node matches the corresponding matchExpressions; the node(s) with the highest sum are the most preferred. type: array items: description: An empty preferred scheduling term matches all objects with implicit weight 0 (i.e. it's a no-op). A null preferred scheduling term matches no objects (i.e. is also a no-op). type: object required: - preference - weight properties: preference: description: A node selector term, associated with the corresponding weight. type: object properties: matchExpressions: description: A list of node selector requirements by node's labels. type: array items: description: A node selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: The label key that the selector applies to. type: string operator: description: Represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. 
type: string values: description: An array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. If the operator is Gt or Lt, the values array must have a single element, which will be interpreted as an integer. This array is replaced during a strategic merge patch. type: array items: type: string matchFields: description: A list of node selector requirements by node's fields. type: array items: description: A node selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: The label key that the selector applies to. type: string operator: description: Represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. type: string values: description: An array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. If the operator is Gt or Lt, the values array must have a single element, which will be interpreted as an integer. This array is replaced during a strategic merge patch. type: array items: type: string x-kubernetes-map-type: atomic weight: description: Weight associated with matching the corresponding nodeSelectorTerm, in the range 1-100. type: integer format: int32 requiredDuringSchedulingIgnoredDuringExecution: description: If the affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to an update), the system may or may not try to eventually evict the pod from its node. type: object required: - nodeSelectorTerms properties: nodeSelectorTerms: description: Required. A list of node selector terms. The terms are ORed. type: array items: description: A null or empty node selector term matches no objects. The requirements of them are ANDed. The TopologySelectorTerm type implements a subset of the NodeSelectorTerm. type: object properties: matchExpressions: description: A list of node selector requirements by node's labels. type: array items: description: A node selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: The label key that the selector applies to. type: string operator: description: Represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. type: string values: description: An array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. If the operator is Gt or Lt, the values array must have a single element, which will be interpreted as an integer. This array is replaced during a strategic merge patch. type: array items: type: string matchFields: description: A list of node selector requirements by node's fields. type: array items: description: A node selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: The label key that the selector applies to. 
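# Illustrative aside (not part of the rendered CRDs): a required nodeAffinity
# term for the HTTP01 solver podTemplate, following the nodeSelectorTerm schema
# above. The architecture constraint is only an example.
#
#   affinity:
#     nodeAffinity:
#       requiredDuringSchedulingIgnoredDuringExecution:
#         nodeSelectorTerms:
#           - matchExpressions:
#               - key: kubernetes.io/arch
#                 operator: In
#                 values:
#                   - amd64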
type: string operator: description: Represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. type: string values: description: An array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. If the operator is Gt or Lt, the values array must have a single element, which will be interpreted as an integer. This array is replaced during a strategic merge patch. type: array items: type: string x-kubernetes-map-type: atomic x-kubernetes-map-type: atomic podAffinity: description: Describes pod affinity scheduling rules (e.g. co-locate this pod in the same node, zone, etc. as some other pod(s)). type: object properties: preferredDuringSchedulingIgnoredDuringExecution: description: The scheduler will prefer to schedule pods to nodes that satisfy the affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding "weight" to the sum if the node has pods which matches the corresponding podAffinityTerm; the node(s) with the highest sum are the most preferred. type: array items: description: The weights of all of the matched WeightedPodAffinityTerm fields are added per-node to find the most preferred node(s) type: object required: - podAffinityTerm - weight properties: podAffinityTerm: description: Required. A pod affinity term, associated with the corresponding weight. type: object required: - topologyKey properties: labelSelector: description: A label query over a set of resources, in this case pods. If it's null, this PodAffinityTerm matches with no Pods. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic matchLabelKeys: description: MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. 
The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MatchLabelKeys and LabelSelector. Also, MatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic mismatchLabelKeys: description: MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MismatchLabelKeys and LabelSelector. Also, MismatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic namespaceSelector: description: A label query over the set of namespaces that the term applies to. The term is applied to the union of the namespaces selected by this field and the ones listed in the namespaces field. null selector and null or empty namespaces list means "this pod's namespace". An empty selector ({}) matches all namespaces. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic namespaces: description: namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means "this pod's namespace". 
type: array items: type: string topologyKey: description: This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed. type: string weight: description: weight associated with matching the corresponding podAffinityTerm, in the range 1-100. type: integer format: int32 requiredDuringSchedulingIgnoredDuringExecution: description: If the affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to a pod label update), the system may or may not try to eventually evict the pod from its node. When there are multiple elements, the lists of nodes corresponding to each podAffinityTerm are intersected, i.e. all terms must be satisfied. type: array items: description: Defines a set of pods (namely those matching the labelSelector relative to the given namespace(s)) that this pod should be co-located (affinity) or not co-located (anti-affinity) with, where co-located is defined as running on a node whose value of the label with key matches that of any node on which a pod of the set of pods is running type: object required: - topologyKey properties: labelSelector: description: A label query over a set of resources, in this case pods. If it's null, this PodAffinityTerm matches with no Pods. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic matchLabelKeys: description: MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MatchLabelKeys and LabelSelector. Also, MatchLabelKeys cannot be set when LabelSelector isn't set. 
This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic mismatchLabelKeys: description: MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MismatchLabelKeys and LabelSelector. Also, MismatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic namespaceSelector: description: A label query over the set of namespaces that the term applies to. The term is applied to the union of the namespaces selected by this field and the ones listed in the namespaces field. null selector and null or empty namespaces list means "this pod's namespace". An empty selector ({}) matches all namespaces. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic namespaces: description: namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means "this pod's namespace". type: array items: type: string topologyKey: description: This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed. type: string podAntiAffinity: description: Describes pod anti-affinity scheduling rules (e.g. avoid putting this pod in the same node, zone, etc. as some other pod(s)). 
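# Illustrative aside (not part of the rendered CRDs): a preferred podAffinity
# term per the schema above, nudging solver pods onto nodes that already run a
# hypothetical ingress controller labelled app.kubernetes.io/name=ingress-nginx.
#
#   affinity:
#     podAffinity:
#       preferredDuringSchedulingIgnoredDuringExecution:
#         - weight: 50
#           podAffinityTerm:
#             topologyKey: kubernetes.io/hostname
#             labelSelector:
#               matchLabels:
#                 app.kubernetes.io/name: ingress-nginx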
type: object properties: preferredDuringSchedulingIgnoredDuringExecution: description: The scheduler will prefer to schedule pods to nodes that satisfy the anti-affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling anti-affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding "weight" to the sum if the node has pods which matches the corresponding podAffinityTerm; the node(s) with the highest sum are the most preferred. type: array items: description: The weights of all of the matched WeightedPodAffinityTerm fields are added per-node to find the most preferred node(s) type: object required: - podAffinityTerm - weight properties: podAffinityTerm: description: Required. A pod affinity term, associated with the corresponding weight. type: object required: - topologyKey properties: labelSelector: description: A label query over a set of resources, in this case pods. If it's null, this PodAffinityTerm matches with no Pods. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic matchLabelKeys: description: MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MatchLabelKeys and LabelSelector. Also, MatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic mismatchLabelKeys: description: MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. 
The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MismatchLabelKeys and LabelSelector. Also, MismatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic namespaceSelector: description: A label query over the set of namespaces that the term applies to. The term is applied to the union of the namespaces selected by this field and the ones listed in the namespaces field. null selector and null or empty namespaces list means "this pod's namespace". An empty selector ({}) matches all namespaces. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic namespaces: description: namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means "this pod's namespace". type: array items: type: string topologyKey: description: This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed. type: string weight: description: weight associated with matching the corresponding podAffinityTerm, in the range 1-100. type: integer format: int32 requiredDuringSchedulingIgnoredDuringExecution: description: If the anti-affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the anti-affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to a pod label update), the system may or may not try to eventually evict the pod from its node. 
When there are multiple elements, the lists of nodes corresponding to each podAffinityTerm are intersected, i.e. all terms must be satisfied. type: array items: description: Defines a set of pods (namely those matching the labelSelector relative to the given namespace(s)) that this pod should be co-located (affinity) or not co-located (anti-affinity) with, where co-located is defined as running on a node whose value of the label with key matches that of any node on which a pod of the set of pods is running type: object required: - topologyKey properties: labelSelector: description: A label query over a set of resources, in this case pods. If it's null, this PodAffinityTerm matches with no Pods. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic matchLabelKeys: description: MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MatchLabelKeys and LabelSelector. Also, MatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic mismatchLabelKeys: description: MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MismatchLabelKeys and LabelSelector. Also, MismatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. 
type: array items: type: string x-kubernetes-list-type: atomic namespaceSelector: description: A label query over the set of namespaces that the term applies to. The term is applied to the union of the namespaces selected by this field and the ones listed in the namespaces field. null selector and null or empty namespaces list means "this pod's namespace". An empty selector ({}) matches all namespaces. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic namespaces: description: namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means "this pod's namespace". type: array items: type: string topologyKey: description: This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed. type: string imagePullSecrets: description: If specified, the pod's imagePullSecrets type: array items: description: LocalObjectReference contains enough information to let you locate the referenced object inside the same namespace. type: object properties: name: description: 'Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names TODO: Add other useful fields. apiVersion, kind, uid?' type: string x-kubernetes-map-type: atomic nodeSelector: description: 'NodeSelector is a selector which must be true for the pod to fit on a node. Selector which must match a node''s labels for the pod to be scheduled on that node. More info: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/' type: object additionalProperties: type: string priorityClassName: description: If specified, the pod's priorityClassName. type: string serviceAccountName: description: If specified, the pod's service account type: string tolerations: description: If specified, the pod's tolerations. type: array items: description: The pod this Toleration is attached to tolerates any taint that matches the triple using the matching operator . 
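# A minimal sketch of how the pod (anti-)affinity fields described above are
# typically set on the ACME HTTP01 solver pod template. The issuer name, the
# solver pod label and the topology key below are assumptions for illustration,
# not values collected from this cluster.
---
apiVersion: cert-manager.io/v1
kind: ClusterIssuer
metadata:
  name: example-acme                        # hypothetical name
spec:
  acme:
    server: https://acme-staging-v02.api.letsencrypt.org/directory
    privateKeySecretRef:
      name: example-acme-account-key        # hypothetical Secret name
    solvers:
    - http01:
        ingress:
          podTemplate:
            spec:
              affinity:
                podAntiAffinity:
                  preferredDuringSchedulingIgnoredDuringExecution:
                  - weight: 100
                    podAffinityTerm:
                      labelSelector:
                        matchLabels:
                          acme.cert-manager.io/http01-solver: "true"   # assumed solver pod label
                      topologyKey: kubernetes.io/hostname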
type: object properties: effect: description: Effect indicates the taint effect to match. Empty means match all taint effects. When specified, allowed values are NoSchedule, PreferNoSchedule and NoExecute. type: string key: description: Key is the taint key that the toleration applies to. Empty means match all taint keys. If the key is empty, operator must be Exists; this combination means to match all values and all keys. type: string operator: description: Operator represents a key's relationship to the value. Valid operators are Exists and Equal. Defaults to Equal. Exists is equivalent to wildcard for value, so that a pod can tolerate all taints of a particular category. type: string tolerationSeconds: description: TolerationSeconds represents the period of time the toleration (which must be of effect NoExecute, otherwise this field is ignored) tolerates the taint. By default, it is not set, which means tolerate the taint forever (do not evict). Zero and negative values will be treated as 0 (evict immediately) by the system. type: integer format: int64 value: description: Value is the taint value the toleration matches to. If the operator is Exists, the value should be empty, otherwise just a regular string. type: string serviceType: description: Optional service type for Kubernetes solver service. Supported values are NodePort or ClusterIP. If unset, defaults to NodePort. type: string selector: description: Selector selects a set of DNSNames on the Certificate resource that should be solved using this challenge solver. If not specified, the solver will be treated as the 'default' solver with the lowest priority, i.e. if any other solver has a more specific match, it will be used instead. type: object properties: dnsNames: description: List of DNSNames that this solver will be used to solve. If specified and a match is found, a dnsNames selector will take precedence over a dnsZones selector. If multiple solvers match with the same dnsNames value, the solver with the most matching labels in matchLabels will be selected. If neither has more matches, the solver defined earlier in the list will be selected. type: array items: type: string dnsZones: description: List of DNSZones that this solver will be used to solve. The most specific DNS zone match specified here will take precedence over other DNS zone matches, so a solver specifying sys.example.com will be selected over one specifying example.com for the domain www.sys.example.com. If multiple solvers match with the same dnsZones value, the solver with the most matching labels in matchLabels will be selected. If neither has more matches, the solver defined earlier in the list will be selected. type: array items: type: string matchLabels: description: A label selector that is used to refine the set of certificate's that this challenge solver will apply to. type: object additionalProperties: type: string ca: description: CA configures this issuer to sign certificates using a signing CA keypair stored in a Secret resource. This is used to build internal PKIs that are managed by cert-manager. type: object required: - secretName properties: crlDistributionPoints: description: The CRL distribution points is an X.509 v3 certificate extension which identifies the location of the CRL from which the revocation of this certificate can be checked. If not set, certificates will be issued without distribution points set. 
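# A minimal sketch of the remaining solver pod overrides described above
# (imagePullSecrets, nodeSelector, priorityClassName, serviceAccountName,
# tolerations) together with serviceType and a dnsZones selector; all names,
# labels and the zone are hypothetical.
---
apiVersion: cert-manager.io/v1
kind: ClusterIssuer
metadata:
  name: example-acme-tolerating             # hypothetical name
spec:
  acme:
    server: https://acme-staging-v02.api.letsencrypt.org/directory
    privateKeySecretRef:
      name: example-acme-account-key        # hypothetical Secret name
    solvers:
    - selector:
        dnsZones:
        - example.com                       # solve only for this zone (hypothetical)
      http01:
        ingress:
          serviceType: ClusterIP            # instead of the default NodePort
          podTemplate:
            spec:
              nodeSelector:
                kubernetes.io/os: linux
              priorityClassName: example-high-priority   # hypothetical PriorityClass
              serviceAccountName: example-solver-sa      # hypothetical ServiceAccount
              imagePullSecrets:
              - name: example-registry-pull-secret       # hypothetical Secret
              tolerations:
              - key: node-role.kubernetes.io/master
                operator: Exists
                effect: NoSchedule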
type: array items: type: string issuingCertificateURLs: description: IssuingCertificateURLs is a list of URLs which this issuer should embed into certificates it creates. See https://www.rfc-editor.org/rfc/rfc5280#section-4.2.2.1 for more details. As an example, such a URL might be "http://ca.domain.com/ca.crt". type: array items: type: string ocspServers: description: The OCSP server list is an X.509 v3 extension that defines a list of URLs of OCSP responders. The OCSP responders can be queried for the revocation status of an issued certificate. If not set, the certificate will be issued with no OCSP servers set. For example, an OCSP server URL could be "http://ocsp.int-x3.letsencrypt.org". type: array items: type: string secretName: description: SecretName is the name of the secret used to sign Certificates issued by this Issuer. type: string selfSigned: description: SelfSigned configures this issuer to 'self sign' certificates using the private key used to create the CertificateRequest object. type: object properties: crlDistributionPoints: description: The CRL distribution points is an X.509 v3 certificate extension which identifies the location of the CRL from which the revocation of this certificate can be checked. If not set certificate will be issued without CDP. Values are strings. type: array items: type: string vault: description: Vault configures this issuer to sign certificates using a HashiCorp Vault PKI backend. type: object required: - auth - path - server properties: auth: description: Auth configures how cert-manager authenticates with the Vault server. type: object properties: appRole: description: AppRole authenticates with Vault using the App Role auth mechanism, with the role and secret stored in a Kubernetes Secret resource. type: object required: - path - roleId - secretRef properties: path: description: 'Path where the App Role authentication backend is mounted in Vault, e.g: "approle"' type: string roleId: description: RoleID configured in the App Role authentication backend when setting up the authentication backend in Vault. type: string secretRef: description: Reference to a key in a Secret that contains the App Role secret used to authenticate with Vault. The `key` field must be specified and denotes which entry within the Secret resource is used as the app role secret. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string kubernetes: description: Kubernetes authenticates with Vault by passing the ServiceAccount token stored in the named Secret resource to the Vault server. type: object required: - role properties: mountPath: description: The Vault mountPath here is the mount path to use when authenticating with Vault. For example, setting a value to `/v1/auth/foo`, will use the path `/v1/auth/foo/login` to authenticate with Vault. If unspecified, the default value "/v1/auth/kubernetes" will be used. type: string role: description: A required field containing the Vault Role to assume. A Role binds a Kubernetes ServiceAccount with a set of Vault policies. type: string secretRef: description: The required Secret field containing a Kubernetes ServiceAccount JWT used for authenticating with Vault. 
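# A minimal sketch of the ca and selfSigned issuer configurations described
# above; the Secret name and URLs are hypothetical.
---
apiVersion: cert-manager.io/v1
kind: ClusterIssuer
metadata:
  name: example-internal-ca                 # hypothetical name
spec:
  ca:
    secretName: example-ca-keypair          # Secret holding the CA tls.crt/tls.key (hypothetical)
    crlDistributionPoints:
    - http://crl.example.com/ca.crl         # hypothetical URL
    ocspServers:
    - http://ocsp.example.com               # hypothetical URL
---
apiVersion: cert-manager.io/v1
kind: ClusterIssuer
metadata:
  name: example-selfsigned                  # hypothetical name
spec:
  selfSigned: {}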
Use of 'ambient credentials' is not supported. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string serviceAccountRef: description: A reference to a service account that will be used to request a bound token (also known as "projected token"). Compared to using "secretRef", using this field means that you don't rely on statically bound tokens. To use this field, you must configure an RBAC rule to let cert-manager request a token. type: object required: - name properties: name: description: Name of the ServiceAccount used to request a token. type: string tokenSecretRef: description: TokenSecretRef authenticates with Vault by presenting a token. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string caBundle: description: Base64-encoded bundle of PEM CAs which will be used to validate the certificate chain presented by Vault. Only used if using HTTPS to connect to Vault and ignored for HTTP connections. Mutually exclusive with CABundleSecretRef. If neither CABundle nor CABundleSecretRef are defined, the certificate bundle in the cert-manager controller container is used to validate the TLS connection. type: string format: byte caBundleSecretRef: description: Reference to a Secret containing a bundle of PEM-encoded CAs to use when verifying the certificate chain presented by Vault when using HTTPS. Mutually exclusive with CABundle. If neither CABundle nor CABundleSecretRef are defined, the certificate bundle in the cert-manager controller container is used to validate the TLS connection. If no key for the Secret is specified, cert-manager will default to 'ca.crt'. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string namespace: description: 'Name of the vault namespace. Namespaces is a set of features within Vault Enterprise that allows Vault environments to support Secure Multi-tenancy. e.g: "ns1" More about namespaces can be found here https://www.vaultproject.io/docs/enterprise/namespaces' type: string path: description: 'Path is the mount path of the Vault PKI backend''s `sign` endpoint, e.g: "my_pki_mount/sign/my-role-name".' type: string server: description: 'Server is the connection address for the Vault server, e.g: "https://vault.example.com:8200".' type: string venafi: description: Venafi configures this issuer to sign certificates using a Venafi TPP or Venafi Cloud policy zone. type: object required: - zone properties: cloud: description: Cloud specifies the Venafi cloud configuration settings. Only one of TPP or Cloud may be specified. 
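# A minimal sketch of the vault issuer described above, using the Kubernetes
# auth backend with a bound ServiceAccount token; the server, mount path,
# sign path, role and names are hypothetical.
---
apiVersion: cert-manager.io/v1
kind: ClusterIssuer
metadata:
  name: example-vault                       # hypothetical name
spec:
  vault:
    server: https://vault.example.com:8200
    path: pki_int/sign/example-role         # hypothetical PKI `sign` endpoint
    caBundleSecretRef:
      name: vault-ca-bundle                 # hypothetical Secret with the Vault CA
      key: ca.crt
    auth:
      kubernetes:
        mountPath: /v1/auth/kubernetes
        role: cert-manager                  # hypothetical Vault role
        serviceAccountRef:
          name: cert-manager-vault          # hypothetical ServiceAccount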
type: object required: - apiTokenSecretRef properties: apiTokenSecretRef: description: APITokenSecretRef is a secret key selector for the Venafi Cloud API token. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string url: description: URL is the base URL for Venafi Cloud. Defaults to "https://api.venafi.cloud/v1". type: string tpp: description: TPP specifies Trust Protection Platform configuration settings. Only one of TPP or Cloud may be specified. type: object required: - credentialsRef - url properties: caBundle: description: Base64-encoded bundle of PEM CAs which will be used to validate the certificate chain presented by the TPP server. Only used if using HTTPS; ignored for HTTP. If undefined, the certificate bundle in the cert-manager controller container is used to validate the chain. type: string format: byte credentialsRef: description: CredentialsRef is a reference to a Secret containing the username and password for the TPP server. The secret must contain two keys, 'username' and 'password'. type: object required: - name properties: name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string url: description: 'URL is the base URL for the vedsdk endpoint of the Venafi TPP instance, for example: "https://tpp.example.com/vedsdk".' type: string zone: description: Zone is the Venafi Policy Zone to use for this issuer. All requests made to the Venafi platform will be restricted by the named zone policy. This field is required. type: string status: description: Status of the ClusterIssuer. This is set and managed automatically. type: object properties: acme: description: ACME specific status options. This field should only be set if the Issuer is configured to use an ACME server to issue certificates. type: object properties: lastPrivateKeyHash: description: LastPrivateKeyHash is a hash of the private key associated with the latest registered ACME account, in order to track changes made to registered account associated with the Issuer type: string lastRegisteredEmail: description: LastRegisteredEmail is the email associated with the latest registered ACME account, in order to track changes made to registered account associated with the Issuer type: string uri: description: URI is the unique account identifier, which can also be used to retrieve account details from the CA type: string conditions: description: List of status conditions to indicate the status of a CertificateRequest. Known condition types are `Ready`. type: array items: description: IssuerCondition contains condition information for an Issuer. type: object required: - status - type properties: lastTransitionTime: description: LastTransitionTime is the timestamp corresponding to the last status change of this condition. type: string format: date-time message: description: Message is a human readable description of the details of the last transition, complementing reason. type: string observedGeneration: description: If set, this represents the .metadata.generation that the condition was set based upon. 
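# A minimal sketch of the venafi issuer described above, here against a TPP
# instance; the zone, URL and credentials Secret are hypothetical.
---
apiVersion: cert-manager.io/v1
kind: ClusterIssuer
metadata:
  name: example-venafi-tpp                  # hypothetical name
spec:
  venafi:
    zone: example-zone                      # hypothetical Venafi policy zone
    tpp:
      url: https://tpp.example.com/vedsdk
      credentialsRef:
        name: venafi-tpp-credentials        # Secret with 'username' and 'password' keys (hypothetical)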
For instance, if .metadata.generation is currently 12, but the .status.condition[x].observedGeneration is 9, the condition is out of date with respect to the current state of the Issuer. type: integer format: int64 reason: description: Reason is a brief machine readable explanation for the condition's last transition. type: string status: description: Status of the condition, one of (`True`, `False`, `Unknown`). type: string enum: - "True" - "False" - Unknown type: description: Type of the condition, known values are (`Ready`). type: string x-kubernetes-list-map-keys: - type x-kubernetes-list-type: map served: true storage: true --- # Source: cert-manager/templates/crds.yaml apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition metadata: name: issuers.cert-manager.io labels: app: 'cert-manager' app.kubernetes.io/name: 'cert-manager' app.kubernetes.io/instance: "cert-manager" # Generated labels app.kubernetes.io/version: "v1.14.4" spec: group: cert-manager.io names: kind: Issuer listKind: IssuerList plural: issuers singular: issuer categories: - cert-manager scope: Namespaced versions: - name: v1 subresources: status: {} additionalPrinterColumns: - jsonPath: .status.conditions[?(@.type=="Ready")].status name: Ready type: string - jsonPath: .status.conditions[?(@.type=="Ready")].message name: Status priority: 1 type: string - jsonPath: .metadata.creationTimestamp description: CreationTimestamp is a timestamp representing the server time when this object was created. It is not guaranteed to be set in happens-before order across separate operations. Clients may not set this value. It is represented in RFC3339 form and is in UTC. name: Age type: date schema: openAPIV3Schema: description: An Issuer represents a certificate issuing authority which can be referenced as part of `issuerRef` fields. It is scoped to a single namespace and can therefore only be referenced by resources within the same namespace. type: object required: - spec properties: apiVersion: description: 'APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources' type: string kind: description: 'Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds' type: string metadata: type: object spec: description: Desired state of the Issuer resource. type: object properties: acme: description: ACME configures this issuer to communicate with a RFC8555 (ACME) server to obtain signed x509 certificates. type: object required: - privateKeySecretRef - server properties: caBundle: description: Base64-encoded bundle of PEM CAs which can be used to validate the certificate chain presented by the ACME server. Mutually exclusive with SkipTLSVerify; prefer using CABundle to prevent various kinds of security vulnerabilities. If CABundle and SkipTLSVerify are unset, the system certificate bundle inside the container is used to validate the TLS connection. type: string format: byte disableAccountKeyGeneration: description: Enables or disables generating a new ACME account key. 
If true, the Issuer resource will *not* request a new account but will expect the account key to be supplied via an existing secret. If false, the cert-manager system will generate a new ACME account key for the Issuer. Defaults to false. type: boolean email: description: Email is the email address to be associated with the ACME account. This field is optional, but it is strongly recommended to be set. It will be used to contact you in case of issues with your account or certificates, including expiry notification emails. This field may be updated after the account is initially registered. type: string enableDurationFeature: description: Enables requesting a Not After date on certificates that matches the duration of the certificate. This is not supported by all ACME servers like Let's Encrypt. If set to true when the ACME server does not support it it will create an error on the Order. Defaults to false. type: boolean externalAccountBinding: description: ExternalAccountBinding is a reference to a CA external account of the ACME server. If set, upon registration cert-manager will attempt to associate the given external account credentials with the registered ACME account. type: object required: - keyID - keySecretRef properties: keyAlgorithm: description: 'Deprecated: keyAlgorithm field exists for historical compatibility reasons and should not be used. The algorithm is now hardcoded to HS256 in golang/x/crypto/acme.' type: string enum: - HS256 - HS384 - HS512 keyID: description: keyID is the ID of the CA key that the External Account is bound to. type: string keySecretRef: description: keySecretRef is a Secret Key Selector referencing a data item in a Kubernetes Secret which holds the symmetric MAC key of the External Account Binding. The `key` is the index string that is paired with the key data in the Secret and should not be confused with the key data itself, or indeed with the External Account Binding keyID above. The secret key stored in the Secret **must** be un-padded, base64 URL encoded data. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string preferredChain: description: 'PreferredChain is the chain to use if the ACME server outputs multiple. PreferredChain is no guarantee that this one gets delivered by the ACME endpoint. For example, for Let''s Encrypt''s DST crosssign you would use: "DST Root CA X3" or "ISRG Root X1" for the newer Let''s Encrypt root CA. This value picks the first certificate bundle in the ACME alternative chains that has a certificate with this value as its issuer''s CN' type: string maxLength: 64 privateKeySecretRef: description: PrivateKey is the name of a Kubernetes Secret resource that will be used to store the automatically generated ACME account private key. Optionally, a `key` may be specified to select a specific entry within the named Secret resource. If `key` is not specified, a default of `tls.key` will be used. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. 
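# A minimal sketch of the acme issuer fields described above (server, email,
# privateKeySecretRef and an optional externalAccountBinding); the namespace,
# email, EAB key ID and Secret names are hypothetical.
---
apiVersion: cert-manager.io/v1
kind: Issuer
metadata:
  name: example-acme
  namespace: example-ns                     # Issuer is namespaced (hypothetical namespace)
spec:
  acme:
    server: https://acme-staging-v02.api.letsencrypt.org/directory
    email: certs@example.com                # hypothetical contact address
    privateKeySecretRef:
      name: example-acme-account-key        # hypothetical Secret name
    externalAccountBinding:                 # only needed for CAs that require EAB
      keyID: example-eab-key-id             # hypothetical key ID
      keySecretRef:
        name: example-eab-hmac              # un-padded, base64 URL encoded MAC key (hypothetical)
        key: secret
    solvers:
    - http01:
        ingress:
          ingressClassName: nginx           # hypothetical ingress class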
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string server: description: 'Server is the URL used to access the ACME server''s ''directory'' endpoint. For example, for Let''s Encrypt''s staging endpoint, you would use: "https://acme-staging-v02.api.letsencrypt.org/directory". Only ACME v2 endpoints (i.e. RFC 8555) are supported.' type: string skipTLSVerify: description: 'INSECURE: Enables or disables validation of the ACME server TLS certificate. If true, requests to the ACME server will not have the TLS certificate chain validated. Mutually exclusive with CABundle; prefer using CABundle to prevent various kinds of security vulnerabilities. Only enable this option in development environments. If CABundle and SkipTLSVerify are unset, the system certificate bundle inside the container is used to validate the TLS connection. Defaults to false.' type: boolean solvers: description: 'Solvers is a list of challenge solvers that will be used to solve ACME challenges for the matching domains. Solver configurations must be provided in order to obtain certificates from an ACME server. For more information, see: https://cert-manager.io/docs/configuration/acme/' type: array items: description: An ACMEChallengeSolver describes how to solve ACME challenges for the issuer it is part of. A selector may be provided to use different solving strategies for different DNS names. Only one of HTTP01 or DNS01 must be provided. type: object properties: dns01: description: Configures cert-manager to attempt to complete authorizations by performing the DNS01 challenge flow. type: object properties: acmeDNS: description: Use the 'ACME DNS' (https://github.com/joohoi/acme-dns) API to manage DNS01 challenge records. type: object required: - accountSecretRef - host properties: accountSecretRef: description: A reference to a specific 'key' within a Secret resource. In some instances, `key` is a required field. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string host: type: string akamai: description: Use the Akamai DNS zone management API to manage DNS01 challenge records. type: object required: - accessTokenSecretRef - clientSecretSecretRef - clientTokenSecretRef - serviceConsumerDomain properties: accessTokenSecretRef: description: A reference to a specific 'key' within a Secret resource. In some instances, `key` is a required field. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string clientSecretSecretRef: description: A reference to a specific 'key' within a Secret resource. In some instances, `key` is a required field. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string clientTokenSecretRef: description: A reference to a specific 'key' within a Secret resource. In some instances, `key` is a required field. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string serviceConsumerDomain: type: string azureDNS: description: Use the Microsoft Azure DNS API to manage DNS01 challenge records. type: object required: - resourceGroupName - subscriptionID properties: clientID: description: 'Auth: Azure Service Principal: The ClientID of the Azure Service Principal used to authenticate with Azure DNS. If set, ClientSecret and TenantID must also be set.' type: string clientSecretSecretRef: description: 'Auth: Azure Service Principal: A reference to a Secret containing the password associated with the Service Principal. If set, ClientID and TenantID must also be set.' type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string environment: description: name of the Azure environment (default AzurePublicCloud) type: string enum: - AzurePublicCloud - AzureChinaCloud - AzureGermanCloud - AzureUSGovernmentCloud hostedZoneName: description: name of the DNS zone that should be used type: string managedIdentity: description: 'Auth: Azure Workload Identity or Azure Managed Service Identity: Settings to enable Azure Workload Identity or Azure Managed Service Identity If set, ClientID, ClientSecret and TenantID must not be set.' type: object properties: clientID: description: client ID of the managed identity, can not be used at the same time as resourceID type: string resourceID: description: resource ID of the managed identity, can not be used at the same time as clientID Cannot be used for Azure Managed Service Identity type: string resourceGroupName: description: resource group the DNS zone is located in type: string subscriptionID: description: ID of the Azure subscription type: string tenantID: description: 'Auth: Azure Service Principal: The TenantID of the Azure Service Principal used to authenticate with Azure DNS. If set, ClientID and ClientSecret must also be set.' type: string cloudDNS: description: Use the Google Cloud DNS API to manage DNS01 challenge records. type: object required: - project properties: hostedZoneName: description: HostedZoneName is an optional field that tells cert-manager in which Cloud DNS zone the challenge record has to be created. If left empty cert-manager will automatically choose a zone. type: string project: type: string serviceAccountSecretRef: description: A reference to a specific 'key' within a Secret resource. In some instances, `key` is a required field. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. 
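# A minimal sketch of a dns01 solver using the cloudDNS provider described
# above; the GCP project, zone and Secret names are hypothetical.
---
apiVersion: cert-manager.io/v1
kind: Issuer
metadata:
  name: example-acme-clouddns
  namespace: example-ns                     # hypothetical namespace
spec:
  acme:
    server: https://acme-v02.api.letsencrypt.org/directory
    privateKeySecretRef:
      name: example-acme-account-key        # hypothetical Secret name
    solvers:
    - dns01:
        cloudDNS:
          project: example-gcp-project      # hypothetical GCP project
          hostedZoneName: example-com       # optional; auto-detected if omitted (hypothetical)
          serviceAccountSecretRef:
            name: clouddns-dns01-sa         # hypothetical Secret
            key: key.json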
type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string cloudflare: description: Use the Cloudflare API to manage DNS01 challenge records. type: object properties: apiKeySecretRef: description: 'API key to use to authenticate with Cloudflare. Note: using an API token to authenticate is now the recommended method as it allows greater control of permissions.' type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string apiTokenSecretRef: description: API token used to authenticate with Cloudflare. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string email: description: Email of the account, only required when using API key based authentication. type: string cnameStrategy: description: CNAMEStrategy configures how the DNS01 provider should handle CNAME records when found in DNS zones. type: string enum: - None - Follow digitalocean: description: Use the DigitalOcean DNS API to manage DNS01 challenge records. type: object required: - tokenSecretRef properties: tokenSecretRef: description: A reference to a specific 'key' within a Secret resource. In some instances, `key` is a required field. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string rfc2136: description: Use RFC2136 ("Dynamic Updates in the Domain Name System") (https://datatracker.ietf.org/doc/rfc2136/) to manage DNS01 challenge records. type: object required: - nameserver properties: nameserver: description: The IP address or hostname of an authoritative DNS server supporting RFC2136 in the form host:port. If the host is an IPv6 address it must be enclosed in square brackets (e.g [2001:db8::1]) ; port is optional. This field is required. type: string tsigAlgorithm: description: 'The TSIG Algorithm configured in the DNS supporting RFC2136. Used only when ``tsigSecretSecretRef`` and ``tsigKeyName`` are defined. Supported values are (case-insensitive): ``HMACMD5`` (default), ``HMACSHA1``, ``HMACSHA256`` or ``HMACSHA512``.' type: string tsigKeyName: description: The TSIG Key name configured in the DNS. If ``tsigSecretSecretRef`` is defined, this field is required. type: string tsigSecretSecretRef: description: The name of the secret containing the TSIG value. If ``tsigKeyName`` is defined, this field is required. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. 
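# A minimal sketch of a dns01 solver using the cloudflare provider with an API
# token (the recommended method noted above), scoped by a dnsZones selector;
# the zone and Secret name are hypothetical.
---
apiVersion: cert-manager.io/v1
kind: Issuer
metadata:
  name: example-acme-cloudflare
  namespace: example-ns                     # hypothetical namespace
spec:
  acme:
    server: https://acme-v02.api.letsencrypt.org/directory
    privateKeySecretRef:
      name: example-acme-account-key        # hypothetical Secret name
    solvers:
    - selector:
        dnsZones:
        - example.com                       # hypothetical zone
      dns01:
        cloudflare:
          apiTokenSecretRef:
            name: cloudflare-api-token      # hypothetical Secret
            key: api-token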
type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string route53: description: Use the AWS Route53 API to manage DNS01 challenge records. type: object required: - region properties: accessKeyID: description: 'The AccessKeyID is used for authentication. Cannot be set when SecretAccessKeyID is set. If neither the Access Key nor Key ID are set, we fall-back to using env vars, shared credentials file or AWS Instance metadata, see: https://docs.aws.amazon.com/sdk-for-go/v1/developer-guide/configuring-sdk.html#specifying-credentials' type: string accessKeyIDSecretRef: description: 'The SecretAccessKey is used for authentication. If set, pull the AWS access key ID from a key within a Kubernetes Secret. Cannot be set when AccessKeyID is set. If neither the Access Key nor Key ID are set, we fall-back to using env vars, shared credentials file or AWS Instance metadata, see: https://docs.aws.amazon.com/sdk-for-go/v1/developer-guide/configuring-sdk.html#specifying-credentials' type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string hostedZoneID: description: If set, the provider will manage only this zone in Route53 and will not do an lookup using the route53:ListHostedZonesByName api call. type: string region: description: Always set the region when using AccessKeyID and SecretAccessKey type: string role: description: Role is a Role ARN which the Route53 provider will assume using either the explicit credentials AccessKeyID/SecretAccessKey or the inferred credentials from environment variables, shared credentials file or AWS Instance metadata type: string secretAccessKeySecretRef: description: 'The SecretAccessKey is used for authentication. If neither the Access Key nor Key ID are set, we fall-back to using env vars, shared credentials file or AWS Instance metadata, see: https://docs.aws.amazon.com/sdk-for-go/v1/developer-guide/configuring-sdk.html#specifying-credentials' type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string webhook: description: Configure an external webhook based DNS01 challenge solver to manage DNS01 challenge records. type: object required: - groupName - solverName properties: config: description: Additional configuration that should be passed to the webhook apiserver when challenges are processed. This can contain arbitrary JSON data. Secret values should not be specified in this stanza. If secret values are needed (e.g. credentials for a DNS service), you should use a SecretKeySelector to reference a Secret resource. For details on the schema of this field, consult the webhook provider implementation's documentation. x-kubernetes-preserve-unknown-fields: true groupName: description: The API group name that should be used when POSTing ChallengePayload resources to the webhook apiserver. 
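# A minimal sketch of a dns01 solver using the route53 provider described
# above, with static credentials pulled from a Secret; region, hosted zone ID
# and Secret names are hypothetical.
---
apiVersion: cert-manager.io/v1
kind: Issuer
metadata:
  name: example-acme-route53
  namespace: example-ns                     # hypothetical namespace
spec:
  acme:
    server: https://acme-v02.api.letsencrypt.org/directory
    privateKeySecretRef:
      name: example-acme-account-key        # hypothetical Secret name
    solvers:
    - dns01:
        route53:
          region: us-east-1
          hostedZoneID: Z0000000EXAMPLE     # optional; skips the ListHostedZonesByName lookup (hypothetical)
          accessKeyIDSecretRef:
            name: route53-credentials       # hypothetical Secret
            key: access-key-id
          secretAccessKeySecretRef:
            name: route53-credentials
            key: secret-access-key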
This should be the same as the GroupName specified in the webhook provider implementation. type: string solverName: description: The name of the solver to use, as defined in the webhook provider implementation. This will typically be the name of the provider, e.g. 'cloudflare'. type: string http01: description: Configures cert-manager to attempt to complete authorizations by performing the HTTP01 challenge flow. It is not possible to obtain certificates for wildcard domain names (e.g. `*.example.com`) using the HTTP01 challenge mechanism. type: object properties: gatewayHTTPRoute: description: The Gateway API is a sig-network community API that models service networking in Kubernetes (https://gateway-api.sigs.k8s.io/). The Gateway solver will create HTTPRoutes with the specified labels in the same namespace as the challenge. This solver is experimental, and fields / behaviour may change in the future. type: object properties: labels: description: Custom labels that will be applied to HTTPRoutes created by cert-manager while solving HTTP-01 challenges. type: object additionalProperties: type: string parentRefs: description: 'When solving an HTTP-01 challenge, cert-manager creates an HTTPRoute. cert-manager needs to know which parentRefs should be used when creating the HTTPRoute. Usually, the parentRef references a Gateway. See: https://gateway-api.sigs.k8s.io/api-types/httproute/#attaching-to-gateways' type: array items: description: "ParentReference identifies an API object (usually a Gateway) that can be considered a parent of this resource (usually a route). There are two kinds of parent resources with \"Core\" support: \n * Gateway (Gateway conformance profile) * Service (Mesh conformance profile, experimental, ClusterIP Services only) \n This API may be extended in the future to support additional kinds of parent resources. \n The API object must be valid in the cluster; the Group and Kind must be registered in the cluster for this reference to be valid." type: object required: - name properties: group: description: "Group is the group of the referent. When unspecified, \"gateway.networking.k8s.io\" is inferred. To set the core API group (such as for a \"Service\" kind referent), Group must be explicitly set to \"\" (empty string). \n Support: Core" type: string default: gateway.networking.k8s.io maxLength: 253 pattern: ^$|^[a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*$ kind: description: "Kind is kind of the referent. \n There are two kinds of parent resources with \"Core\" support: \n * Gateway (Gateway conformance profile) * Service (Mesh conformance profile, experimental, ClusterIP Services only) \n Support for other resources is Implementation-Specific." type: string default: Gateway maxLength: 63 minLength: 1 pattern: ^[a-zA-Z]([-a-zA-Z0-9]*[a-zA-Z0-9])?$ name: description: "Name is the name of the referent. \n Support: Core" type: string maxLength: 253 minLength: 1 namespace: description: "Namespace is the namespace of the referent. When unspecified, this refers to the local namespace of the Route. \n Note that there are specific rules for ParentRefs which cross namespace boundaries. Cross-namespace references are only valid if they are explicitly allowed by something in the namespace they are referring to. For example: Gateway has the AllowedRoutes field, and ReferenceGrant provides a generic way to enable any other kind of cross-namespace reference. 
\n ParentRefs from a Route to a Service in the same namespace are \"producer\" routes, which apply default routing rules to inbound connections from any namespace to the Service. \n ParentRefs from a Route to a Service in a different namespace are \"consumer\" routes, and these routing rules are only applied to outbound connections originating from the same namespace as the Route, for which the intended destination of the connections are a Service targeted as a ParentRef of the Route. \n Support: Core" type: string maxLength: 63 minLength: 1 pattern: ^[a-z0-9]([-a-z0-9]*[a-z0-9])?$ port: description: "Port is the network port this Route targets. It can be interpreted differently based on the type of parent resource. \n When the parent resource is a Gateway, this targets all listeners listening on the specified port that also support this kind of Route(and select this Route). It's not recommended to set `Port` unless the networking behaviors specified in a Route must apply to a specific port as opposed to a listener(s) whose port(s) may be changed. When both Port and SectionName are specified, the name and port of the selected listener must match both specified values. \n When the parent resource is a Service, this targets a specific port in the Service spec. When both Port (experimental) and SectionName are specified, the name and port of the selected port must match both specified values. \n Implementations MAY choose to support other parent resources. Implementations supporting other types of parent resources MUST clearly document how/if Port is interpreted. \n For the purpose of status, an attachment is considered successful as long as the parent resource accepts it partially. For example, Gateway listeners can restrict which Routes can attach to them by Route kind, namespace, or hostname. If 1 of 2 Gateway listeners accept attachment from the referencing Route, the Route MUST be considered successfully attached. If no Gateway listeners accept attachment from this Route, the Route MUST be considered detached from the Gateway. \n Support: Extended \n " type: integer format: int32 maximum: 65535 minimum: 1 sectionName: description: "SectionName is the name of a section within the target resource. In the following resources, SectionName is interpreted as the following: \n * Gateway: Listener Name. When both Port (experimental) and SectionName are specified, the name and port of the selected listener must match both specified values. * Service: Port Name. When both Port (experimental) and SectionName are specified, the name and port of the selected listener must match both specified values. Note that attaching Routes to Services as Parents is part of experimental Mesh support and is not supported for any other purpose. \n Implementations MAY choose to support attaching Routes to other resources. If that is the case, they MUST clearly document how SectionName is interpreted. \n When unspecified (empty string), this will reference the entire resource. For the purpose of status, an attachment is considered successful if at least one section in the parent resource accepts it. For example, Gateway listeners can restrict which Routes can attach to them by Route kind, namespace, or hostname. If 1 of 2 Gateway listeners accept attachment from the referencing Route, the Route MUST be considered successfully attached. If no Gateway listeners accept attachment from this Route, the Route MUST be considered detached from the Gateway. 
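# A minimal sketch of the http01 gatewayHTTPRoute solver described above,
# which the schema notes is experimental; the Gateway parentRef and labels
# are hypothetical.
---
apiVersion: cert-manager.io/v1
kind: Issuer
metadata:
  name: example-acme-gateway
  namespace: example-ns                     # hypothetical namespace
spec:
  acme:
    server: https://acme-v02.api.letsencrypt.org/directory
    privateKeySecretRef:
      name: example-acme-account-key        # hypothetical Secret name
    solvers:
    - http01:
        gatewayHTTPRoute:
          labels:
            example.com/acme-solver: "true" # hypothetical HTTPRoute label
          parentRefs:
          - kind: Gateway
            name: example-gateway           # hypothetical Gateway
            namespace: example-gateways     # hypothetical namespace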
\n Support: Core" type: string maxLength: 253 minLength: 1 pattern: ^[a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*$ serviceType: description: Optional service type for Kubernetes solver service. Supported values are NodePort or ClusterIP. If unset, defaults to NodePort. type: string ingress: description: The ingress based HTTP01 challenge solver will solve challenges by creating or modifying Ingress resources in order to route requests for '/.well-known/acme-challenge/XYZ' to 'challenge solver' pods that are provisioned by cert-manager for each Challenge to be completed. type: object properties: class: description: This field configures the annotation `kubernetes.io/ingress.class` when creating Ingress resources to solve ACME challenges that use this challenge solver. Only one of `class`, `name` or `ingressClassName` may be specified. type: string ingressClassName: description: This field configures the field `ingressClassName` on the created Ingress resources used to solve ACME challenges that use this challenge solver. This is the recommended way of configuring the ingress class. Only one of `class`, `name` or `ingressClassName` may be specified. type: string ingressTemplate: description: Optional ingress template used to configure the ACME challenge solver ingress used for HTTP01 challenges. type: object properties: metadata: description: ObjectMeta overrides for the ingress used to solve HTTP01 challenges. Only the 'labels' and 'annotations' fields may be set. If labels or annotations overlap with in-built values, the values here will override the in-built values. type: object properties: annotations: description: Annotations that should be added to the created ACME HTTP01 solver ingress. type: object additionalProperties: type: string labels: description: Labels that should be added to the created ACME HTTP01 solver ingress. type: object additionalProperties: type: string name: description: The name of the ingress resource that should have ACME challenge solving routes inserted into it in order to solve HTTP01 challenges. This is typically used in conjunction with ingress controllers like ingress-gce, which maintains a 1:1 mapping between external IPs and ingress resources. Only one of `class`, `name` or `ingressClassName` may be specified. type: string podTemplate: description: Optional pod template used to configure the ACME challenge solver pods used for HTTP01 challenges. type: object properties: metadata: description: ObjectMeta overrides for the pod used to solve HTTP01 challenges. Only the 'labels' and 'annotations' fields may be set. If labels or annotations overlap with in-built values, the values here will override the in-built values. type: object properties: annotations: description: Annotations that should be added to the create ACME HTTP01 solver pods. type: object additionalProperties: type: string labels: description: Labels that should be added to the created ACME HTTP01 solver pods. type: object additionalProperties: type: string spec: description: PodSpec defines overrides for the HTTP01 challenge solver pod. Check ACMEChallengeSolverHTTP01IngressPodSpec to find out currently supported fields. All other fields will be ignored. type: object properties: affinity: description: If specified, the pod's scheduling constraints type: object properties: nodeAffinity: description: Describes node affinity scheduling rules for the pod. 
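# A minimal sketch of the http01 ingress solver described above, using the
# recommended ingressClassName field plus label overrides on the generated
# ingress and solver pods; the class and label values are hypothetical.
---
apiVersion: cert-manager.io/v1
kind: Issuer
metadata:
  name: example-acme-ingress
  namespace: example-ns                     # hypothetical namespace
spec:
  acme:
    server: https://acme-v02.api.letsencrypt.org/directory
    privateKeySecretRef:
      name: example-acme-account-key        # hypothetical Secret name
    solvers:
    - http01:
        ingress:
          ingressClassName: nginx           # hypothetical ingress class
          ingressTemplate:
            metadata:
              labels:
                example.com/solver: acme-http01
          podTemplate:
            metadata:
              labels:
                example.com/solver: acme-http01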
type: object properties: preferredDuringSchedulingIgnoredDuringExecution: description: The scheduler will prefer to schedule pods to nodes that satisfy the affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding "weight" to the sum if the node matches the corresponding matchExpressions; the node(s) with the highest sum are the most preferred. type: array items: description: An empty preferred scheduling term matches all objects with implicit weight 0 (i.e. it's a no-op). A null preferred scheduling term matches no objects (i.e. is also a no-op). type: object required: - preference - weight properties: preference: description: A node selector term, associated with the corresponding weight. type: object properties: matchExpressions: description: A list of node selector requirements by node's labels. type: array items: description: A node selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: The label key that the selector applies to. type: string operator: description: Represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. type: string values: description: An array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. If the operator is Gt or Lt, the values array must have a single element, which will be interpreted as an integer. This array is replaced during a strategic merge patch. type: array items: type: string matchFields: description: A list of node selector requirements by node's fields. type: array items: description: A node selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: The label key that the selector applies to. type: string operator: description: Represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. type: string values: description: An array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. If the operator is Gt or Lt, the values array must have a single element, which will be interpreted as an integer. This array is replaced during a strategic merge patch. type: array items: type: string x-kubernetes-map-type: atomic weight: description: Weight associated with matching the corresponding nodeSelectorTerm, in the range 1-100. type: integer format: int32 requiredDuringSchedulingIgnoredDuringExecution: description: If the affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to an update), the system may or may not try to eventually evict the pod from its node. type: object required: - nodeSelectorTerms properties: nodeSelectorTerms: description: Required. 
A list of node selector terms. The terms are ORed. type: array items: description: A null or empty node selector term matches no objects. The requirements of them are ANDed. The TopologySelectorTerm type implements a subset of the NodeSelectorTerm. type: object properties: matchExpressions: description: A list of node selector requirements by node's labels. type: array items: description: A node selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: The label key that the selector applies to. type: string operator: description: Represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. type: string values: description: An array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. If the operator is Gt or Lt, the values array must have a single element, which will be interpreted as an integer. This array is replaced during a strategic merge patch. type: array items: type: string matchFields: description: A list of node selector requirements by node's fields. type: array items: description: A node selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: The label key that the selector applies to. type: string operator: description: Represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. type: string values: description: An array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. If the operator is Gt or Lt, the values array must have a single element, which will be interpreted as an integer. This array is replaced during a strategic merge patch. type: array items: type: string x-kubernetes-map-type: atomic x-kubernetes-map-type: atomic podAffinity: description: Describes pod affinity scheduling rules (e.g. co-locate this pod in the same node, zone, etc. as some other pod(s)). type: object properties: preferredDuringSchedulingIgnoredDuringExecution: description: The scheduler will prefer to schedule pods to nodes that satisfy the affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding "weight" to the sum if the node has pods which matches the corresponding podAffinityTerm; the node(s) with the highest sum are the most preferred. type: array items: description: The weights of all of the matched WeightedPodAffinityTerm fields are added per-node to find the most preferred node(s) type: object required: - podAffinityTerm - weight properties: podAffinityTerm: description: Required. A pod affinity term, associated with the corresponding weight. type: object required: - topologyKey properties: labelSelector: description: A label query over a set of resources, in this case pods. If it's null, this PodAffinityTerm matches with no Pods. 
type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic matchLabelKeys: description: MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MatchLabelKeys and LabelSelector. Also, MatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic mismatchLabelKeys: description: MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MismatchLabelKeys and LabelSelector. Also, MismatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic namespaceSelector: description: A label query over the set of namespaces that the term applies to. The term is applied to the union of the namespaces selected by this field and the ones listed in the namespaces field. null selector and null or empty namespaces list means "this pod's namespace". An empty selector ({}) matches all namespaces. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. 
type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic namespaces: description: namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means "this pod's namespace". type: array items: type: string topologyKey: description: This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed. type: string weight: description: weight associated with matching the corresponding podAffinityTerm, in the range 1-100. type: integer format: int32 requiredDuringSchedulingIgnoredDuringExecution: description: If the affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to a pod label update), the system may or may not try to eventually evict the pod from its node. When there are multiple elements, the lists of nodes corresponding to each podAffinityTerm are intersected, i.e. all terms must be satisfied. type: array items: description: Defines a set of pods (namely those matching the labelSelector relative to the given namespace(s)) that this pod should be co-located (affinity) or not co-located (anti-affinity) with, where co-located is defined as running on a node whose value of the label with key matches that of any node on which a pod of the set of pods is running type: object required: - topologyKey properties: labelSelector: description: A label query over a set of resources, in this case pods. If it's null, this PodAffinityTerm matches with no Pods. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. 
If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic matchLabelKeys: description: MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MatchLabelKeys and LabelSelector. Also, MatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic mismatchLabelKeys: description: MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MismatchLabelKeys and LabelSelector. Also, MismatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic namespaceSelector: description: A label query over the set of namespaces that the term applies to. The term is applied to the union of the namespaces selected by this field and the ones listed in the namespaces field. null selector and null or empty namespaces list means "this pod's namespace". An empty selector ({}) matches all namespaces. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. 
A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic namespaces: description: namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means "this pod's namespace". type: array items: type: string topologyKey: description: This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed. type: string podAntiAffinity: description: Describes pod anti-affinity scheduling rules (e.g. avoid putting this pod in the same node, zone, etc. as some other pod(s)). type: object properties: preferredDuringSchedulingIgnoredDuringExecution: description: The scheduler will prefer to schedule pods to nodes that satisfy the anti-affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling anti-affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding "weight" to the sum if the node has pods which matches the corresponding podAffinityTerm; the node(s) with the highest sum are the most preferred. type: array items: description: The weights of all of the matched WeightedPodAffinityTerm fields are added per-node to find the most preferred node(s) type: object required: - podAffinityTerm - weight properties: podAffinityTerm: description: Required. A pod affinity term, associated with the corresponding weight. type: object required: - topologyKey properties: labelSelector: description: A label query over a set of resources, in this case pods. If it's null, this PodAffinityTerm matches with no Pods. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. 
type: object additionalProperties: type: string x-kubernetes-map-type: atomic matchLabelKeys: description: MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MatchLabelKeys and LabelSelector. Also, MatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic mismatchLabelKeys: description: MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MismatchLabelKeys and LabelSelector. Also, MismatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic namespaceSelector: description: A label query over the set of namespaces that the term applies to. The term is applied to the union of the namespaces selected by this field and the ones listed in the namespaces field. null selector and null or empty namespaces list means "this pod's namespace". An empty selector ({}) matches all namespaces. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic namespaces: description: namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means "this pod's namespace". 
type: array items: type: string topologyKey: description: This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed. type: string weight: description: weight associated with matching the corresponding podAffinityTerm, in the range 1-100. type: integer format: int32 requiredDuringSchedulingIgnoredDuringExecution: description: If the anti-affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the anti-affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to a pod label update), the system may or may not try to eventually evict the pod from its node. When there are multiple elements, the lists of nodes corresponding to each podAffinityTerm are intersected, i.e. all terms must be satisfied. type: array items: description: Defines a set of pods (namely those matching the labelSelector relative to the given namespace(s)) that this pod should be co-located (affinity) or not co-located (anti-affinity) with, where co-located is defined as running on a node whose value of the label with key matches that of any node on which a pod of the set of pods is running type: object required: - topologyKey properties: labelSelector: description: A label query over a set of resources, in this case pods. If it's null, this PodAffinityTerm matches with no Pods. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic matchLabelKeys: description: MatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key in (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MatchLabelKeys and LabelSelector. Also, MatchLabelKeys cannot be set when LabelSelector isn't set. 
This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic mismatchLabelKeys: description: MismatchLabelKeys is a set of pod label keys to select which pods will be taken into consideration. The keys are used to lookup values from the incoming pod labels, those key-value labels are merged with `LabelSelector` as `key notin (value)` to select the group of existing pods which pods will be taken into consideration for the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming pod labels will be ignored. The default value is empty. The same key is forbidden to exist in both MismatchLabelKeys and LabelSelector. Also, MismatchLabelKeys cannot be set when LabelSelector isn't set. This is an alpha field and requires enabling MatchLabelKeysInPodAffinity feature gate. type: array items: type: string x-kubernetes-list-type: atomic namespaceSelector: description: A label query over the set of namespaces that the term applies to. The term is applied to the union of the namespaces selected by this field and the ones listed in the namespaces field. null selector and null or empty namespaces list means "this pod's namespace". An empty selector ({}) matches all namespaces. type: object properties: matchExpressions: description: matchExpressions is a list of label selector requirements. The requirements are ANDed. type: array items: description: A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values. type: object required: - key - operator properties: key: description: key is the label key that the selector applies to. type: string operator: description: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. type: string values: description: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch. type: array items: type: string matchLabels: description: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed. type: object additionalProperties: type: string x-kubernetes-map-type: atomic namespaces: description: namespaces specifies a static list of namespace names that the term applies to. The term is applied to the union of the namespaces listed in this field and the ones selected by namespaceSelector. null or empty namespaces list and null namespaceSelector means "this pod's namespace". type: array items: type: string topologyKey: description: This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed. type: string imagePullSecrets: description: If specified, the pod's imagePullSecrets type: array items: description: LocalObjectReference contains enough information to let you locate the referenced object inside the same namespace. type: object properties: name: description: 'Name of the referent. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names TODO: Add other useful fields. apiVersion, kind, uid?' type: string x-kubernetes-map-type: atomic nodeSelector: description: 'NodeSelector is a selector which must be true for the pod to fit on a node. Selector which must match a node''s labels for the pod to be scheduled on that node. More info: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/' type: object additionalProperties: type: string priorityClassName: description: If specified, the pod's priorityClassName. type: string serviceAccountName: description: If specified, the pod's service account type: string tolerations: description: If specified, the pod's tolerations. type: array items: description: The pod this Toleration is attached to tolerates any taint that matches the triple using the matching operator . type: object properties: effect: description: Effect indicates the taint effect to match. Empty means match all taint effects. When specified, allowed values are NoSchedule, PreferNoSchedule and NoExecute. type: string key: description: Key is the taint key that the toleration applies to. Empty means match all taint keys. If the key is empty, operator must be Exists; this combination means to match all values and all keys. type: string operator: description: Operator represents a key's relationship to the value. Valid operators are Exists and Equal. Defaults to Equal. Exists is equivalent to wildcard for value, so that a pod can tolerate all taints of a particular category. type: string tolerationSeconds: description: TolerationSeconds represents the period of time the toleration (which must be of effect NoExecute, otherwise this field is ignored) tolerates the taint. By default, it is not set, which means tolerate the taint forever (do not evict). Zero and negative values will be treated as 0 (evict immediately) by the system. type: integer format: int64 value: description: Value is the taint value the toleration matches to. If the operator is Exists, the value should be empty, otherwise just a regular string. type: string serviceType: description: Optional service type for Kubernetes solver service. Supported values are NodePort or ClusterIP. If unset, defaults to NodePort. type: string selector: description: Selector selects a set of DNSNames on the Certificate resource that should be solved using this challenge solver. If not specified, the solver will be treated as the 'default' solver with the lowest priority, i.e. if any other solver has a more specific match, it will be used instead. type: object properties: dnsNames: description: List of DNSNames that this solver will be used to solve. If specified and a match is found, a dnsNames selector will take precedence over a dnsZones selector. If multiple solvers match with the same dnsNames value, the solver with the most matching labels in matchLabels will be selected. If neither has more matches, the solver defined earlier in the list will be selected. type: array items: type: string dnsZones: description: List of DNSZones that this solver will be used to solve. The most specific DNS zone match specified here will take precedence over other DNS zone matches, so a solver specifying sys.example.com will be selected over one specifying example.com for the domain www.sys.example.com. If multiple solvers match with the same dnsZones value, the solver with the most matching labels in matchLabels will be selected. 
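# Example (illustrative only): a solver combining the selector (dnsZones) with
# the podTemplate.spec scheduling overrides described above (affinity,
# nodeSelector, tolerations, serviceAccountName). The DNS zone, node labels,
# taint key and all resource names are placeholders.
---
apiVersion: cert-manager.io/v1
kind: Issuer
metadata:
  name: letsencrypt-selective
  namespace: my-app
spec:
  acme:
    email: admin@example.com
    server: https://acme-v02.api.letsencrypt.org/directory
    privateKeySecretRef:
      name: letsencrypt-selective-account-key
    solvers:
    - selector:
        dnsZones:
        - example.com
      http01:
        ingress:
          ingressClassName: nginx
          podTemplate:
            spec:
              serviceAccountName: acme-http01-solver
              nodeSelector:
                kubernetes.io/os: linux
              tolerations:
              - key: dedicated
                operator: Equal
                value: ingress
                effect: NoSchedule
              affinity:
                nodeAffinity:
                  requiredDuringSchedulingIgnoredDuringExecution:
                    nodeSelectorTerms:
                    - matchExpressions:
                      - key: kubernetes.io/arch
                        operator: In
                        values:
                        - amd64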
If neither has more matches, the solver defined earlier in the list will be selected. type: array items: type: string matchLabels: description: A label selector that is used to refine the set of certificate's that this challenge solver will apply to. type: object additionalProperties: type: string ca: description: CA configures this issuer to sign certificates using a signing CA keypair stored in a Secret resource. This is used to build internal PKIs that are managed by cert-manager. type: object required: - secretName properties: crlDistributionPoints: description: The CRL distribution points is an X.509 v3 certificate extension which identifies the location of the CRL from which the revocation of this certificate can be checked. If not set, certificates will be issued without distribution points set. type: array items: type: string issuingCertificateURLs: description: IssuingCertificateURLs is a list of URLs which this issuer should embed into certificates it creates. See https://www.rfc-editor.org/rfc/rfc5280#section-4.2.2.1 for more details. As an example, such a URL might be "http://ca.domain.com/ca.crt". type: array items: type: string ocspServers: description: The OCSP server list is an X.509 v3 extension that defines a list of URLs of OCSP responders. The OCSP responders can be queried for the revocation status of an issued certificate. If not set, the certificate will be issued with no OCSP servers set. For example, an OCSP server URL could be "http://ocsp.int-x3.letsencrypt.org". type: array items: type: string secretName: description: SecretName is the name of the secret used to sign Certificates issued by this Issuer. type: string selfSigned: description: SelfSigned configures this issuer to 'self sign' certificates using the private key used to create the CertificateRequest object. type: object properties: crlDistributionPoints: description: The CRL distribution points is an X.509 v3 certificate extension which identifies the location of the CRL from which the revocation of this certificate can be checked. If not set certificate will be issued without CDP. Values are strings. type: array items: type: string vault: description: Vault configures this issuer to sign certificates using a HashiCorp Vault PKI backend. type: object required: - auth - path - server properties: auth: description: Auth configures how cert-manager authenticates with the Vault server. type: object properties: appRole: description: AppRole authenticates with Vault using the App Role auth mechanism, with the role and secret stored in a Kubernetes Secret resource. type: object required: - path - roleId - secretRef properties: path: description: 'Path where the App Role authentication backend is mounted in Vault, e.g: "approle"' type: string roleId: description: RoleID configured in the App Role authentication backend when setting up the authentication backend in Vault. type: string secretRef: description: Reference to a key in a Secret that contains the App Role secret used to authenticate with Vault. The `key` field must be specified and denotes which entry within the Secret resource is used as the app role secret. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string kubernetes: description: Kubernetes authenticates with Vault by passing the ServiceAccount token stored in the named Secret resource to the Vault server. type: object required: - role properties: mountPath: description: The Vault mountPath here is the mount path to use when authenticating with Vault. For example, setting a value to `/v1/auth/foo`, will use the path `/v1/auth/foo/login` to authenticate with Vault. If unspecified, the default value "/v1/auth/kubernetes" will be used. type: string role: description: A required field containing the Vault Role to assume. A Role binds a Kubernetes ServiceAccount with a set of Vault policies. type: string secretRef: description: The required Secret field containing a Kubernetes ServiceAccount JWT used for authenticating with Vault. Use of 'ambient credentials' is not supported. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string serviceAccountRef: description: A reference to a service account that will be used to request a bound token (also known as "projected token"). Compared to using "secretRef", using this field means that you don't rely on statically bound tokens. To use this field, you must configure an RBAC rule to let cert-manager request a token. type: object required: - name properties: name: description: Name of the ServiceAccount used to request a token. type: string tokenSecretRef: description: TokenSecretRef authenticates with Vault by presenting a token. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string caBundle: description: Base64-encoded bundle of PEM CAs which will be used to validate the certificate chain presented by Vault. Only used if using HTTPS to connect to Vault and ignored for HTTP connections. Mutually exclusive with CABundleSecretRef. If neither CABundle nor CABundleSecretRef are defined, the certificate bundle in the cert-manager controller container is used to validate the TLS connection. type: string format: byte caBundleSecretRef: description: Reference to a Secret containing a bundle of PEM-encoded CAs to use when verifying the certificate chain presented by Vault when using HTTPS. Mutually exclusive with CABundle. If neither CABundle nor CABundleSecretRef are defined, the certificate bundle in the cert-manager controller container is used to validate the TLS connection. If no key for the Secret is specified, cert-manager will default to 'ca.crt'. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string namespace: description: 'Name of the vault namespace. Namespaces is a set of features within Vault Enterprise that allows Vault environments to support Secure Multi-tenancy. e.g: "ns1" More about namespaces can be found here https://www.vaultproject.io/docs/enterprise/namespaces' type: string path: description: 'Path is the mount path of the Vault PKI backend''s `sign` endpoint, e.g: "my_pki_mount/sign/my-role-name".' type: string server: description: 'Server is the connection address for the Vault server, e.g: "https://vault.example.com:8200".' type: string venafi: description: Venafi configures this issuer to sign certificates using a Venafi TPP or Venafi Cloud policy zone. type: object required: - zone properties: cloud: description: Cloud specifies the Venafi cloud configuration settings. Only one of TPP or Cloud may be specified. type: object required: - apiTokenSecretRef properties: apiTokenSecretRef: description: APITokenSecretRef is a secret key selector for the Venafi Cloud API token. type: object required: - name properties: key: description: The key of the entry in the Secret resource's `data` field to be used. Some instances of this field may be defaulted, in others it may be required. type: string name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string url: description: URL is the base URL for Venafi Cloud. Defaults to "https://api.venafi.cloud/v1". type: string tpp: description: TPP specifies Trust Protection Platform configuration settings. Only one of TPP or Cloud may be specified. type: object required: - credentialsRef - url properties: caBundle: description: Base64-encoded bundle of PEM CAs which will be used to validate the certificate chain presented by the TPP server. Only used if using HTTPS; ignored for HTTP. If undefined, the certificate bundle in the cert-manager controller container is used to validate the chain. type: string format: byte credentialsRef: description: CredentialsRef is a reference to a Secret containing the username and password for the TPP server. The secret must contain two keys, 'username' and 'password'. type: object required: - name properties: name: description: 'Name of the resource being referred to. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' type: string url: description: 'URL is the base URL for the vedsdk endpoint of the Venafi TPP instance, for example: "https://tpp.example.com/vedsdk".' type: string zone: description: Zone is the Venafi Policy Zone to use for this issuer. All requests made to the Venafi platform will be restricted by the named zone policy. This field is required. type: string status: description: Status of the Issuer. This is set and managed automatically. type: object properties: acme: description: ACME specific status options. This field should only be set if the Issuer is configured to use an ACME server to issue certificates. 
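# Examples (illustrative only) of the non-ACME issuer types described above: a
# self-signed bootstrap Issuer, a CA Issuer signing with a keypair stored in a
# Secret, and a Vault-backed Issuer using the Kubernetes auth method with a
# bound ServiceAccount token. All names, namespaces, the Vault server URL and
# the PKI sign path are placeholders.
---
apiVersion: cert-manager.io/v1
kind: Issuer
metadata:
  name: selfsigned-bootstrap
  namespace: my-app
spec:
  selfSigned: {}
---
apiVersion: cert-manager.io/v1
kind: Issuer
metadata:
  name: internal-ca
  namespace: my-app
spec:
  ca:
    secretName: internal-ca-keypair
---
apiVersion: cert-manager.io/v1
kind: Issuer
metadata:
  name: vault-issuer
  namespace: my-app
spec:
  vault:
    server: https://vault.example.com:8200
    path: pki_int/sign/my-app
    caBundleSecretRef:
      name: vault-ca-bundle
      key: ca.crt
    auth:
      kubernetes:
        mountPath: /v1/auth/kubernetes
        role: cert-manager
        serviceAccountRef:
          name: vault-issuer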
type: object properties: lastPrivateKeyHash: description: LastPrivateKeyHash is a hash of the private key associated with the latest registered ACME account, in order to track changes made to registered account associated with the Issuer type: string lastRegisteredEmail: description: LastRegisteredEmail is the email associated with the latest registered ACME account, in order to track changes made to registered account associated with the Issuer type: string uri: description: URI is the unique account identifier, which can also be used to retrieve account details from the CA type: string conditions: description: List of status conditions to indicate the status of a CertificateRequest. Known condition types are `Ready`. type: array items: description: IssuerCondition contains condition information for an Issuer. type: object required: - status - type properties: lastTransitionTime: description: LastTransitionTime is the timestamp corresponding to the last status change of this condition. type: string format: date-time message: description: Message is a human readable description of the details of the last transition, complementing reason. type: string observedGeneration: description: If set, this represents the .metadata.generation that the condition was set based upon. For instance, if .metadata.generation is currently 12, but the .status.condition[x].observedGeneration is 9, the condition is out of date with respect to the current state of the Issuer. type: integer format: int64 reason: description: Reason is a brief machine readable explanation for the condition's last transition. type: string status: description: Status of the condition, one of (`True`, `False`, `Unknown`). type: string enum: - "True" - "False" - Unknown type: description: Type of the condition, known values are (`Ready`). type: string x-kubernetes-list-map-keys: - type x-kubernetes-list-type: map served: true storage: true --- # Source: cert-manager/templates/crds.yaml apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition metadata: name: orders.acme.cert-manager.io labels: app: 'cert-manager' app.kubernetes.io/name: 'cert-manager' app.kubernetes.io/instance: 'cert-manager' # Generated labels app.kubernetes.io/version: "v1.14.4" spec: group: acme.cert-manager.io names: kind: Order listKind: OrderList plural: orders singular: order categories: - cert-manager - cert-manager-acme scope: Namespaced versions: - name: v1 subresources: status: {} additionalPrinterColumns: - jsonPath: .status.state name: State type: string - jsonPath: .spec.issuerRef.name name: Issuer priority: 1 type: string - jsonPath: .status.reason name: Reason priority: 1 type: string - jsonPath: .metadata.creationTimestamp description: CreationTimestamp is a timestamp representing the server time when this object was created. It is not guaranteed to be set in happens-before order across separate operations. Clients may not set this value. It is represented in RFC3339 form and is in UTC. name: Age type: date schema: openAPIV3Schema: description: Order is a type to represent an Order with an ACME server type: object required: - metadata - spec properties: apiVersion: description: 'APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. 
More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources' type: string kind: description: 'Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds' type: string metadata: type: object spec: type: object required: - issuerRef - request properties: commonName: description: CommonName is the common name as specified on the DER encoded CSR. If specified, this value must also be present in `dnsNames` or `ipAddresses`. This field must match the corresponding field on the DER encoded CSR. type: string dnsNames: description: DNSNames is a list of DNS names that should be included as part of the Order validation process. This field must match the corresponding field on the DER encoded CSR. type: array items: type: string duration: description: Duration is the duration for the not after date for the requested certificate. this is set on order creation as pe the ACME spec. type: string ipAddresses: description: IPAddresses is a list of IP addresses that should be included as part of the Order validation process. This field must match the corresponding field on the DER encoded CSR. type: array items: type: string issuerRef: description: IssuerRef references a properly configured ACME-type Issuer which should be used to create this Order. If the Issuer does not exist, processing will be retried. If the Issuer is not an 'ACME' Issuer, an error will be returned and the Order will be marked as failed. type: object required: - name properties: group: description: Group of the resource being referred to. type: string kind: description: Kind of the resource being referred to. type: string name: description: Name of the resource being referred to. type: string request: description: Certificate signing request bytes in DER encoding. This will be used when finalizing the order. This field must be set on the order. type: string format: byte status: type: object properties: authorizations: description: Authorizations contains data returned from the ACME server on what authorizations must be completed in order to validate the DNS names specified on the Order. type: array items: description: ACMEAuthorization contains data returned from the ACME server on an authorization that must be completed in order validate a DNS name on an ACME Order resource. type: object required: - url properties: challenges: description: Challenges specifies the challenge types offered by the ACME server. One of these challenge types will be selected when validating the DNS name and an appropriate Challenge resource will be created to perform the ACME challenge process. type: array items: description: Challenge specifies a challenge offered by the ACME server for an Order. An appropriate Challenge resource can be created to perform the ACME challenge process. type: object required: - token - type - url properties: token: description: Token is the token that must be presented for this challenge. This is used to compute the 'key' that must also be presented. type: string type: description: Type is the type of challenge being offered, e.g. 'http-01', 'dns-01', 'tls-sni-01', etc. This is the raw value retrieved from the ACME server. Only 'http-01' and 'dns-01' are supported by cert-manager, other values will be ignored. 
type: string url: description: URL is the URL of this challenge. It can be used to retrieve additional metadata about the Challenge from the ACME server. type: string identifier: description: Identifier is the DNS name to be validated as part of this authorization type: string initialState: description: InitialState is the initial state of the ACME authorization when first fetched from the ACME server. If an Authorization is already 'valid', the Order controller will not create a Challenge resource for the authorization. This will occur when working with an ACME server that enables 'authz reuse' (such as Let's Encrypt's production endpoint). If not set and 'identifier' is set, the state is assumed to be pending and a Challenge will be created. type: string enum: - valid - ready - pending - processing - invalid - expired - errored url: description: URL is the URL of the Authorization that must be completed type: string wildcard: description: Wildcard will be true if this authorization is for a wildcard DNS name. If this is true, the identifier will be the *non-wildcard* version of the DNS name. For example, if '*.example.com' is the DNS name being validated, this field will be 'true' and the 'identifier' field will be 'example.com'. type: boolean certificate: description: Certificate is a copy of the PEM encoded certificate for this Order. This field will be populated after the order has been successfully finalized with the ACME server, and the order has transitioned to the 'valid' state. type: string format: byte failureTime: description: FailureTime stores the time that this order failed. This is used to influence garbage collection and back-off. type: string format: date-time finalizeURL: description: FinalizeURL of the Order. This is used to obtain certificates for this order once it has been completed. type: string reason: description: Reason optionally provides more information about a why the order is in the current state. type: string state: description: State contains the current state of this Order resource. States 'success' and 'expired' are 'final' type: string enum: - valid - ready - pending - processing - invalid - expired - errored url: description: URL of the Order. This will initially be empty when the resource is first created. The Order controller will populate this field when the Order is first processed. This field will be immutable after it is initially set. 
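# Note: Order (and Challenge) resources are created and managed by cert-manager
# itself while processing a CertificateRequest; they are not normally created
# by hand. An illustrative Certificate that would drive this flow against one
# of the example issuers above (all names and DNS names are placeholders):
---
apiVersion: cert-manager.io/v1
kind: Certificate
metadata:
  name: www-example-com
  namespace: my-app
spec:
  secretName: www-example-com-tls
  dnsNames:
  - www.example.com
  issuerRef:
    kind: ClusterIssuer
    name: letsencrypt-staging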
type: string served: true storage: true --- # Source: cert-manager/templates/cainjector-serviceaccount.yaml apiVersion: v1 kind: ServiceAccount automountServiceAccountToken: true metadata: name: cert-manager-cainjector namespace: cert-manager labels: app: cainjector app.kubernetes.io/name: cainjector app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "cainjector" app.kubernetes.io/version: "v1.14.4" --- # Source: cert-manager/templates/serviceaccount.yaml apiVersion: v1 kind: ServiceAccount automountServiceAccountToken: true metadata: name: cert-manager namespace: cert-manager labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" --- # Source: cert-manager/templates/webhook-serviceaccount.yaml apiVersion: v1 kind: ServiceAccount automountServiceAccountToken: true metadata: name: cert-manager-webhook namespace: cert-manager labels: app: webhook app.kubernetes.io/name: webhook app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "webhook" app.kubernetes.io/version: "v1.14.4" --- # Source: cert-manager/templates/cainjector-rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole metadata: name: cert-manager-cainjector labels: app: cainjector app.kubernetes.io/name: cainjector app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "cainjector" app.kubernetes.io/version: "v1.14.4" rules: - apiGroups: ["cert-manager.io"] resources: ["certificates"] verbs: ["get", "list", "watch"] - apiGroups: [""] resources: ["secrets"] verbs: ["get", "list", "watch"] - apiGroups: [""] resources: ["events"] verbs: ["get", "create", "update", "patch"] - apiGroups: ["admissionregistration.k8s.io"] resources: ["validatingwebhookconfigurations", "mutatingwebhookconfigurations"] verbs: ["get", "list", "watch", "update", "patch"] - apiGroups: ["apiregistration.k8s.io"] resources: ["apiservices"] verbs: ["get", "list", "watch", "update", "patch"] - apiGroups: ["apiextensions.k8s.io"] resources: ["customresourcedefinitions"] verbs: ["get", "list", "watch", "update", "patch"] --- # Source: cert-manager/templates/rbac.yaml # Issuer controller role apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole metadata: name: cert-manager-controller-issuers labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" rules: - apiGroups: ["cert-manager.io"] resources: ["issuers", "issuers/status"] verbs: ["update", "patch"] - apiGroups: ["cert-manager.io"] resources: ["issuers"] verbs: ["get", "list", "watch"] - apiGroups: [""] resources: ["secrets"] verbs: ["get", "list", "watch", "create", "update", "delete"] - apiGroups: [""] resources: ["events"] verbs: ["create", "patch"] --- # Source: cert-manager/templates/rbac.yaml # ClusterIssuer controller role apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole metadata: name: cert-manager-controller-clusterissuers labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" rules: - apiGroups: ["cert-manager.io"] resources: ["clusterissuers", "clusterissuers/status"] verbs: ["update", "patch"] - apiGroups: ["cert-manager.io"] resources: ["clusterissuers"] verbs: ["get", "list", "watch"] - apiGroups: [""] resources: ["secrets"] verbs: ["get", 
"list", "watch", "create", "update", "delete"] - apiGroups: [""] resources: ["events"] verbs: ["create", "patch"] --- # Source: cert-manager/templates/rbac.yaml # Certificates controller role apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole metadata: name: cert-manager-controller-certificates labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" rules: - apiGroups: ["cert-manager.io"] resources: ["certificates", "certificates/status", "certificaterequests", "certificaterequests/status"] verbs: ["update", "patch"] - apiGroups: ["cert-manager.io"] resources: ["certificates", "certificaterequests", "clusterissuers", "issuers"] verbs: ["get", "list", "watch"] # We require these rules to support users with the OwnerReferencesPermissionEnforcement # admission controller enabled: # https://kubernetes.io/docs/reference/access-authn-authz/admission-controllers/#ownerreferencespermissionenforcement - apiGroups: ["cert-manager.io"] resources: ["certificates/finalizers", "certificaterequests/finalizers"] verbs: ["update"] - apiGroups: ["acme.cert-manager.io"] resources: ["orders"] verbs: ["create", "delete", "get", "list", "watch"] - apiGroups: [""] resources: ["secrets"] verbs: ["get", "list", "watch", "create", "update", "delete", "patch"] - apiGroups: [""] resources: ["events"] verbs: ["create", "patch"] --- # Source: cert-manager/templates/rbac.yaml # Orders controller role apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole metadata: name: cert-manager-controller-orders labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" rules: - apiGroups: ["acme.cert-manager.io"] resources: ["orders", "orders/status"] verbs: ["update", "patch"] - apiGroups: ["acme.cert-manager.io"] resources: ["orders", "challenges"] verbs: ["get", "list", "watch"] - apiGroups: ["cert-manager.io"] resources: ["clusterissuers", "issuers"] verbs: ["get", "list", "watch"] - apiGroups: ["acme.cert-manager.io"] resources: ["challenges"] verbs: ["create", "delete"] # We require these rules to support users with the OwnerReferencesPermissionEnforcement # admission controller enabled: # https://kubernetes.io/docs/reference/access-authn-authz/admission-controllers/#ownerreferencespermissionenforcement - apiGroups: ["acme.cert-manager.io"] resources: ["orders/finalizers"] verbs: ["update"] - apiGroups: [""] resources: ["secrets"] verbs: ["get", "list", "watch"] - apiGroups: [""] resources: ["events"] verbs: ["create", "patch"] --- # Source: cert-manager/templates/rbac.yaml # Challenges controller role apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole metadata: name: cert-manager-controller-challenges labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" rules: # Use to update challenge resource status - apiGroups: ["acme.cert-manager.io"] resources: ["challenges", "challenges/status"] verbs: ["update", "patch"] # Used to watch challenge resources - apiGroups: ["acme.cert-manager.io"] resources: ["challenges"] verbs: ["get", "list", "watch"] # Used to watch challenges, issuer and clusterissuer resources - apiGroups: ["cert-manager.io"] resources: ["issuers", "clusterissuers"] verbs: ["get", "list", "watch"] # Need to be able to 
retrieve ACME account private key to complete challenges - apiGroups: [""] resources: ["secrets"] verbs: ["get", "list", "watch"] # Used to create events - apiGroups: [""] resources: ["events"] verbs: ["create", "patch"] # HTTP01 rules - apiGroups: [""] resources: ["pods", "services"] verbs: ["get", "list", "watch", "create", "delete"] - apiGroups: ["networking.k8s.io"] resources: ["ingresses"] verbs: ["get", "list", "watch", "create", "delete", "update"] - apiGroups: [ "gateway.networking.k8s.io" ] resources: [ "httproutes" ] verbs: ["get", "list", "watch", "create", "delete", "update"] # We require the ability to specify a custom hostname when we are creating # new ingress resources. # See: https://github.com/openshift/origin/blob/21f191775636f9acadb44fa42beeb4f75b255532/pkg/route/apiserver/admission/ingress_admission.go#L84-L148 - apiGroups: ["route.openshift.io"] resources: ["routes/custom-host"] verbs: ["create"] # We require these rules to support users with the OwnerReferencesPermissionEnforcement # admission controller enabled: # https://kubernetes.io/docs/reference/access-authn-authz/admission-controllers/#ownerreferencespermissionenforcement - apiGroups: ["acme.cert-manager.io"] resources: ["challenges/finalizers"] verbs: ["update"] # DNS01 rules (duplicated above) - apiGroups: [""] resources: ["secrets"] verbs: ["get", "list", "watch"] --- # Source: cert-manager/templates/rbac.yaml # ingress-shim controller role apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole metadata: name: cert-manager-controller-ingress-shim labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" rules: - apiGroups: ["cert-manager.io"] resources: ["certificates", "certificaterequests"] verbs: ["create", "update", "delete"] - apiGroups: ["cert-manager.io"] resources: ["certificates", "certificaterequests", "issuers", "clusterissuers"] verbs: ["get", "list", "watch"] - apiGroups: ["networking.k8s.io"] resources: ["ingresses"] verbs: ["get", "list", "watch"] # We require these rules to support users with the OwnerReferencesPermissionEnforcement # admission controller enabled: # https://kubernetes.io/docs/reference/access-authn-authz/admission-controllers/#ownerreferencespermissionenforcement - apiGroups: ["networking.k8s.io"] resources: ["ingresses/finalizers"] verbs: ["update"] - apiGroups: ["gateway.networking.k8s.io"] resources: ["gateways", "httproutes"] verbs: ["get", "list", "watch"] - apiGroups: ["gateway.networking.k8s.io"] resources: ["gateways/finalizers", "httproutes/finalizers"] verbs: ["update"] - apiGroups: [""] resources: ["events"] verbs: ["create", "patch"] --- # Source: cert-manager/templates/rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole metadata: name: cert-manager-cluster-view labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" rbac.authorization.k8s.io/aggregate-to-cluster-reader: "true" rules: - apiGroups: ["cert-manager.io"] resources: ["clusterissuers"] verbs: ["get", "list", "watch"] --- # Source: cert-manager/templates/rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole metadata: name: cert-manager-view labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" 
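# The aggregation labels that follow fold these read-only permissions into the cluster's
# built-in view/edit/admin roles, so an ordinary namespace-scoped binding to "view" also
# grants read access to cert-manager resources. A minimal sketch, assuming a user "jane"
# and namespace "team-a" (neither appears in this job):
#   kubectl create rolebinding certs-view --clusterrole=view --user=jane -n team-a
#   kubectl auth can-i list certificates.cert-manager.io -n team-a --as=jane   # expected: yes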
rbac.authorization.k8s.io/aggregate-to-view: "true" rbac.authorization.k8s.io/aggregate-to-edit: "true" rbac.authorization.k8s.io/aggregate-to-admin: "true" rbac.authorization.k8s.io/aggregate-to-cluster-reader: "true" rules: - apiGroups: ["cert-manager.io"] resources: ["certificates", "certificaterequests", "issuers"] verbs: ["get", "list", "watch"] - apiGroups: ["acme.cert-manager.io"] resources: ["challenges", "orders"] verbs: ["get", "list", "watch"] --- # Source: cert-manager/templates/rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole metadata: name: cert-manager-edit labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" rbac.authorization.k8s.io/aggregate-to-edit: "true" rbac.authorization.k8s.io/aggregate-to-admin: "true" rules: - apiGroups: ["cert-manager.io"] resources: ["certificates", "certificaterequests", "issuers"] verbs: ["create", "delete", "deletecollection", "patch", "update"] - apiGroups: ["cert-manager.io"] resources: ["certificates/status"] verbs: ["update"] - apiGroups: ["acme.cert-manager.io"] resources: ["challenges", "orders"] verbs: ["create", "delete", "deletecollection", "patch", "update"] --- # Source: cert-manager/templates/rbac.yaml # Permission to approve CertificateRequests referencing cert-manager.io Issuers and ClusterIssuers apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole metadata: name: cert-manager-controller-approve:cert-manager-io labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "cert-manager" app.kubernetes.io/version: "v1.14.4" rules: - apiGroups: ["cert-manager.io"] resources: ["signers"] verbs: ["approve"] resourceNames: ["issuers.cert-manager.io/*", "clusterissuers.cert-manager.io/*"] --- # Source: cert-manager/templates/rbac.yaml # Permission to: # - Update and sign CertificateSigningRequests referencing cert-manager.io Issuers and ClusterIssuers # - Perform SubjectAccessReviews to test whether users are able to reference Namespaced Issuers apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole metadata: name: cert-manager-controller-certificatesigningrequests labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "cert-manager" app.kubernetes.io/version: "v1.14.4" rules: - apiGroups: ["certificates.k8s.io"] resources: ["certificatesigningrequests"] verbs: ["get", "list", "watch", "update"] - apiGroups: ["certificates.k8s.io"] resources: ["certificatesigningrequests/status"] verbs: ["update", "patch"] - apiGroups: ["certificates.k8s.io"] resources: ["signers"] resourceNames: ["issuers.cert-manager.io/*", "clusterissuers.cert-manager.io/*"] verbs: ["sign"] - apiGroups: ["authorization.k8s.io"] resources: ["subjectaccessreviews"] verbs: ["create"] --- # Source: cert-manager/templates/webhook-rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole metadata: name: cert-manager-webhook:subjectaccessreviews labels: app: webhook app.kubernetes.io/name: webhook app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "webhook" app.kubernetes.io/version: "v1.14.4" rules: - apiGroups: ["authorization.k8s.io"] resources: ["subjectaccessreviews"] verbs: ["create"] --- # Source: cert-manager/templates/cainjector-rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding metadata: name:
cert-manager-cainjector labels: app: cainjector app.kubernetes.io/name: cainjector app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "cainjector" app.kubernetes.io/version: "v1.14.4" roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole name: cert-manager-cainjector subjects: - name: cert-manager-cainjector namespace: cert-manager kind: ServiceAccount --- # Source: cert-manager/templates/rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding metadata: name: cert-manager-controller-issuers labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole name: cert-manager-controller-issuers subjects: - name: cert-manager namespace: cert-manager kind: ServiceAccount --- # Source: cert-manager/templates/rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding metadata: name: cert-manager-controller-clusterissuers labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole name: cert-manager-controller-clusterissuers subjects: - name: cert-manager namespace: cert-manager kind: ServiceAccount --- # Source: cert-manager/templates/rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding metadata: name: cert-manager-controller-certificates labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole name: cert-manager-controller-certificates subjects: - name: cert-manager namespace: cert-manager kind: ServiceAccount --- # Source: cert-manager/templates/rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding metadata: name: cert-manager-controller-orders labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole name: cert-manager-controller-orders subjects: - name: cert-manager namespace: cert-manager kind: ServiceAccount --- # Source: cert-manager/templates/rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding metadata: name: cert-manager-controller-challenges labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole name: cert-manager-controller-challenges subjects: - name: cert-manager namespace: cert-manager kind: ServiceAccount --- # Source: cert-manager/templates/rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding metadata: name: cert-manager-controller-ingress-shim labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole name: cert-manager-controller-ingress-shim subjects: - name: cert-manager namespace: cert-manager kind: ServiceAccount --- # 
Source: cert-manager/templates/rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding metadata: name: cert-manager-controller-approve:cert-manager-io labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "cert-manager" app.kubernetes.io/version: "v1.14.4" roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole name: cert-manager-controller-approve:cert-manager-io subjects: - name: cert-manager namespace: cert-manager kind: ServiceAccount --- # Source: cert-manager/templates/rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding metadata: name: cert-manager-controller-certificatesigningrequests labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "cert-manager" app.kubernetes.io/version: "v1.14.4" roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole name: cert-manager-controller-certificatesigningrequests subjects: - name: cert-manager namespace: cert-manager kind: ServiceAccount --- # Source: cert-manager/templates/webhook-rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding metadata: name: cert-manager-webhook:subjectaccessreviews labels: app: webhook app.kubernetes.io/name: webhook app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "webhook" app.kubernetes.io/version: "v1.14.4" roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole name: cert-manager-webhook:subjectaccessreviews subjects: - apiGroup: "" kind: ServiceAccount name: cert-manager-webhook namespace: cert-manager --- # Source: cert-manager/templates/cainjector-rbac.yaml # leader election rules apiVersion: rbac.authorization.k8s.io/v1 kind: Role metadata: name: cert-manager-cainjector:leaderelection namespace: kube-system labels: app: cainjector app.kubernetes.io/name: cainjector app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "cainjector" app.kubernetes.io/version: "v1.14.4" rules: # Used for leader election by the controller # cert-manager-cainjector-leader-election is used by the CertificateBased injector controller # see cmd/cainjector/start.go#L113 # cert-manager-cainjector-leader-election-core is used by the SecretBased injector controller # see cmd/cainjector/start.go#L137 - apiGroups: ["coordination.k8s.io"] resources: ["leases"] resourceNames: ["cert-manager-cainjector-leader-election", "cert-manager-cainjector-leader-election-core"] verbs: ["get", "update", "patch"] - apiGroups: ["coordination.k8s.io"] resources: ["leases"] verbs: ["create"] --- # Source: cert-manager/templates/rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: Role metadata: name: cert-manager:leaderelection namespace: kube-system labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" rules: - apiGroups: ["coordination.k8s.io"] resources: ["leases"] resourceNames: ["cert-manager-controller"] verbs: ["get", "update", "patch"] - apiGroups: ["coordination.k8s.io"] resources: ["leases"] verbs: ["create"] --- # Source: cert-manager/templates/webhook-rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: Role metadata: name: cert-manager-webhook:dynamic-serving namespace: cert-manager labels: app: webhook app.kubernetes.io/name: webhook app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "webhook" 
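# This Role backs the webhook's --dynamic-serving-* flags set later in this manifest: the
# webhook keeps a self-signed CA in the 'cert-manager-webhook-ca' Secret named below and
# signs its own serving certificate with it. A quick, illustrative way to inspect that CA
# once the webhook is running (command assumed, not part of the rendered chart):
#   kubectl -n cert-manager get secret cert-manager-webhook-ca \
#     -o jsonpath='{.data.ca\.crt}' | base64 -d | openssl x509 -noout -subject -enddate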
app.kubernetes.io/version: "v1.14.4" rules: - apiGroups: [""] resources: ["secrets"] resourceNames: - 'cert-manager-webhook-ca' verbs: ["get", "list", "watch", "update"] # It's not possible to grant CREATE permission on a single resourceName. - apiGroups: [""] resources: ["secrets"] verbs: ["create"] --- # Source: cert-manager/templates/cainjector-rbac.yaml # grant cert-manager permission to manage the leaderelection configmap in the # leader election namespace apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding metadata: name: cert-manager-cainjector:leaderelection namespace: kube-system labels: app: cainjector app.kubernetes.io/name: cainjector app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "cainjector" app.kubernetes.io/version: "v1.14.4" roleRef: apiGroup: rbac.authorization.k8s.io kind: Role name: cert-manager-cainjector:leaderelection subjects: - kind: ServiceAccount name: cert-manager-cainjector namespace: cert-manager --- # Source: cert-manager/templates/rbac.yaml # grant cert-manager permission to manage the leaderelection configmap in the # leader election namespace apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding metadata: name: cert-manager:leaderelection namespace: kube-system labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" roleRef: apiGroup: rbac.authorization.k8s.io kind: Role name: cert-manager:leaderelection subjects: - apiGroup: "" kind: ServiceAccount name: cert-manager namespace: cert-manager --- # Source: cert-manager/templates/webhook-rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding metadata: name: cert-manager-webhook:dynamic-serving namespace: cert-manager labels: app: webhook app.kubernetes.io/name: webhook app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "webhook" app.kubernetes.io/version: "v1.14.4" roleRef: apiGroup: rbac.authorization.k8s.io kind: Role name: cert-manager-webhook:dynamic-serving subjects: - apiGroup: "" kind: ServiceAccount name: cert-manager-webhook namespace: cert-manager --- # Source: cert-manager/templates/service.yaml apiVersion: v1 kind: Service metadata: name: cert-manager namespace: cert-manager labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" spec: type: ClusterIP ports: - protocol: TCP port: 9402 name: tcp-prometheus-servicemonitor targetPort: 9402 selector: app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" --- # Source: cert-manager/templates/webhook-service.yaml apiVersion: v1 kind: Service metadata: name: cert-manager-webhook namespace: cert-manager labels: app: webhook app.kubernetes.io/name: webhook app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "webhook" app.kubernetes.io/version: "v1.14.4" spec: type: ClusterIP ports: - name: https port: 443 protocol: TCP targetPort: "https" selector: app.kubernetes.io/name: webhook app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "webhook" --- # Source: cert-manager/templates/cainjector-deployment.yaml apiVersion: apps/v1 kind: Deployment metadata: name: cert-manager-cainjector namespace: cert-manager labels: app: cainjector app.kubernetes.io/name: cainjector app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: 
"cainjector" app.kubernetes.io/version: "v1.14.4" spec: replicas: 1 selector: matchLabels: app.kubernetes.io/name: cainjector app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "cainjector" template: metadata: labels: app: cainjector app.kubernetes.io/name: cainjector app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "cainjector" app.kubernetes.io/version: "v1.14.4" spec: serviceAccountName: cert-manager-cainjector enableServiceLinks: false securityContext: runAsNonRoot: true seccompProfile: type: RuntimeDefault containers: - name: cert-manager-cainjector image: "quay.io/jetstack/cert-manager-cainjector:v1.14.4" imagePullPolicy: IfNotPresent args: - --v=2 - --leader-election-namespace=kube-system env: - name: POD_NAMESPACE valueFrom: fieldRef: fieldPath: metadata.namespace securityContext: allowPrivilegeEscalation: false capabilities: drop: - ALL readOnlyRootFilesystem: true nodeSelector: kubernetes.io/os: linux --- # Source: cert-manager/templates/deployment.yaml apiVersion: apps/v1 kind: Deployment metadata: name: cert-manager namespace: cert-manager labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" spec: replicas: 1 selector: matchLabels: app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" template: metadata: labels: app: cert-manager app.kubernetes.io/name: cert-manager app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "controller" app.kubernetes.io/version: "v1.14.4" annotations: prometheus.io/path: "/metrics" prometheus.io/scrape: 'true' prometheus.io/port: '9402' spec: serviceAccountName: cert-manager enableServiceLinks: false securityContext: runAsNonRoot: true seccompProfile: type: RuntimeDefault containers: - name: cert-manager-controller image: "quay.io/jetstack/cert-manager-controller:v1.14.4" imagePullPolicy: IfNotPresent args: - --v=2 - --cluster-resource-namespace=$(POD_NAMESPACE) - --leader-election-namespace=kube-system - --acme-http01-solver-image=quay.io/jetstack/cert-manager-acmesolver:v1.14.4 - --max-concurrent-challenges=60 ports: - containerPort: 9402 name: http-metrics protocol: TCP - containerPort: 9403 name: http-healthz protocol: TCP securityContext: allowPrivilegeEscalation: false capabilities: drop: - ALL readOnlyRootFilesystem: true env: - name: POD_NAMESPACE valueFrom: fieldRef: fieldPath: metadata.namespace # LivenessProbe settings are based on those used for the Kubernetes # controller-manager. 
See: # https://github.com/kubernetes/kubernetes/blob/806b30170c61a38fedd54cc9ede4cd6275a1ad3b/cmd/kubeadm/app/util/staticpod/utils.go#L241-L245 livenessProbe: httpGet: port: http-healthz path: /livez scheme: HTTP initialDelaySeconds: 10 periodSeconds: 10 timeoutSeconds: 15 successThreshold: 1 failureThreshold: 8 nodeSelector: kubernetes.io/os: linux --- # Source: cert-manager/templates/webhook-deployment.yaml apiVersion: apps/v1 kind: Deployment metadata: name: cert-manager-webhook namespace: cert-manager labels: app: webhook app.kubernetes.io/name: webhook app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "webhook" app.kubernetes.io/version: "v1.14.4" spec: replicas: 1 selector: matchLabels: app.kubernetes.io/name: webhook app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "webhook" template: metadata: labels: app: webhook app.kubernetes.io/name: webhook app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "webhook" app.kubernetes.io/version: "v1.14.4" spec: serviceAccountName: cert-manager-webhook enableServiceLinks: false securityContext: runAsNonRoot: true seccompProfile: type: RuntimeDefault containers: - name: cert-manager-webhook image: "quay.io/jetstack/cert-manager-webhook:v1.14.4" imagePullPolicy: IfNotPresent args: - --v=2 - --secure-port=10250 - --dynamic-serving-ca-secret-namespace=$(POD_NAMESPACE) - --dynamic-serving-ca-secret-name=cert-manager-webhook-ca - --dynamic-serving-dns-names=cert-manager-webhook - --dynamic-serving-dns-names=cert-manager-webhook.$(POD_NAMESPACE) - --dynamic-serving-dns-names=cert-manager-webhook.$(POD_NAMESPACE).svc ports: - name: https protocol: TCP containerPort: 10250 - name: healthcheck protocol: TCP containerPort: 6080 livenessProbe: httpGet: path: /livez port: 6080 scheme: HTTP initialDelaySeconds: 60 periodSeconds: 10 timeoutSeconds: 1 successThreshold: 1 failureThreshold: 3 readinessProbe: httpGet: path: /healthz port: 6080 scheme: HTTP initialDelaySeconds: 5 periodSeconds: 5 timeoutSeconds: 1 successThreshold: 1 failureThreshold: 3 securityContext: allowPrivilegeEscalation: false capabilities: drop: - ALL readOnlyRootFilesystem: true env: - name: POD_NAMESPACE valueFrom: fieldRef: fieldPath: metadata.namespace nodeSelector: kubernetes.io/os: linux --- # Source: cert-manager/templates/webhook-mutating-webhook.yaml apiVersion: admissionregistration.k8s.io/v1 kind: MutatingWebhookConfiguration metadata: name: cert-manager-webhook labels: app: webhook app.kubernetes.io/name: webhook app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "webhook" app.kubernetes.io/version: "v1.14.4" annotations: cert-manager.io/inject-ca-from-secret: "cert-manager/cert-manager-webhook-ca" webhooks: - name: webhook.cert-manager.io rules: - apiGroups: - "cert-manager.io" apiVersions: - "v1" operations: - CREATE resources: - "certificaterequests" admissionReviewVersions: ["v1"] # This webhook only accepts v1 cert-manager resources. # Equivalent matchPolicy ensures that non-v1 resource requests are sent to # this webhook (after the resources have been converted to v1). 
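# The cert-manager.io/inject-ca-from-secret annotation above is what ties this webhook to the
# cainjector: the caBundle is copied out of the cert-manager/cert-manager-webhook-ca Secret at
# runtime. An illustrative check that the bundle has been injected (command assumed):
#   kubectl get mutatingwebhookconfiguration cert-manager-webhook \
#     -o jsonpath='{.webhooks[0].clientConfig.caBundle}' | wc -c   # non-zero once injected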
matchPolicy: Equivalent timeoutSeconds: 30 failurePolicy: Fail # Only include 'sideEffects' field in Kubernetes 1.12+ sideEffects: None clientConfig: service: name: cert-manager-webhook namespace: cert-manager path: /mutate --- # Source: cert-manager/templates/webhook-validating-webhook.yaml apiVersion: admissionregistration.k8s.io/v1 kind: ValidatingWebhookConfiguration metadata: name: cert-manager-webhook labels: app: webhook app.kubernetes.io/name: webhook app.kubernetes.io/instance: cert-manager app.kubernetes.io/component: "webhook" app.kubernetes.io/version: "v1.14.4" annotations: cert-manager.io/inject-ca-from-secret: "cert-manager/cert-manager-webhook-ca" webhooks: - name: webhook.cert-manager.io namespaceSelector: matchExpressions: - key: cert-manager.io/disable-validation operator: NotIn values: - "true" rules: - apiGroups: - "cert-manager.io" - "acme.cert-manager.io" apiVersions: - "v1" operations: - CREATE - UPDATE resources: - "*/*" admissionReviewVersions: ["v1"] # This webhook only accepts v1 cert-manager resources. # Equivalent matchPolicy ensures that non-v1 resource requests are sent to # this webhook (after the resources have been converted to v1). matchPolicy: Equivalent timeoutSeconds: 30 failurePolicy: Fail sideEffects: None clientConfig: service: name: cert-manager-webhook namespace: cert-manager path: /validate home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/0000755000175000017500000000000015071030120026310 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/cr/0000755000175000017500000000000015071030120026714 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/kustomizations/0000755000175000017500000000000015071030235027433 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/kustomizations/dataplane/0000755000175000017500000000000015071030352031364 5ustar zuulzuul././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/kustomizations/dataplane/99-kustomization.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/kustomizations/dataplane/99-kustomi0000644000175000017500000000764315071030236033254 0ustar zuulzuulapiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization resources: namespace: openstack patches: - target: kind: OpenStackDataPlaneNodeSet patch: |- - op: replace path: /spec/nodes/edpm-compute-0/hostName value: "compute-0" - op: replace path: /spec/nodes/edpm-compute-1/hostName value: "compute-1" - op: replace path: /spec/nodeTemplate/ansible/ansibleVars/neutron_public_interface_name value: "eth1" - op: replace path: /spec/nodes/edpm-compute-0/networks/0/defaultRoute value: false - op: replace path: /spec/nodes/edpm-compute-1/networks/0/defaultRoute value: false - op: replace path: /spec/nodes/edpm-compute-1/ansible/ansibleHost value: >- 192.168.122.101 - op: replace path: /spec/nodes/edpm-compute-1/networks/0/fixedIP value: >- 192.168.122.101 - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_os_net_config_mappings value: net_config_data_lookup: edpm-compute: nic2: "eth1" - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_network_config_debug value: true - op: add path: /spec/env value: {} - op: add path: /spec/env value: - name: "ANSIBLE_VERBOSITY" value: "2" - op: replace path: /spec/nodeTemplate/ansible/ansibleVars/edpm_network_config_template value: |- --- {% set mtu_list = [ctlplane_mtu] %} {% for network in 
nodeset_networks %} {% set _ = mtu_list.append(lookup('vars', networks_lower[network] ~ '_mtu')) %} {%- endfor %} {% set min_viable_mtu = mtu_list | max %} network_config: - type: interface name: nic1 use_dhcp: true mtu: {{ min_viable_mtu }} - type: ovs_bridge name: {{ neutron_physical_bridge_name }} mtu: {{ min_viable_mtu }} use_dhcp: false dns_servers: {{ ctlplane_dns_nameservers }} domain: {{ dns_search_domains }} addresses: - ip_netmask: {{ ctlplane_ip }}/{{ ctlplane_cidr }} routes: {{ ctlplane_host_routes }} members: - type: interface name: nic2 mtu: {{ min_viable_mtu }} # force the MAC address of the bridge to this interface primary: true {% if edpm_network_config_nmstate | bool %} # this ovs_extra configuration fixes OSPRH-17551, but it will be not needed when FDP-1472 is resolved ovs_extra: - "set interface eth1 external-ids:ovn-egress-iface=true" {% endif %} {% for network in nodeset_networks %} - type: vlan mtu: {{ lookup('vars', networks_lower[network] ~ '_mtu') }} vlan_id: {{ lookup('vars', networks_lower[network] ~ '_vlan_id') }} addresses: - ip_netmask: {{ lookup('vars', networks_lower[network] ~ '_ip') }}/{{ lookup('vars', networks_lower[network] ~ '_cidr') }} routes: {{ lookup('vars', networks_lower[network] ~ '_host_routes') }} {% endfor %} - op: replace path: /spec/nodeTemplate/ansible/ansibleUser value: "zuul" - op: replace path: /spec/nodeTemplate/ansible/ansibleVars/ctlplane_dns_nameservers value: - "192.168.122.10" - "199.204.44.24" - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_container_registry_insecure_registries value: ["38.102.83.53:5001"] - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_sshd_allowed_ranges value: ["0.0.0.0/0"] - op: replace path: /spec/nodeTemplate/ansible/ansibleVars/edpm_telemetry_enabled_exporters value: - "podman_exporter" - "openstack_network_exporter" home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/kustomizations/controlplane/0000755000175000017500000000000015071030352032133 5ustar zuulzuul././@LongLink0000644000000000000000000000017300000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/kustomizations/controlplane/80-horizon-kustomization.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/kustomizations/controlplane/80-hori0000644000175000017500000000046115071030244033245 0ustar zuulzuulapiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization namespace: openstack patches: - target: kind: OpenStackControlPlane patch: |- - op: add path: /spec/horizon/enabled value: true - op: add path: /spec/horizon/template/memcachedInstance value: memcached././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/kustomizations/controlplane/99-kustomization.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/kustomizations/controlplane/99-kust0000644000175000017500000000062215071030235033303 0ustar zuulzuulapiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization resources: namespace: openstack patches: - target: kind: OpenStackControlPlane patch: |- - op: replace path: /spec/dns/template/options value: [ { "key": "server", "values": [ "192.168.122.10" ] }, { "key": "no-negcache", "values": [] } ]home/zuul/zuul-output/logs/ci-framework-data/artifacts/post_infra_fetch_nodes_facts_and_save_the.yml0000644000175000017500000000053115071030236033447 0ustar zuulzuulcifmw_edpm_deploy_extra_vars: DATAPLANE_COMPUTE_IP: 192.168.122.100 
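# Illustrative cross-reference: 192.168.122.100/101 are the ctlplane addresses of
# compute-0/compute-1 (compute-1's is patched in the dataplane kustomization above; both
# appear in ci-env/networking-info.yml further down), and SSH_KEY_FILE points at the same
# id_cifw key the hook scripts later in this archive use for ssh/scp; e.g. (assumed target
# and user):
#   ssh -i /home/zuul/.ssh/id_cifw zuul@192.168.122.100 hostname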
DATAPLANE_SINGLE_NODE: 'false' DATAPLANE_SSHD_ALLOWED_RANGES: '[''0.0.0.0/0'']' DATAPLANE_TOTAL_NODES: 2 SSH_KEY_FILE: /home/zuul/.ssh/id_cifw cifmw_edpm_prepare_extra_vars: NETWORK_MTU: 1500 NNCP_DNS_SERVER: 192.168.122.10 NNCP_INTERFACE: ens7 home/zuul/zuul-output/logs/ci-framework-data/artifacts/NetworkManager/0000755000175000017500000000000015071030272025264 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/NetworkManager/ens3.nmconnection0000644000175000017500000000026215071030272030550 0ustar zuulzuul[connection] id=ens3 uuid=852c6cad-9a2b-4161-afef-c3d6db7bf62c type=ethernet interface-name=ens3 [ethernet] [ipv4] method=auto [ipv6] addr-gen-mode=eui64 method=auto [proxy] ././@LongLink0000644000000000000000000000014600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/NetworkManager/ci-private-network.nmconnectionhome/zuul/zuul-output/logs/ci-framework-data/artifacts/NetworkManager/ci-private-network.nmconnectio0000644000175000017500000000051315071030272033253 0ustar zuulzuul[connection] id=ci-private-network uuid=ad24a046-d256-5cef-b3aa-35f1e225d40c type=ethernet autoconnect=true interface-name=eth1 [ethernet] mac-address=fa:16:3e:fc:47:4f mtu=1500 [ipv4] method=manual addresses=192.168.122.11/24 never-default=true gateway=192.168.122.1 [ipv6] addr-gen-mode=stable-privacy method=disabled [proxy] home/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/0000755000175000017500000000000015071030272024362 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/delorean.repo.md50000644000175000017500000000004115071030272027521 0ustar zuulzuulb78cfc68a577b1553523c8a70a34e297 home/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/repo-setup-centos-appstream.repo0000644000175000017500000000031615071030272032637 0ustar zuulzuul [repo-setup-centos-appstream] name=repo-setup-centos-appstream baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/AppStream/$basearch/os/ gpgcheck=0 enabled=1 home/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/repo-setup-centos-baseos.repo0000644000175000017500000000030415071030272032114 0ustar zuulzuul [repo-setup-centos-baseos] name=repo-setup-centos-baseos baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/BaseOS/$basearch/os/ gpgcheck=0 enabled=1 ././@LongLink0000644000000000000000000000015100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/repo-setup-centos-highavailability.repohome/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/repo-setup-centos-highavailability.0000644000175000017500000000034215071030272033266 0ustar zuulzuul [repo-setup-centos-highavailability] name=repo-setup-centos-highavailability baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/HighAvailability/$basearch/os/ gpgcheck=0 enabled=1 home/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/repo-setup-centos-powertools.repo0000644000175000017500000000031115071030272033053 0ustar zuulzuul [repo-setup-centos-powertools] name=repo-setup-centos-powertools baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/CRB/$basearch/os/ gpgcheck=0 enabled=1 home/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/delorean-antelope-testing.repo0000644000175000017500000000316315071030272032325 0ustar zuulzuul[delorean-antelope-testing] 
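# Every section in this file (and in delorean.repo below) points at mirrored DLRN and CentOS
# SIG content with gpgcheck disabled, as is usual on CI nodes. Illustrative sanity checks on
# a node that has these files installed (commands assumed, not part of the generated repo):
#   dnf repoinfo 'delorean*' 'centos9-*'
#   dnf list available --disablerepo='*' --enablerepo='delorean*' | head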
name=dlrn-antelope-testing baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/deps/latest/ enabled=1 gpgcheck=0 module_hotfixes=1 [delorean-antelope-build-deps] name=dlrn-antelope-build-deps baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/build-deps/latest/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-rabbitmq] name=centos9-rabbitmq baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/messaging/$basearch/rabbitmq-38/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-storage] name=centos9-storage baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/storage/$basearch/ceph-reef/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-opstools] name=centos9-opstools baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/opstools/$basearch/collectd-5/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-nfv-ovs] name=NFV SIG OpenvSwitch baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/nfv/$basearch/openvswitch-2/ gpgcheck=0 enabled=1 module_hotfixes=1 # epel is required for Ceph Reef [epel-low-priority] name=Extra Packages for Enterprise Linux $releasever - $basearch metalink=https://mirrors.fedoraproject.org/metalink?repo=epel-$releasever&arch=$basearch&infra=$infra&content=$contentdir enabled=1 gpgcheck=0 countme=1 priority=100 includepkgs=libarrow*,parquet*,python3-asyncssh,re2,python3-grpcio,grpc*,abseil*,thrift* home/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/delorean.repo0000644000175000017500000001336715071030272027054 0ustar zuulzuul[delorean-component-barbican] name=delorean-openstack-barbican-42b4c41831408a8e323fec3c8983b5c793b64874 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/barbican/42/b4/42b4c41831408a8e323fec3c8983b5c793b64874_08052e9d enabled=1 gpgcheck=0 priority=1 [delorean-component-baremetal] name=delorean-python-glean-10df0bd91b9bc5c9fd9cc02d75c0084cd4da29a7 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/baremetal/10/df/10df0bd91b9bc5c9fd9cc02d75c0084cd4da29a7_36137eb3 enabled=1 gpgcheck=0 priority=1 [delorean-component-cinder] name=delorean-openstack-cinder-1c00d6490d88e436f26efb71f2ac96e75252e97c baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/cinder/1c/00/1c00d6490d88e436f26efb71f2ac96e75252e97c_f716f000 enabled=1 gpgcheck=0 priority=1 [delorean-component-clients] name=delorean-python-stevedore-c4acc5639fd2329372142e39464fcca0209b0018 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/clients/c4/ac/c4acc5639fd2329372142e39464fcca0209b0018_d3ef8337 enabled=1 gpgcheck=0 priority=1 [delorean-component-cloudops] name=delorean-python-cloudkitty-tests-tempest-3961dcddb873b1ff6710d7df0739c4285dd71f8c baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/cloudops/39/61/3961dcddb873b1ff6710d7df0739c4285dd71f8c_33e4dd93 enabled=1 gpgcheck=0 priority=1 [delorean-component-common] name=delorean-diskimage-builder-43381184423c185801b5e24f5f3e1e40bb7496f8 
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/common/43/38/43381184423c185801b5e24f5f3e1e40bb7496f8_bf6d4aba enabled=1 gpgcheck=0 priority=1 [delorean-component-compute] name=delorean-openstack-nova-6f8decf0b4f1aa2e96292b6a2ffc28249fe4af5e baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/compute/6f/8d/6f8decf0b4f1aa2e96292b6a2ffc28249fe4af5e_dc05b899 enabled=1 gpgcheck=0 priority=1 [delorean-component-designate] name=delorean-python-designate-tests-tempest-347fdbc9b4595a10b726526b3c0b5928e5b7fcf2 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/designate/34/7f/347fdbc9b4595a10b726526b3c0b5928e5b7fcf2_3fd39337 enabled=1 gpgcheck=0 priority=1 [delorean-component-glance] name=delorean-openstack-glance-1fd12c29b339f30fe823e2b5beba14b5f241e52a baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/glance/1f/d1/1fd12c29b339f30fe823e2b5beba14b5f241e52a_0d693729 enabled=1 gpgcheck=0 priority=1 [delorean-component-keystone] name=delorean-openstack-keystone-e4b40af0ae3698fbbbbfb8c22468b33aae80e6d7 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/keystone/e4/b4/e4b40af0ae3698fbbbbfb8c22468b33aae80e6d7_264c03cc enabled=1 gpgcheck=0 priority=1 [delorean-component-manila] name=delorean-openstack-manila-3c01b7181572c95dac462eb19c3121e36cb0fe95 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/manila/3c/01/3c01b7181572c95dac462eb19c3121e36cb0fe95_912dfd18 enabled=1 gpgcheck=0 priority=1 [delorean-component-network] name=delorean-python-vmware-nsxlib-458234972d1428ac92bbeff26511edfdc49b6b2f baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/network/45/82/458234972d1428ac92bbeff26511edfdc49b6b2f_1bca6328 enabled=1 gpgcheck=0 priority=1 [delorean-component-octavia] name=delorean-openstack-octavia-ba397f07a7331190208c93368ee23826ac4e2707 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/octavia/ba/39/ba397f07a7331190208c93368ee23826ac4e2707_9d6e596a enabled=1 gpgcheck=0 priority=1 [delorean-component-optimize] name=delorean-openstack-watcher-c014f81a8647287f6dcc339321c1256f5a2e82d5 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/optimize/c0/14/c014f81a8647287f6dcc339321c1256f5a2e82d5_bcbfdccc enabled=1 gpgcheck=0 priority=1 [delorean-component-podified] name=delorean-edpm-image-builder-55ba53cf215b14ed95bc80c8e8ed4b29a45fd4ae baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/podified/55/ba/55ba53cf215b14ed95bc80c8e8ed4b29a45fd4ae_419d1901 enabled=1 gpgcheck=0 priority=1 [delorean-component-puppet] name=delorean-puppet-ceph-b0c245ccde541a63fde0564366c6a8247cf9fb4f baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/puppet/b0/c2/b0c245ccde541a63fde0564366c6a8247cf9fb4f_7cde1ad1 enabled=1 gpgcheck=0 priority=1 [delorean-component-swift] name=delorean-openstack-swift-dc98a8463506ac520c469adb0ef47d0f7753905a 
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/swift/dc/98/dc98a8463506ac520c469adb0ef47d0f7753905a_9d02f069 enabled=1 gpgcheck=0 priority=1 [delorean-component-tempest] name=delorean-python-tempestconf-8515371b7cceebd4282e09f1d8f0cc842df82855 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/tempest/85/15/8515371b7cceebd4282e09f1d8f0cc842df82855_a1e336c7 enabled=1 gpgcheck=0 priority=1 [delorean-component-ui] name=delorean-openstack-heat-ui-013accbfd179753bc3f0d1f4e5bed07a4fd9f771 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/ui/01/3a/013accbfd179753bc3f0d1f4e5bed07a4fd9f771_0c88e467 enabled=1 gpgcheck=0 priority=1 home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci-env/0000755000175000017500000000000015071030272023521 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/ci-env/networking-info.yml0000644000175000017500000000536615071030272027376 0ustar zuulzuulcrc_ci_bootstrap_networks_out: compute-0: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.100/24 mac: fa:16:3e:1a:6b:7b mtu: '1500' internal-api: iface: eth1.20 ip: 172.17.0.100/24 mac: 52:54:00:ec:df:aa mtu: '1496' parent_iface: eth1 vlan: 20 storage: iface: eth1.21 ip: 172.18.0.100/24 mac: 52:54:00:e1:d1:4a mtu: '1496' parent_iface: eth1 vlan: 21 tenant: iface: eth1.22 ip: 172.19.0.100/24 mac: 52:54:00:6e:fd:3e mtu: '1496' parent_iface: eth1 vlan: 22 compute-1: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.101/24 mac: fa:16:3e:cb:47:1e mtu: '1500' internal-api: iface: eth1.20 ip: 172.17.0.101/24 mac: 52:54:00:69:15:f1 mtu: '1496' parent_iface: eth1 vlan: 20 storage: iface: eth1.21 ip: 172.18.0.101/24 mac: 52:54:00:c3:9b:d0 mtu: '1496' parent_iface: eth1 vlan: 21 tenant: iface: eth1.22 ip: 172.19.0.101/24 mac: 52:54:00:1d:8d:c8 mtu: '1496' parent_iface: eth1 vlan: 22 controller: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.11/24 mac: fa:16:3e:fc:47:4f mtu: '1500' crc: default: connection: ci-private-network gw: 192.168.122.1 iface: ens7 ip: 192.168.122.10/24 mac: fa:16:3e:36:76:9b mtu: '1500' internal-api: connection: ci-private-network-20 iface: ens7.20 ip: 172.17.0.5/24 mac: 52:54:00:aa:79:c3 mtu: '1496' parent_iface: ens7 vlan: 20 storage: connection: ci-private-network-21 iface: ens7.21 ip: 172.18.0.5/24 mac: 52:54:00:bd:b2:92 mtu: '1496' parent_iface: ens7 vlan: 21 tenant: connection: ci-private-network-22 iface: ens7.22 ip: 172.19.0.5/24 mac: 52:54:00:86:1f:43 mtu: '1496' parent_iface: ens7 vlan: 22 crc_ci_bootstrap_provider_dns: - 199.204.44.24 - 199.204.47.54 home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_000_run_hook_without_retry.sh0000644000175000017500000000166315071030127032153 0ustar zuulzuul#!/bin/bash set -euo pipefail exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_000_run_hook_without_retry.log) 2>&1 export ANSIBLE_CONFIG="/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/ansible.cfg" export ANSIBLE_LOG_PATH="/home/zuul/ci-framework-data/logs/pre_infra_download_needed_tools.log" ansible-playbook -i localhost, -c local -e operator_namespace=openstack-operators -e namespace=openstack -e "@/home/zuul/ci-framework-data/artifacts/parameters/zuul-params.yml" -e "@/home/zuul/ci-framework-data/artifacts/parameters/install-yamls-params.yml" -e 
"@/home/zuul/ci-framework-data/artifacts/parameters/custom-params.yml" -e "cifmw_basedir=/home/zuul/ci-framework-data" -e "step=pre_infra" -e "hook_name=download_needed_tools" -e "playbook_dir=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/download_tools.yaml home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_001_fetch_openshift.sh0000644000175000017500000000032515071030167030466 0ustar zuulzuul#!/bin/bash set -euo pipefail exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_001_fetch_openshift.log) 2>&1 oc login -u kubeadmin -p 123456789 --insecure-skip-tls-verify=true api.crc.testing:6443 home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_002_run_hook_without_retry_fetch.sh0000644000175000017500000000212615071030224033317 0ustar zuulzuul#!/bin/bash set -euo pipefail exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_002_run_hook_without_retry_fetch.log) 2>&1 export ANSIBLE_CONFIG="/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/ansible.cfg" export ANSIBLE_LOG_PATH="/home/zuul/ci-framework-data/logs/post_infra_fetch_nodes_facts_and_save_the.log" ansible-playbook -i /home/zuul/ci-framework-data/artifacts/zuul_inventory.yml -e operator_namespace=openstack-operators -e namespace=openstack -e "@/home/zuul/ci-framework-data/artifacts/parameters/zuul-params.yml" -e "@/home/zuul/ci-framework-data/artifacts/parameters/install-yamls-params.yml" -e "@/home/zuul/ci-framework-data/artifacts/parameters/custom-params.yml" -e "@/home/zuul/ci-framework-data/artifacts/parameters/openshift-login-params.yml" -e "cifmw_basedir=/home/zuul/ci-framework-data" -e "step=post_infra" -e "hook_name=fetch_nodes_facts_and_save_the" -e "playbook_dir=/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/hooks/playbooks" /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/hooks/playbooks/fetch_compute_facts.yml home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_003_run_hook_without_retry_80.sh0000644000175000017500000000211315071030243032453 0ustar zuulzuul#!/bin/bash set -euo pipefail exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_003_run_hook_without_retry_80.log) 2>&1 export ANSIBLE_CONFIG="/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/ansible.cfg" export ANSIBLE_LOG_PATH="/home/zuul/ci-framework-data/logs/pre_deploy_80_kustomize_openstack_cr.log" ansible-playbook -i /home/zuul/ci-framework-data/artifacts/zuul_inventory.yml -e operator_namespace=openstack-operators -e namespace=openstack -e "@/home/zuul/ci-framework-data/artifacts/parameters/zuul-params.yml" -e "@/home/zuul/ci-framework-data/artifacts/parameters/install-yamls-params.yml" -e "@/home/zuul/ci-framework-data/artifacts/parameters/custom-params.yml" -e "@/home/zuul/ci-framework-data/artifacts/parameters/openshift-login-params.yml" -e "cifmw_basedir=/home/zuul/ci-framework-data" -e "step=pre_deploy" -e "hook_name=80_kustomize_openstack_cr" -e "playbook_dir=/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/hooks/playbooks" /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/hooks/playbooks/control_plane_horizon.yml ././@LongLink0000644000000000000000000000014600000000000011604 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_004_run_hook_without_retry_create.shhome/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_004_run_hook_without_retry_create.s0000644000175000017500000000213615071030246033330 0ustar zuulzuul#!/bin/bash set -euo pipefail exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_004_run_hook_without_retry_create.log) 2>&1 export ANSIBLE_CONFIG="/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/ansible.cfg" export ANSIBLE_LOG_PATH="/home/zuul/ci-framework-data/logs/pre_deploy_create_coo_subscription.log" ansible-playbook -i /home/zuul/ci-framework-data/artifacts/zuul_inventory.yml -e operator_namespace=openstack-operators -e namespace=openstack -e "@/home/zuul/ci-framework-data/artifacts/parameters/zuul-params.yml" -e "@/home/zuul/ci-framework-data/artifacts/parameters/install-yamls-params.yml" -e "@/home/zuul/ci-framework-data/artifacts/parameters/custom-params.yml" -e "@/home/zuul/ci-framework-data/artifacts/parameters/openshift-login-params.yml" -e "cifmw_basedir=/home/zuul/ci-framework-data" -e "step=pre_deploy" -e "hook_name=create_coo_subscription" -e "playbook_dir=/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/playbooks" /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/playbooks/deploy_cluster_observability_operator.yaml home/zuul/zuul-output/logs/ci-framework-data/artifacts/resolv.conf0000644000175000017500000000015215071030272024517 0ustar zuulzuul# Generated by NetworkManager nameserver 192.168.122.10 nameserver 199.204.44.24 nameserver 199.204.47.54 home/zuul/zuul-output/logs/ci-framework-data/artifacts/hosts0000644000175000017500000000023715071030272023425 0ustar zuulzuul127.0.0.1 localhost localhost.localdomain localhost4 localhost4.localdomain4 ::1 localhost localhost.localdomain localhost6 localhost6.localdomain6 home/zuul/zuul-output/logs/ci-framework-data/artifacts/ip-network.txt0000644000175000017500000000315415071030272025203 0ustar zuulzuuldefault via 38.102.83.1 dev eth0 proto dhcp src 38.102.83.51 metric 100 38.102.83.0/24 dev eth0 proto kernel scope link src 38.102.83.51 metric 100 169.254.169.254 via 38.102.83.126 dev eth0 proto dhcp src 38.102.83.51 metric 100 192.168.122.0/24 dev eth1 proto kernel scope link src 192.168.122.11 metric 101 0: from all lookup local 32766: from all lookup main 32767: from all lookup default [ { "ifindex": 1, "ifname": "lo", "flags": [ "LOOPBACK","UP","LOWER_UP" ], "mtu": 65536, "qdisc": "noqueue", "operstate": "UNKNOWN", "linkmode": "DEFAULT", "group": "default", "txqlen": 1000, "link_type": "loopback", "address": "00:00:00:00:00:00", "broadcast": "00:00:00:00:00:00" },{ "ifindex": 2, "ifname": "eth0", "flags": [ "BROADCAST","MULTICAST","UP","LOWER_UP" ], "mtu": 1500, "qdisc": "fq_codel", "operstate": "UP", "linkmode": "DEFAULT", "group": "default", "txqlen": 1000, "link_type": "ether", "address": "fa:16:3e:6f:08:20", "broadcast": "ff:ff:ff:ff:ff:ff", "altnames": [ "enp0s3","ens3" ] },{ "ifindex": 3, "ifname": "eth1", "flags": [ "BROADCAST","MULTICAST","UP","LOWER_UP" ], "mtu": 1500, "qdisc": "fq_codel", "operstate": "UP", "linkmode": "DEFAULT", "group": "default", "txqlen": 1000, "link_type": "ether", "address": "fa:16:3e:fc:47:4f", "broadcast": "ff:ff:ff:ff:ff:ff", "altnames": [ "enp0s7","ens7" ] } ] home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_000_check_for_oc.sh0000644000175000017500000000020715071030311027707 0ustar zuulzuul#!/bin/bash set -euo pipefail exec > >(tee -i 
/home/zuul/ci-framework-data/logs/ci_script_000_check_for_oc.log) 2>&1 command -v oc home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_000_run_openstack_must_gather.sh0000644000175000017500000000106415071030311032562 0ustar zuulzuul#!/bin/bash set -euo pipefail exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_000_run_openstack_must_gather.log) 2>&1 oc adm must-gather --image quay.io/openstack-k8s-operators/openstack-must-gather:latest --timeout 10m --host-network=False --dest-dir /home/zuul/ci-framework-data/logs/openstack-k8s-operators-openstack-must-gather -- ADDITIONAL_NAMESPACES=kuttl,openshift-storage,openshift-marketplace,openshift-operators,sushy-emulator,tobiko OPENSTACK_DATABASES=$OPENSTACK_DATABASES SOS_EDPM=$SOS_EDPM SOS_DECOMPRESS=$SOS_DECOMPRESS gather 2>&1 home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_000_prepare_root_ssh.sh0000644000175000017500000000122315071030327030667 0ustar zuulzuul#!/bin/bash set -euo pipefail exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_000_prepare_root_ssh.log) 2>&1 ssh -i ~/.ssh/id_cifw core@api.crc.testing < >(tee -i /home/zuul/ci-framework-data/logs/ci_script_000_copy_logs_from_crc.log) 2>&1 scp -v -r -i ~/.ssh/id_cifw core@api.crc.testing:/tmp/crc-logs-artifacts /home/zuul/ci-framework-data/logs/crc/ home/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible_facts.2025-10-06_21-13/0000777000175000017500000000000015071030372026715 5ustar zuulzuul././@LongLink0000644000000000000000000000015300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible_facts.2025-10-06_21-13/ansible_facts_cache/home/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible_facts.2025-10-06_21-13/ansible_facts_0000755000175000017500000000000015071030372031566 5ustar zuulzuul././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible_facts.2025-10-06_21-13/ansible_facts_cache/compute-0home/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible_facts.2025-10-06_21-13/ansible_facts_0000644000175000017500000005560715071030372031605 0ustar zuulzuul{ "_ansible_facts_gathered": true, "ansible_all_ipv4_addresses": [ "38.102.83.32", "192.168.122.100" ], "ansible_all_ipv6_addresses": [ "fe80::f816:3eff:fe0e:bbd6" ], "ansible_apparmor": { "status": "disabled" }, "ansible_architecture": "x86_64", "ansible_bios_date": "04/01/2014", "ansible_bios_vendor": "SeaBIOS", "ansible_bios_version": "1.15.0-1", "ansible_board_asset_tag": "NA", "ansible_board_name": "NA", "ansible_board_serial": "NA", "ansible_board_vendor": "NA", "ansible_board_version": "NA", "ansible_chassis_asset_tag": "NA", "ansible_chassis_serial": "NA", "ansible_chassis_vendor": "QEMU", "ansible_chassis_version": "pc-i440fx-6.2", "ansible_cmdline": { "BOOT_IMAGE": "(hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64", "console": "ttyS0,115200n8", "crashkernel": "1G-2G:192M,2G-64G:256M,64G-:512M", "net.ifnames": "0", "no_timer_check": true, "ro": true, "root": "UUID=1631a6ad-43b8-436d-ae76-16fa14b94458" }, "ansible_date_time": { "date": "2025-10-06", "day": "06", "epoch": "1759785110", "epoch_int": "1759785110", "hour": "21", "iso8601": "2025-10-06T21:11:50Z", "iso8601_basic": "20251006T211150682767", "iso8601_basic_short": "20251006T211150", "iso8601_micro": "2025-10-06T21:11:50.682767Z", "minute": "11", "month": "10", "second": "50", "time": "21:11:50", "tz": "UTC", "tz_dst": "UTC", "tz_offset": "+0000", "weekday": "Monday", "weekday_number": 
"1", "weeknumber": "40", "year": "2025" }, "ansible_default_ipv4": { "address": "38.102.83.32", "alias": "eth0", "broadcast": "38.102.83.255", "gateway": "38.102.83.1", "interface": "eth0", "macaddress": "fa:16:3e:0e:bb:d6", "mtu": 1500, "netmask": "255.255.255.0", "network": "38.102.83.0", "prefix": "24", "type": "ether" }, "ansible_default_ipv6": {}, "ansible_device_links": { "ids": { "sr0": [ "ata-QEMU_DVD-ROM_QM00001" ] }, "labels": { "sr0": [ "config-2" ] }, "masters": {}, "uuids": { "sr0": [ "2025-10-06-20-54-49-00" ], "vda1": [ "1631a6ad-43b8-436d-ae76-16fa14b94458" ] } }, "ansible_devices": { "sr0": { "holders": [], "host": "", "links": { "ids": [ "ata-QEMU_DVD-ROM_QM00001" ], "labels": [ "config-2" ], "masters": [], "uuids": [ "2025-10-06-20-54-49-00" ] }, "model": "QEMU DVD-ROM", "partitions": {}, "removable": "1", "rotational": "0", "sas_address": null, "sas_device_handle": null, "scheduler_mode": "mq-deadline", "sectors": "964", "sectorsize": "2048", "size": "482.00 KB", "support_discard": "2048", "vendor": "QEMU", "virtual": 1 }, "vda": { "holders": [], "host": "", "links": { "ids": [], "labels": [], "masters": [], "uuids": [] }, "model": null, "partitions": { "vda1": { "holders": [], "links": { "ids": [], "labels": [], "masters": [], "uuids": [ "1631a6ad-43b8-436d-ae76-16fa14b94458" ] }, "sectors": "167770079", "sectorsize": 512, "size": "80.00 GB", "start": "2048", "uuid": "1631a6ad-43b8-436d-ae76-16fa14b94458" } }, "removable": "0", "rotational": "1", "sas_address": null, "sas_device_handle": null, "scheduler_mode": "none", "sectors": "167772160", "sectorsize": "512", "size": "80.00 GB", "support_discard": "512", "vendor": "0x1af4", "virtual": 1 } }, "ansible_distribution": "CentOS", "ansible_distribution_file_parsed": true, "ansible_distribution_file_path": "/etc/centos-release", "ansible_distribution_file_variety": "CentOS", "ansible_distribution_major_version": "9", "ansible_distribution_release": "Stream", "ansible_distribution_version": "9", "ansible_dns": { "nameservers": [ "199.204.44.24", "199.204.47.54" ] }, "ansible_domain": "", "ansible_effective_group_id": 1000, "ansible_effective_user_id": 1000, "ansible_env": { "BASH_FUNC_which%%": "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}", "DBUS_SESSION_BUS_ADDRESS": "unix:path=/run/user/1000/bus", "DEBUGINFOD_IMA_CERT_PATH": "/etc/keys/ima:", "DEBUGINFOD_URLS": "https://debuginfod.centos.org/ ", "HOME": "/home/zuul", "LANG": "en_US.UTF-8", "LESSOPEN": "||/usr/bin/lesspipe.sh %s", "LOGNAME": "zuul", "MOTD_SHOWN": "pam", "PATH": "/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin", "PWD": "/home/zuul", "SELINUX_LEVEL_REQUESTED": "", "SELINUX_ROLE_REQUESTED": "", "SELINUX_USE_CURRENT_RANGE": "", "SHELL": "/bin/bash", "SHLVL": "1", "SSH_CLIENT": "38.102.83.51 52360 22", "SSH_CONNECTION": "38.102.83.51 52360 38.102.83.32 22", "USER": "zuul", "XDG_RUNTIME_DIR": "/run/user/1000", "XDG_SESSION_CLASS": "user", "XDG_SESSION_ID": "7", "XDG_SESSION_TYPE": "tty", "_": "/usr/bin/python3", "which_declare": "declare -f" }, "ansible_eth0": { "active": true, "device": "eth0", "features": { "esp_hw_offload": "off [fixed]", "esp_tx_csum_hw_offload": "off [fixed]", "generic_receive_offload": "on", "generic_segmentation_offload": "on", "highdma": "on [fixed]", "hsr_dup_offload": "off [fixed]", "hsr_fwd_offload": "off [fixed]", "hsr_tag_ins_offload": "off [fixed]", "hsr_tag_rm_offload": "off [fixed]", "hw_tc_offload": "off 
[fixed]", "l2_fwd_offload": "off [fixed]", "large_receive_offload": "off [fixed]", "loopback": "off [fixed]", "macsec_hw_offload": "off [fixed]", "ntuple_filters": "off [fixed]", "receive_hashing": "off [fixed]", "rx_all": "off [fixed]", "rx_checksumming": "on [fixed]", "rx_fcs": "off [fixed]", "rx_gro_hw": "on", "rx_gro_list": "off", "rx_udp_gro_forwarding": "off", "rx_udp_tunnel_port_offload": "off [fixed]", "rx_vlan_filter": "on [fixed]", "rx_vlan_offload": "off [fixed]", "rx_vlan_stag_filter": "off [fixed]", "rx_vlan_stag_hw_parse": "off [fixed]", "scatter_gather": "on", "tcp_segmentation_offload": "on", "tls_hw_record": "off [fixed]", "tls_hw_rx_offload": "off [fixed]", "tls_hw_tx_offload": "off [fixed]", "tx_checksum_fcoe_crc": "off [fixed]", "tx_checksum_ip_generic": "on", "tx_checksum_ipv4": "off [fixed]", "tx_checksum_ipv6": "off [fixed]", "tx_checksum_sctp": "off [fixed]", "tx_checksumming": "on", "tx_esp_segmentation": "off [fixed]", "tx_fcoe_segmentation": "off [fixed]", "tx_gre_csum_segmentation": "off [fixed]", "tx_gre_segmentation": "off [fixed]", "tx_gso_list": "off [fixed]", "tx_gso_partial": "off [fixed]", "tx_gso_robust": "on [fixed]", "tx_ipxip4_segmentation": "off [fixed]", "tx_ipxip6_segmentation": "off [fixed]", "tx_nocache_copy": "off", "tx_scatter_gather": "on", "tx_scatter_gather_fraglist": "off [fixed]", "tx_sctp_segmentation": "off [fixed]", "tx_tcp6_segmentation": "on", "tx_tcp_ecn_segmentation": "on", "tx_tcp_mangleid_segmentation": "off", "tx_tcp_segmentation": "on", "tx_tunnel_remcsum_segmentation": "off [fixed]", "tx_udp_segmentation": "off [fixed]", "tx_udp_tnl_csum_segmentation": "off [fixed]", "tx_udp_tnl_segmentation": "off [fixed]", "tx_vlan_offload": "off [fixed]", "tx_vlan_stag_hw_insert": "off [fixed]", "vlan_challenged": "off [fixed]" }, "hw_timestamp_filters": [], "ipv4": { "address": "38.102.83.32", "broadcast": "38.102.83.255", "netmask": "255.255.255.0", "network": "38.102.83.0", "prefix": "24" }, "ipv6": [ { "address": "fe80::f816:3eff:fe0e:bbd6", "prefix": "64", "scope": "link" } ], "macaddress": "fa:16:3e:0e:bb:d6", "module": "virtio_net", "mtu": 1500, "pciid": "virtio1", "promisc": false, "speed": -1, "timestamping": [], "type": "ether" }, "ansible_eth1": { "active": true, "device": "eth1", "features": { "esp_hw_offload": "off [fixed]", "esp_tx_csum_hw_offload": "off [fixed]", "generic_receive_offload": "on", "generic_segmentation_offload": "on", "highdma": "on [fixed]", "hsr_dup_offload": "off [fixed]", "hsr_fwd_offload": "off [fixed]", "hsr_tag_ins_offload": "off [fixed]", "hsr_tag_rm_offload": "off [fixed]", "hw_tc_offload": "off [fixed]", "l2_fwd_offload": "off [fixed]", "large_receive_offload": "off [fixed]", "loopback": "off [fixed]", "macsec_hw_offload": "off [fixed]", "ntuple_filters": "off [fixed]", "receive_hashing": "off [fixed]", "rx_all": "off [fixed]", "rx_checksumming": "on [fixed]", "rx_fcs": "off [fixed]", "rx_gro_hw": "on", "rx_gro_list": "off", "rx_udp_gro_forwarding": "off", "rx_udp_tunnel_port_offload": "off [fixed]", "rx_vlan_filter": "on [fixed]", "rx_vlan_offload": "off [fixed]", "rx_vlan_stag_filter": "off [fixed]", "rx_vlan_stag_hw_parse": "off [fixed]", "scatter_gather": "on", "tcp_segmentation_offload": "on", "tls_hw_record": "off [fixed]", "tls_hw_rx_offload": "off [fixed]", "tls_hw_tx_offload": "off [fixed]", "tx_checksum_fcoe_crc": "off [fixed]", "tx_checksum_ip_generic": "on", "tx_checksum_ipv4": "off [fixed]", "tx_checksum_ipv6": "off [fixed]", "tx_checksum_sctp": "off [fixed]", "tx_checksumming": "on", 
"tx_esp_segmentation": "off [fixed]", "tx_fcoe_segmentation": "off [fixed]", "tx_gre_csum_segmentation": "off [fixed]", "tx_gre_segmentation": "off [fixed]", "tx_gso_list": "off [fixed]", "tx_gso_partial": "off [fixed]", "tx_gso_robust": "on [fixed]", "tx_ipxip4_segmentation": "off [fixed]", "tx_ipxip6_segmentation": "off [fixed]", "tx_nocache_copy": "off", "tx_scatter_gather": "on", "tx_scatter_gather_fraglist": "off [fixed]", "tx_sctp_segmentation": "off [fixed]", "tx_tcp6_segmentation": "on", "tx_tcp_ecn_segmentation": "on", "tx_tcp_mangleid_segmentation": "off", "tx_tcp_segmentation": "on", "tx_tunnel_remcsum_segmentation": "off [fixed]", "tx_udp_segmentation": "off [fixed]", "tx_udp_tnl_csum_segmentation": "off [fixed]", "tx_udp_tnl_segmentation": "off [fixed]", "tx_vlan_offload": "off [fixed]", "tx_vlan_stag_hw_insert": "off [fixed]", "vlan_challenged": "off [fixed]" }, "hw_timestamp_filters": [], "ipv4": { "address": "192.168.122.100", "broadcast": "192.168.122.255", "netmask": "255.255.255.0", "network": "192.168.122.0", "prefix": "24" }, "macaddress": "fa:16:3e:1a:6b:7b", "module": "virtio_net", "mtu": 1500, "pciid": "virtio5", "promisc": false, "speed": -1, "timestamping": [], "type": "ether" }, "ansible_fibre_channel_wwn": [], "ansible_fips": false, "ansible_form_factor": "Other", "ansible_fqdn": "compute-0", "ansible_hostname": "compute-0", "ansible_hostnqn": "nqn.2014-08.org.nvmexpress:uuid:2f7d2450-18ac-43a6-80ee-9caa4a7736e0", "ansible_interfaces": [ "eth0", "lo", "eth1" ], "ansible_is_chroot": false, "ansible_iscsi_iqn": "", "ansible_kernel": "5.14.0-620.el9.x86_64", "ansible_kernel_version": "#1 SMP PREEMPT_DYNAMIC Fri Sep 26 01:13:23 UTC 2025", "ansible_lo": { "active": true, "device": "lo", "features": { "esp_hw_offload": "off [fixed]", "esp_tx_csum_hw_offload": "off [fixed]", "generic_receive_offload": "on", "generic_segmentation_offload": "on", "highdma": "on [fixed]", "hsr_dup_offload": "off [fixed]", "hsr_fwd_offload": "off [fixed]", "hsr_tag_ins_offload": "off [fixed]", "hsr_tag_rm_offload": "off [fixed]", "hw_tc_offload": "off [fixed]", "l2_fwd_offload": "off [fixed]", "large_receive_offload": "off [fixed]", "loopback": "on [fixed]", "macsec_hw_offload": "off [fixed]", "ntuple_filters": "off [fixed]", "receive_hashing": "off [fixed]", "rx_all": "off [fixed]", "rx_checksumming": "on [fixed]", "rx_fcs": "off [fixed]", "rx_gro_hw": "off [fixed]", "rx_gro_list": "off", "rx_udp_gro_forwarding": "off", "rx_udp_tunnel_port_offload": "off [fixed]", "rx_vlan_filter": "off [fixed]", "rx_vlan_offload": "off [fixed]", "rx_vlan_stag_filter": "off [fixed]", "rx_vlan_stag_hw_parse": "off [fixed]", "scatter_gather": "on", "tcp_segmentation_offload": "on", "tls_hw_record": "off [fixed]", "tls_hw_rx_offload": "off [fixed]", "tls_hw_tx_offload": "off [fixed]", "tx_checksum_fcoe_crc": "off [fixed]", "tx_checksum_ip_generic": "on [fixed]", "tx_checksum_ipv4": "off [fixed]", "tx_checksum_ipv6": "off [fixed]", "tx_checksum_sctp": "on [fixed]", "tx_checksumming": "on", "tx_esp_segmentation": "off [fixed]", "tx_fcoe_segmentation": "off [fixed]", "tx_gre_csum_segmentation": "off [fixed]", "tx_gre_segmentation": "off [fixed]", "tx_gso_list": "on", "tx_gso_partial": "off [fixed]", "tx_gso_robust": "off [fixed]", "tx_ipxip4_segmentation": "off [fixed]", "tx_ipxip6_segmentation": "off [fixed]", "tx_nocache_copy": "off [fixed]", "tx_scatter_gather": "on [fixed]", "tx_scatter_gather_fraglist": "on [fixed]", "tx_sctp_segmentation": "on", "tx_tcp6_segmentation": "on", "tx_tcp_ecn_segmentation": 
"on", "tx_tcp_mangleid_segmentation": "on", "tx_tcp_segmentation": "on", "tx_tunnel_remcsum_segmentation": "off [fixed]", "tx_udp_segmentation": "on", "tx_udp_tnl_csum_segmentation": "off [fixed]", "tx_udp_tnl_segmentation": "off [fixed]", "tx_vlan_offload": "off [fixed]", "tx_vlan_stag_hw_insert": "off [fixed]", "vlan_challenged": "on [fixed]" }, "hw_timestamp_filters": [], "ipv4": { "address": "127.0.0.1", "broadcast": "", "netmask": "255.0.0.0", "network": "127.0.0.0", "prefix": "8" }, "ipv6": [ { "address": "::1", "prefix": "128", "scope": "host" } ], "mtu": 65536, "promisc": false, "timestamping": [], "type": "loopback" }, "ansible_loadavg": { "15m": 0.16, "1m": 0.05, "5m": 0.28 }, "ansible_local": {}, "ansible_locally_reachable_ips": { "ipv4": [ "38.102.83.32", "127.0.0.0/8", "127.0.0.1", "192.168.122.100" ], "ipv6": [ "::1", "fe80::f816:3eff:fe0e:bbd6" ] }, "ansible_lsb": {}, "ansible_lvm": "N/A", "ansible_machine": "x86_64", "ansible_machine_id": "42833e1b511a402df82cb9cb2fc36491", "ansible_memfree_mb": 6861, "ansible_memory_mb": { "nocache": { "free": 7315, "used": 364 }, "real": { "free": 6861, "total": 7679, "used": 818 }, "swap": { "cached": 0, "free": 0, "total": 0, "used": 0 } }, "ansible_memtotal_mb": 7679, "ansible_mounts": [ { "block_available": 20340051, "block_size": 4096, "block_total": 20954875, "block_used": 614824, "device": "/dev/vda1", "fstype": "xfs", "inode_available": 41887492, "inode_total": 41942512, "inode_used": 55020, "mount": "/", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 83312848896, "size_total": 85831168000, "uuid": "1631a6ad-43b8-436d-ae76-16fa14b94458" } ], "ansible_nodename": "compute-0", "ansible_os_family": "RedHat", "ansible_pkg_mgr": "dnf", "ansible_proc_cmdline": { "BOOT_IMAGE": "(hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64", "console": "ttyS0,115200n8", "crashkernel": "1G-2G:192M,2G-64G:256M,64G-:512M", "net.ifnames": "0", "no_timer_check": true, "ro": true, "root": "UUID=1631a6ad-43b8-436d-ae76-16fa14b94458" }, "ansible_processor": [ "0", "AuthenticAMD", "AMD EPYC-Rome Processor", "1", "AuthenticAMD", "AMD EPYC-Rome Processor", "2", "AuthenticAMD", "AMD EPYC-Rome Processor", "3", "AuthenticAMD", "AMD EPYC-Rome Processor", "4", "AuthenticAMD", "AMD EPYC-Rome Processor", "5", "AuthenticAMD", "AMD EPYC-Rome Processor", "6", "AuthenticAMD", "AMD EPYC-Rome Processor", "7", "AuthenticAMD", "AMD EPYC-Rome Processor" ], "ansible_processor_cores": 1, "ansible_processor_count": 8, "ansible_processor_nproc": 8, "ansible_processor_threads_per_core": 1, "ansible_processor_vcpus": 8, "ansible_product_name": "OpenStack Nova", "ansible_product_serial": "NA", "ansible_product_uuid": "NA", "ansible_product_version": "26.2.1", "ansible_python": { "executable": "/usr/bin/python3", "has_sslcontext": true, "type": "cpython", "version": { "major": 3, "micro": 23, "minor": 9, "releaselevel": "final", "serial": 0 }, "version_info": [ 3, 9, 23, "final", 0 ] }, "ansible_python_version": "3.9.23", "ansible_real_group_id": 1000, "ansible_real_user_id": 1000, "ansible_selinux": { "config_mode": "enforcing", "mode": "enforcing", "policyvers": 33, "status": "enabled", "type": "targeted" }, "ansible_selinux_python_present": true, "ansible_service_mgr": "systemd", "ansible_ssh_host_key_ecdsa_public": "AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBIg93BG4tSk/84rOcp3l7cvT84i0YRnciDkOHZlwA3wQIFgrL1A0rqYai7TpTc1TpKzwhnzmSp31Tf1y362AD5M=", "ansible_ssh_host_key_ecdsa_public_keytype": "ecdsa-sha2-nistp256", 
"ansible_ssh_host_key_ed25519_public": "AAAAC3NzaC1lZDI1NTE5AAAAIPFRJ9KVOyWI6b5GPMrBBHR84riUKq85zgonp/Fzjoym", "ansible_ssh_host_key_ed25519_public_keytype": "ssh-ed25519", "ansible_ssh_host_key_rsa_public": "AAAAB3NzaC1yc2EAAAADAQABAAABgQDUrGIqXWX1G68eUKbYgZhPfXeqi0/aRwJ752atv5Fvu5YvwS+xZ9qPFN1/H0W6GqdzZ2BIjA8F9fjVp0kUEiFo0A2RuRUOkcrXmArHjai5O+ndkpDgh7A6b9FhR5uwB7ADP+oDKz4tEvPnIhpveDDYPIyiDVZvJH9EQJJ4FiYph2ILtSvWACLyW6wcx9Zh5TA5EljES6LEyvbSP0v1LS7xW9mB2GQbYOebDcODeIYoCZcW/GqC7iVay5Lyfa1BVecf0xDcwifAb1A/0izjJT5Es3eAYvHW89zG84GLCvvtYfsvlFYoq58okz5oMrPQ7I2ypFZ2njF5Hkb3Hbpj2E/Fku66rJhbhevVuIvCxoW/r/YQ/w4r7YU30LMs9GSOITwwxgCt6kshYGAOW6oMfjvp7r5fVwSqZxl36++Xb58XoziGdog7cpXjG0fbWJnDi1pYhIaOnzH203XUdOLntFy969F+tMQ0AoWIt8vLK1LmtB6lOiLnTFkiWRs9i+ziczc=", "ansible_ssh_host_key_rsa_public_keytype": "ssh-rsa", "ansible_swapfree_mb": 0, "ansible_swaptotal_mb": 0, "ansible_system": "Linux", "ansible_system_capabilities": [ "" ], "ansible_system_capabilities_enforced": "True", "ansible_system_vendor": "OpenStack Foundation", "ansible_uptime_seconds": 1010, "ansible_user_dir": "/home/zuul", "ansible_user_gecos": "", "ansible_user_gid": 1000, "ansible_user_id": "zuul", "ansible_user_shell": "/bin/bash", "ansible_user_uid": 1000, "ansible_userspace_architecture": "x86_64", "ansible_userspace_bits": "64", "ansible_virtualization_role": "guest", "ansible_virtualization_tech_guest": [ "openstack" ], "ansible_virtualization_tech_host": [ "kvm" ], "ansible_virtualization_type": "openstack", "discovered_interpreter_python": "/usr/bin/python3", "gather_subset": [ "all" ], "module_setup": true }././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible_facts.2025-10-06_21-13/ansible_facts_cache/compute-1home/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible_facts.2025-10-06_21-13/ansible_facts_0000644000175000017500000005561315071030372031602 0ustar zuulzuul{ "_ansible_facts_gathered": true, "ansible_all_ipv4_addresses": [ "192.168.122.101", "38.102.83.194" ], "ansible_all_ipv6_addresses": [ "fe80::f816:3eff:fe66:118a" ], "ansible_apparmor": { "status": "disabled" }, "ansible_architecture": "x86_64", "ansible_bios_date": "04/01/2014", "ansible_bios_vendor": "SeaBIOS", "ansible_bios_version": "1.15.0-1", "ansible_board_asset_tag": "NA", "ansible_board_name": "NA", "ansible_board_serial": "NA", "ansible_board_vendor": "NA", "ansible_board_version": "NA", "ansible_chassis_asset_tag": "NA", "ansible_chassis_serial": "NA", "ansible_chassis_vendor": "QEMU", "ansible_chassis_version": "pc-i440fx-6.2", "ansible_cmdline": { "BOOT_IMAGE": "(hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64", "console": "ttyS0,115200n8", "crashkernel": "1G-2G:192M,2G-64G:256M,64G-:512M", "net.ifnames": "0", "no_timer_check": true, "ro": true, "root": "UUID=1631a6ad-43b8-436d-ae76-16fa14b94458" }, "ansible_date_time": { "date": "2025-10-06", "day": "06", "epoch": "1759785110", "epoch_int": "1759785110", "hour": "21", "iso8601": "2025-10-06T21:11:50Z", "iso8601_basic": "20251006T211150282653", "iso8601_basic_short": "20251006T211150", "iso8601_micro": "2025-10-06T21:11:50.282653Z", "minute": "11", "month": "10", "second": "50", "time": "21:11:50", "tz": "UTC", "tz_dst": "UTC", "tz_offset": "+0000", "weekday": "Monday", "weekday_number": "1", "weeknumber": "40", "year": "2025" }, "ansible_default_ipv4": { "address": "38.102.83.194", "alias": "eth0", "broadcast": "38.102.83.255", "gateway": "38.102.83.1", "interface": "eth0", "macaddress": "fa:16:3e:66:11:8a", 
"mtu": 1500, "netmask": "255.255.255.0", "network": "38.102.83.0", "prefix": "24", "type": "ether" }, "ansible_default_ipv6": {}, "ansible_device_links": { "ids": { "sr0": [ "ata-QEMU_DVD-ROM_QM00001" ] }, "labels": { "sr0": [ "config-2" ] }, "masters": {}, "uuids": { "sr0": [ "2025-10-06-20-57-24-00" ], "vda1": [ "1631a6ad-43b8-436d-ae76-16fa14b94458" ] } }, "ansible_devices": { "sr0": { "holders": [], "host": "", "links": { "ids": [ "ata-QEMU_DVD-ROM_QM00001" ], "labels": [ "config-2" ], "masters": [], "uuids": [ "2025-10-06-20-57-24-00" ] }, "model": "QEMU DVD-ROM", "partitions": {}, "removable": "1", "rotational": "0", "sas_address": null, "sas_device_handle": null, "scheduler_mode": "mq-deadline", "sectors": "964", "sectorsize": "2048", "size": "482.00 KB", "support_discard": "2048", "vendor": "QEMU", "virtual": 1 }, "vda": { "holders": [], "host": "", "links": { "ids": [], "labels": [], "masters": [], "uuids": [] }, "model": null, "partitions": { "vda1": { "holders": [], "links": { "ids": [], "labels": [], "masters": [], "uuids": [ "1631a6ad-43b8-436d-ae76-16fa14b94458" ] }, "sectors": "167770079", "sectorsize": 512, "size": "80.00 GB", "start": "2048", "uuid": "1631a6ad-43b8-436d-ae76-16fa14b94458" } }, "removable": "0", "rotational": "1", "sas_address": null, "sas_device_handle": null, "scheduler_mode": "none", "sectors": "167772160", "sectorsize": "512", "size": "80.00 GB", "support_discard": "512", "vendor": "0x1af4", "virtual": 1 } }, "ansible_distribution": "CentOS", "ansible_distribution_file_parsed": true, "ansible_distribution_file_path": "/etc/centos-release", "ansible_distribution_file_variety": "CentOS", "ansible_distribution_major_version": "9", "ansible_distribution_release": "Stream", "ansible_distribution_version": "9", "ansible_dns": { "nameservers": [ "199.204.44.24", "199.204.47.54" ] }, "ansible_domain": "", "ansible_effective_group_id": 1000, "ansible_effective_user_id": 1000, "ansible_env": { "BASH_FUNC_which%%": "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}", "DBUS_SESSION_BUS_ADDRESS": "unix:path=/run/user/1000/bus", "DEBUGINFOD_IMA_CERT_PATH": "/etc/keys/ima:", "DEBUGINFOD_URLS": "https://debuginfod.centos.org/ ", "HOME": "/home/zuul", "LANG": "en_US.UTF-8", "LESSOPEN": "||/usr/bin/lesspipe.sh %s", "LOGNAME": "zuul", "MOTD_SHOWN": "pam", "PATH": "/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin", "PWD": "/home/zuul", "SELINUX_LEVEL_REQUESTED": "", "SELINUX_ROLE_REQUESTED": "", "SELINUX_USE_CURRENT_RANGE": "", "SHELL": "/bin/bash", "SHLVL": "1", "SSH_CLIENT": "38.102.83.51 40494 22", "SSH_CONNECTION": "38.102.83.51 40494 38.102.83.194 22", "USER": "zuul", "XDG_RUNTIME_DIR": "/run/user/1000", "XDG_SESSION_CLASS": "user", "XDG_SESSION_ID": "7", "XDG_SESSION_TYPE": "tty", "_": "/usr/bin/python3", "which_declare": "declare -f" }, "ansible_eth0": { "active": true, "device": "eth0", "features": { "esp_hw_offload": "off [fixed]", "esp_tx_csum_hw_offload": "off [fixed]", "generic_receive_offload": "on", "generic_segmentation_offload": "on", "highdma": "on [fixed]", "hsr_dup_offload": "off [fixed]", "hsr_fwd_offload": "off [fixed]", "hsr_tag_ins_offload": "off [fixed]", "hsr_tag_rm_offload": "off [fixed]", "hw_tc_offload": "off [fixed]", "l2_fwd_offload": "off [fixed]", "large_receive_offload": "off [fixed]", "loopback": "off [fixed]", "macsec_hw_offload": "off [fixed]", "ntuple_filters": "off [fixed]", "receive_hashing": "off [fixed]", "rx_all": 
"off [fixed]", "rx_checksumming": "on [fixed]", "rx_fcs": "off [fixed]", "rx_gro_hw": "on", "rx_gro_list": "off", "rx_udp_gro_forwarding": "off", "rx_udp_tunnel_port_offload": "off [fixed]", "rx_vlan_filter": "on [fixed]", "rx_vlan_offload": "off [fixed]", "rx_vlan_stag_filter": "off [fixed]", "rx_vlan_stag_hw_parse": "off [fixed]", "scatter_gather": "on", "tcp_segmentation_offload": "on", "tls_hw_record": "off [fixed]", "tls_hw_rx_offload": "off [fixed]", "tls_hw_tx_offload": "off [fixed]", "tx_checksum_fcoe_crc": "off [fixed]", "tx_checksum_ip_generic": "on", "tx_checksum_ipv4": "off [fixed]", "tx_checksum_ipv6": "off [fixed]", "tx_checksum_sctp": "off [fixed]", "tx_checksumming": "on", "tx_esp_segmentation": "off [fixed]", "tx_fcoe_segmentation": "off [fixed]", "tx_gre_csum_segmentation": "off [fixed]", "tx_gre_segmentation": "off [fixed]", "tx_gso_list": "off [fixed]", "tx_gso_partial": "off [fixed]", "tx_gso_robust": "on [fixed]", "tx_ipxip4_segmentation": "off [fixed]", "tx_ipxip6_segmentation": "off [fixed]", "tx_nocache_copy": "off", "tx_scatter_gather": "on", "tx_scatter_gather_fraglist": "off [fixed]", "tx_sctp_segmentation": "off [fixed]", "tx_tcp6_segmentation": "on", "tx_tcp_ecn_segmentation": "on", "tx_tcp_mangleid_segmentation": "off", "tx_tcp_segmentation": "on", "tx_tunnel_remcsum_segmentation": "off [fixed]", "tx_udp_segmentation": "off [fixed]", "tx_udp_tnl_csum_segmentation": "off [fixed]", "tx_udp_tnl_segmentation": "off [fixed]", "tx_vlan_offload": "off [fixed]", "tx_vlan_stag_hw_insert": "off [fixed]", "vlan_challenged": "off [fixed]" }, "hw_timestamp_filters": [], "ipv4": { "address": "38.102.83.194", "broadcast": "38.102.83.255", "netmask": "255.255.255.0", "network": "38.102.83.0", "prefix": "24" }, "ipv6": [ { "address": "fe80::f816:3eff:fe66:118a", "prefix": "64", "scope": "link" } ], "macaddress": "fa:16:3e:66:11:8a", "module": "virtio_net", "mtu": 1500, "pciid": "virtio1", "promisc": false, "speed": -1, "timestamping": [], "type": "ether" }, "ansible_eth1": { "active": true, "device": "eth1", "features": { "esp_hw_offload": "off [fixed]", "esp_tx_csum_hw_offload": "off [fixed]", "generic_receive_offload": "on", "generic_segmentation_offload": "on", "highdma": "on [fixed]", "hsr_dup_offload": "off [fixed]", "hsr_fwd_offload": "off [fixed]", "hsr_tag_ins_offload": "off [fixed]", "hsr_tag_rm_offload": "off [fixed]", "hw_tc_offload": "off [fixed]", "l2_fwd_offload": "off [fixed]", "large_receive_offload": "off [fixed]", "loopback": "off [fixed]", "macsec_hw_offload": "off [fixed]", "ntuple_filters": "off [fixed]", "receive_hashing": "off [fixed]", "rx_all": "off [fixed]", "rx_checksumming": "on [fixed]", "rx_fcs": "off [fixed]", "rx_gro_hw": "on", "rx_gro_list": "off", "rx_udp_gro_forwarding": "off", "rx_udp_tunnel_port_offload": "off [fixed]", "rx_vlan_filter": "on [fixed]", "rx_vlan_offload": "off [fixed]", "rx_vlan_stag_filter": "off [fixed]", "rx_vlan_stag_hw_parse": "off [fixed]", "scatter_gather": "on", "tcp_segmentation_offload": "on", "tls_hw_record": "off [fixed]", "tls_hw_rx_offload": "off [fixed]", "tls_hw_tx_offload": "off [fixed]", "tx_checksum_fcoe_crc": "off [fixed]", "tx_checksum_ip_generic": "on", "tx_checksum_ipv4": "off [fixed]", "tx_checksum_ipv6": "off [fixed]", "tx_checksum_sctp": "off [fixed]", "tx_checksumming": "on", "tx_esp_segmentation": "off [fixed]", "tx_fcoe_segmentation": "off [fixed]", "tx_gre_csum_segmentation": "off [fixed]", "tx_gre_segmentation": "off [fixed]", "tx_gso_list": "off [fixed]", "tx_gso_partial": "off [fixed]", 
"tx_gso_robust": "on [fixed]", "tx_ipxip4_segmentation": "off [fixed]", "tx_ipxip6_segmentation": "off [fixed]", "tx_nocache_copy": "off", "tx_scatter_gather": "on", "tx_scatter_gather_fraglist": "off [fixed]", "tx_sctp_segmentation": "off [fixed]", "tx_tcp6_segmentation": "on", "tx_tcp_ecn_segmentation": "on", "tx_tcp_mangleid_segmentation": "off", "tx_tcp_segmentation": "on", "tx_tunnel_remcsum_segmentation": "off [fixed]", "tx_udp_segmentation": "off [fixed]", "tx_udp_tnl_csum_segmentation": "off [fixed]", "tx_udp_tnl_segmentation": "off [fixed]", "tx_vlan_offload": "off [fixed]", "tx_vlan_stag_hw_insert": "off [fixed]", "vlan_challenged": "off [fixed]" }, "hw_timestamp_filters": [], "ipv4": { "address": "192.168.122.101", "broadcast": "192.168.122.255", "netmask": "255.255.255.0", "network": "192.168.122.0", "prefix": "24" }, "macaddress": "fa:16:3e:cb:47:1e", "module": "virtio_net", "mtu": 1500, "pciid": "virtio5", "promisc": false, "speed": -1, "timestamping": [], "type": "ether" }, "ansible_fibre_channel_wwn": [], "ansible_fips": false, "ansible_form_factor": "Other", "ansible_fqdn": "compute-1", "ansible_hostname": "compute-1", "ansible_hostnqn": "nqn.2014-08.org.nvmexpress:uuid:2f7d2450-18ac-43a6-80ee-9caa4a7736e0", "ansible_interfaces": [ "eth1", "lo", "eth0" ], "ansible_is_chroot": false, "ansible_iscsi_iqn": "", "ansible_kernel": "5.14.0-620.el9.x86_64", "ansible_kernel_version": "#1 SMP PREEMPT_DYNAMIC Fri Sep 26 01:13:23 UTC 2025", "ansible_lo": { "active": true, "device": "lo", "features": { "esp_hw_offload": "off [fixed]", "esp_tx_csum_hw_offload": "off [fixed]", "generic_receive_offload": "on", "generic_segmentation_offload": "on", "highdma": "on [fixed]", "hsr_dup_offload": "off [fixed]", "hsr_fwd_offload": "off [fixed]", "hsr_tag_ins_offload": "off [fixed]", "hsr_tag_rm_offload": "off [fixed]", "hw_tc_offload": "off [fixed]", "l2_fwd_offload": "off [fixed]", "large_receive_offload": "off [fixed]", "loopback": "on [fixed]", "macsec_hw_offload": "off [fixed]", "ntuple_filters": "off [fixed]", "receive_hashing": "off [fixed]", "rx_all": "off [fixed]", "rx_checksumming": "on [fixed]", "rx_fcs": "off [fixed]", "rx_gro_hw": "off [fixed]", "rx_gro_list": "off", "rx_udp_gro_forwarding": "off", "rx_udp_tunnel_port_offload": "off [fixed]", "rx_vlan_filter": "off [fixed]", "rx_vlan_offload": "off [fixed]", "rx_vlan_stag_filter": "off [fixed]", "rx_vlan_stag_hw_parse": "off [fixed]", "scatter_gather": "on", "tcp_segmentation_offload": "on", "tls_hw_record": "off [fixed]", "tls_hw_rx_offload": "off [fixed]", "tls_hw_tx_offload": "off [fixed]", "tx_checksum_fcoe_crc": "off [fixed]", "tx_checksum_ip_generic": "on [fixed]", "tx_checksum_ipv4": "off [fixed]", "tx_checksum_ipv6": "off [fixed]", "tx_checksum_sctp": "on [fixed]", "tx_checksumming": "on", "tx_esp_segmentation": "off [fixed]", "tx_fcoe_segmentation": "off [fixed]", "tx_gre_csum_segmentation": "off [fixed]", "tx_gre_segmentation": "off [fixed]", "tx_gso_list": "on", "tx_gso_partial": "off [fixed]", "tx_gso_robust": "off [fixed]", "tx_ipxip4_segmentation": "off [fixed]", "tx_ipxip6_segmentation": "off [fixed]", "tx_nocache_copy": "off [fixed]", "tx_scatter_gather": "on [fixed]", "tx_scatter_gather_fraglist": "on [fixed]", "tx_sctp_segmentation": "on", "tx_tcp6_segmentation": "on", "tx_tcp_ecn_segmentation": "on", "tx_tcp_mangleid_segmentation": "on", "tx_tcp_segmentation": "on", "tx_tunnel_remcsum_segmentation": "off [fixed]", "tx_udp_segmentation": "on", "tx_udp_tnl_csum_segmentation": "off [fixed]", "tx_udp_tnl_segmentation": 
"off [fixed]", "tx_vlan_offload": "off [fixed]", "tx_vlan_stag_hw_insert": "off [fixed]", "vlan_challenged": "on [fixed]" }, "hw_timestamp_filters": [], "ipv4": { "address": "127.0.0.1", "broadcast": "", "netmask": "255.0.0.0", "network": "127.0.0.0", "prefix": "8" }, "ipv6": [ { "address": "::1", "prefix": "128", "scope": "host" } ], "mtu": 65536, "promisc": false, "timestamping": [], "type": "loopback" }, "ansible_loadavg": { "15m": 0.14, "1m": 0.07, "5m": 0.26 }, "ansible_local": {}, "ansible_locally_reachable_ips": { "ipv4": [ "38.102.83.194", "127.0.0.0/8", "127.0.0.1", "192.168.122.101" ], "ipv6": [ "::1", "fe80::f816:3eff:fe66:118a" ] }, "ansible_lsb": {}, "ansible_lvm": "N/A", "ansible_machine": "x86_64", "ansible_machine_id": "42833e1b511a402df82cb9cb2fc36491", "ansible_memfree_mb": 6815, "ansible_memory_mb": { "nocache": { "free": 7269, "used": 410 }, "real": { "free": 6815, "total": 7679, "used": 864 }, "swap": { "cached": 0, "free": 0, "total": 0, "used": 0 } }, "ansible_memtotal_mb": 7679, "ansible_mounts": [ { "block_available": 20340046, "block_size": 4096, "block_total": 20954875, "block_used": 614829, "device": "/dev/vda1", "fstype": "xfs", "inode_available": 41887492, "inode_total": 41942512, "inode_used": 55020, "mount": "/", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 83312828416, "size_total": 85831168000, "uuid": "1631a6ad-43b8-436d-ae76-16fa14b94458" } ], "ansible_nodename": "compute-1", "ansible_os_family": "RedHat", "ansible_pkg_mgr": "dnf", "ansible_proc_cmdline": { "BOOT_IMAGE": "(hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64", "console": "ttyS0,115200n8", "crashkernel": "1G-2G:192M,2G-64G:256M,64G-:512M", "net.ifnames": "0", "no_timer_check": true, "ro": true, "root": "UUID=1631a6ad-43b8-436d-ae76-16fa14b94458" }, "ansible_processor": [ "0", "AuthenticAMD", "AMD EPYC-Rome Processor", "1", "AuthenticAMD", "AMD EPYC-Rome Processor", "2", "AuthenticAMD", "AMD EPYC-Rome Processor", "3", "AuthenticAMD", "AMD EPYC-Rome Processor", "4", "AuthenticAMD", "AMD EPYC-Rome Processor", "5", "AuthenticAMD", "AMD EPYC-Rome Processor", "6", "AuthenticAMD", "AMD EPYC-Rome Processor", "7", "AuthenticAMD", "AMD EPYC-Rome Processor" ], "ansible_processor_cores": 1, "ansible_processor_count": 8, "ansible_processor_nproc": 8, "ansible_processor_threads_per_core": 1, "ansible_processor_vcpus": 8, "ansible_product_name": "OpenStack Nova", "ansible_product_serial": "NA", "ansible_product_uuid": "NA", "ansible_product_version": "26.2.1", "ansible_python": { "executable": "/usr/bin/python3", "has_sslcontext": true, "type": "cpython", "version": { "major": 3, "micro": 23, "minor": 9, "releaselevel": "final", "serial": 0 }, "version_info": [ 3, 9, 23, "final", 0 ] }, "ansible_python_version": "3.9.23", "ansible_real_group_id": 1000, "ansible_real_user_id": 1000, "ansible_selinux": { "config_mode": "enforcing", "mode": "enforcing", "policyvers": 33, "status": "enabled", "type": "targeted" }, "ansible_selinux_python_present": true, "ansible_service_mgr": "systemd", "ansible_ssh_host_key_ecdsa_public": "AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBMTgbNX9dV8PbdYRYJgdLGAmpYTw9D60MdrTlouplSpg0YYsXgPcoTYqPcnsQeu0q8Dhp49vz6EaftjDleBSPRc=", "ansible_ssh_host_key_ecdsa_public_keytype": "ecdsa-sha2-nistp256", "ansible_ssh_host_key_ed25519_public": "AAAAC3NzaC1lZDI1NTE5AAAAIKyyYn981xqM8QXbrWUowTM5nwuXYFJQ6duZFmdVeYTr", "ansible_ssh_host_key_ed25519_public_keytype": "ssh-ed25519", "ansible_ssh_host_key_rsa_public": 
"AAAAB3NzaC1yc2EAAAADAQABAAABgQDD/EaxevhIIgCsjkZD8fsuuLFaKprWcI8Xf9fpPEfi8Zn71kEhnu+1usw1HlgsncOk/flzasKRGJjCFox0PUgX8jMrj2ZGOhiGL6aPNkXgxmFXGJYcgV3/Gmb0aSRZQ9FpDzH//d9vxTKFg+h3QwZvfCPR6qqSihmJUv7vH71hDFTTf6MzShu1bZwZDTCZ3z2ti0ty96iJjliYwoBVtelM93cBzsyI0TYzKdfnhDWkzM3oEIOxzxhaQPPYuPr7mX4I3HDh2xHcoaxD3bFvcuvHKi8C1s6MZ5W5Iht+bbNFEkl/4bafo0wEsxbKnfhBCBAKG4RQ0SnDspbqLSkzSxSH7Liqflynj4XfektVZIZrJ8nUmJ22iOCiz6lGqViyfxZfduSRlibuOXqaMH0ImCIDRthFsqToCMyRty1XpB32ulYKlrstGqe1T4ZdW00u8+xu5bXw/+BUHo689PipHhqtdu5XBZUZr137oxQ7aeQCg4mr2eazVG07cLhoHdGqoRk=", "ansible_ssh_host_key_rsa_public_keytype": "ssh-rsa", "ansible_swapfree_mb": 0, "ansible_swaptotal_mb": 0, "ansible_system": "Linux", "ansible_system_capabilities": [ "" ], "ansible_system_capabilities_enforced": "True", "ansible_system_vendor": "OpenStack Foundation", "ansible_uptime_seconds": 822, "ansible_user_dir": "/home/zuul", "ansible_user_gecos": "", "ansible_user_gid": 1000, "ansible_user_id": "zuul", "ansible_user_shell": "/bin/bash", "ansible_user_uid": 1000, "ansible_userspace_architecture": "x86_64", "ansible_userspace_bits": "64", "ansible_virtualization_role": "guest", "ansible_virtualization_tech_guest": [ "openstack" ], "ansible_virtualization_tech_host": [ "kvm" ], "ansible_virtualization_type": "openstack", "discovered_interpreter_python": "/usr/bin/python3", "gather_subset": [ "all" ], "module_setup": true }././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible_facts.2025-10-06_21-13/ansible_facts_cache/localhosthome/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible_facts.2025-10-06_21-13/ansible_facts_0000644000175000017500000015776315071030372031613 0ustar zuulzuul{ "_ansible_facts_gathered": true, "ansible_all_ipv4_addresses": [ "38.102.83.51", "192.168.122.11" ], "ansible_all_ipv6_addresses": [ "fe80::f816:3eff:fe6f:820" ], "ansible_apparmor": { "status": "disabled" }, "ansible_architecture": "x86_64", "ansible_bios_date": "04/01/2014", "ansible_bios_vendor": "SeaBIOS", "ansible_bios_version": "1.15.0-1", "ansible_board_asset_tag": "NA", "ansible_board_name": "NA", "ansible_board_serial": "NA", "ansible_board_vendor": "NA", "ansible_board_version": "NA", "ansible_chassis_asset_tag": "NA", "ansible_chassis_serial": "NA", "ansible_chassis_vendor": "QEMU", "ansible_chassis_version": "pc-i440fx-6.2", "ansible_cmdline": { "BOOT_IMAGE": "(hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64", "console": "ttyS0,115200n8", "crashkernel": "1G-2G:192M,2G-64G:256M,64G-:512M", "net.ifnames": "0", "no_timer_check": true, "ro": true, "root": "UUID=1631a6ad-43b8-436d-ae76-16fa14b94458" }, "ansible_date_time": { "date": "2025-10-06", "day": "06", "epoch": "1759784989", "epoch_int": "1759784989", "hour": "21", "iso8601": "2025-10-06T21:09:49Z", "iso8601_basic": "20251006T210949189106", "iso8601_basic_short": "20251006T210949", "iso8601_micro": "2025-10-06T21:09:49.189106Z", "minute": "09", "month": "10", "second": "49", "time": "21:09:49", "tz": "UTC", "tz_dst": "UTC", "tz_offset": "+0000", "weekday": "Monday", "weekday_number": "1", "weeknumber": "40", "year": "2025" }, "ansible_default_ipv4": { "address": "38.102.83.51", "alias": "eth0", "broadcast": "38.102.83.255", "gateway": "38.102.83.1", "interface": "eth0", "macaddress": "fa:16:3e:6f:08:20", "mtu": 1500, "netmask": "255.255.255.0", "network": "38.102.83.0", "prefix": "24", "type": "ether" }, "ansible_default_ipv6": {}, "ansible_device_links": { "ids": { "sr0": [ "ata-QEMU_DVD-ROM_QM00001" ] }, 
"labels": { "sr0": [ "config-2" ] }, "masters": {}, "uuids": { "sr0": [ "2025-10-06-20-56-29-00" ], "vda1": [ "1631a6ad-43b8-436d-ae76-16fa14b94458" ] } }, "ansible_devices": { "sr0": { "holders": [], "host": "", "links": { "ids": [ "ata-QEMU_DVD-ROM_QM00001" ], "labels": [ "config-2" ], "masters": [], "uuids": [ "2025-10-06-20-56-29-00" ] }, "model": "QEMU DVD-ROM", "partitions": {}, "removable": "1", "rotational": "0", "sas_address": null, "sas_device_handle": null, "scheduler_mode": "mq-deadline", "sectors": "964", "sectorsize": "2048", "size": "482.00 KB", "support_discard": "2048", "vendor": "QEMU", "virtual": 1 }, "vda": { "holders": [], "host": "", "links": { "ids": [], "labels": [], "masters": [], "uuids": [] }, "model": null, "partitions": { "vda1": { "holders": [], "links": { "ids": [], "labels": [], "masters": [], "uuids": [ "1631a6ad-43b8-436d-ae76-16fa14b94458" ] }, "sectors": "83883999", "sectorsize": 512, "size": "40.00 GB", "start": "2048", "uuid": "1631a6ad-43b8-436d-ae76-16fa14b94458" } }, "removable": "0", "rotational": "1", "sas_address": null, "sas_device_handle": null, "scheduler_mode": "none", "sectors": "83886080", "sectorsize": "512", "size": "40.00 GB", "support_discard": "512", "vendor": "0x1af4", "virtual": 1 } }, "ansible_distribution": "CentOS", "ansible_distribution_file_parsed": true, "ansible_distribution_file_path": "/etc/centos-release", "ansible_distribution_file_variety": "CentOS", "ansible_distribution_major_version": "9", "ansible_distribution_release": "Stream", "ansible_distribution_version": "9", "ansible_dns": { "nameservers": [ "192.168.122.10", "199.204.44.24", "199.204.47.54" ] }, "ansible_domain": "", "ansible_effective_group_id": 1000, "ansible_effective_user_id": 1000, "ansible_env": { "BASH_FUNC_which%%": "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}", "DBUS_SESSION_BUS_ADDRESS": "unix:path=/run/user/1000/bus", "DEBUGINFOD_IMA_CERT_PATH": "/etc/keys/ima:", "DEBUGINFOD_URLS": "https://debuginfod.centos.org/ ", "HOME": "/home/zuul", "LANG": "en_US.UTF-8", "LESSOPEN": "||/usr/bin/lesspipe.sh %s", "LOGNAME": "zuul", "MOTD_SHOWN": "pam", "PATH": "/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin", "PWD": "/home/zuul/src/github.com/openstack-k8s-operators/ci-framework", "SELINUX_LEVEL_REQUESTED": "", "SELINUX_ROLE_REQUESTED": "", "SELINUX_USE_CURRENT_RANGE": "", "SHELL": "/bin/bash", "SHLVL": "2", "SSH_CLIENT": "38.102.83.114 57854 22", "SSH_CONNECTION": "38.102.83.114 57854 38.102.83.51 22", "USER": "zuul", "XDG_RUNTIME_DIR": "/run/user/1000", "XDG_SESSION_CLASS": "user", "XDG_SESSION_ID": "9", "XDG_SESSION_TYPE": "tty", "_": "/usr/bin/python3", "which_declare": "declare -f" }, "ansible_eth0": { "active": true, "device": "eth0", "features": { "esp_hw_offload": "off [fixed]", "esp_tx_csum_hw_offload": "off [fixed]", "generic_receive_offload": "on", "generic_segmentation_offload": "on", "highdma": "on [fixed]", "hsr_dup_offload": "off [fixed]", "hsr_fwd_offload": "off [fixed]", "hsr_tag_ins_offload": "off [fixed]", "hsr_tag_rm_offload": "off [fixed]", "hw_tc_offload": "off [fixed]", "l2_fwd_offload": "off [fixed]", "large_receive_offload": "off [fixed]", "loopback": "off [fixed]", "macsec_hw_offload": "off [fixed]", "ntuple_filters": "off [fixed]", "receive_hashing": "off [fixed]", "rx_all": "off [fixed]", "rx_checksumming": "on [fixed]", "rx_fcs": "off [fixed]", "rx_gro_hw": "on", "rx_gro_list": "off", "rx_udp_gro_forwarding": 
"off", "rx_udp_tunnel_port_offload": "off [fixed]", "rx_vlan_filter": "on [fixed]", "rx_vlan_offload": "off [fixed]", "rx_vlan_stag_filter": "off [fixed]", "rx_vlan_stag_hw_parse": "off [fixed]", "scatter_gather": "on", "tcp_segmentation_offload": "on", "tls_hw_record": "off [fixed]", "tls_hw_rx_offload": "off [fixed]", "tls_hw_tx_offload": "off [fixed]", "tx_checksum_fcoe_crc": "off [fixed]", "tx_checksum_ip_generic": "on", "tx_checksum_ipv4": "off [fixed]", "tx_checksum_ipv6": "off [fixed]", "tx_checksum_sctp": "off [fixed]", "tx_checksumming": "on", "tx_esp_segmentation": "off [fixed]", "tx_fcoe_segmentation": "off [fixed]", "tx_gre_csum_segmentation": "off [fixed]", "tx_gre_segmentation": "off [fixed]", "tx_gso_list": "off [fixed]", "tx_gso_partial": "off [fixed]", "tx_gso_robust": "on [fixed]", "tx_ipxip4_segmentation": "off [fixed]", "tx_ipxip6_segmentation": "off [fixed]", "tx_nocache_copy": "off", "tx_scatter_gather": "on", "tx_scatter_gather_fraglist": "off [fixed]", "tx_sctp_segmentation": "off [fixed]", "tx_tcp6_segmentation": "on", "tx_tcp_ecn_segmentation": "on", "tx_tcp_mangleid_segmentation": "off", "tx_tcp_segmentation": "on", "tx_tunnel_remcsum_segmentation": "off [fixed]", "tx_udp_segmentation": "off [fixed]", "tx_udp_tnl_csum_segmentation": "off [fixed]", "tx_udp_tnl_segmentation": "off [fixed]", "tx_vlan_offload": "off [fixed]", "tx_vlan_stag_hw_insert": "off [fixed]", "vlan_challenged": "off [fixed]" }, "hw_timestamp_filters": [], "ipv4": { "address": "38.102.83.51", "broadcast": "38.102.83.255", "netmask": "255.255.255.0", "network": "38.102.83.0", "prefix": "24" }, "ipv6": [ { "address": "fe80::f816:3eff:fe6f:820", "prefix": "64", "scope": "link" } ], "macaddress": "fa:16:3e:6f:08:20", "module": "virtio_net", "mtu": 1500, "pciid": "virtio1", "promisc": false, "speed": -1, "timestamping": [], "type": "ether" }, "ansible_eth1": { "active": true, "device": "eth1", "features": { "esp_hw_offload": "off [fixed]", "esp_tx_csum_hw_offload": "off [fixed]", "generic_receive_offload": "on", "generic_segmentation_offload": "on", "highdma": "on [fixed]", "hsr_dup_offload": "off [fixed]", "hsr_fwd_offload": "off [fixed]", "hsr_tag_ins_offload": "off [fixed]", "hsr_tag_rm_offload": "off [fixed]", "hw_tc_offload": "off [fixed]", "l2_fwd_offload": "off [fixed]", "large_receive_offload": "off [fixed]", "loopback": "off [fixed]", "macsec_hw_offload": "off [fixed]", "ntuple_filters": "off [fixed]", "receive_hashing": "off [fixed]", "rx_all": "off [fixed]", "rx_checksumming": "on [fixed]", "rx_fcs": "off [fixed]", "rx_gro_hw": "on", "rx_gro_list": "off", "rx_udp_gro_forwarding": "off", "rx_udp_tunnel_port_offload": "off [fixed]", "rx_vlan_filter": "on [fixed]", "rx_vlan_offload": "off [fixed]", "rx_vlan_stag_filter": "off [fixed]", "rx_vlan_stag_hw_parse": "off [fixed]", "scatter_gather": "on", "tcp_segmentation_offload": "on", "tls_hw_record": "off [fixed]", "tls_hw_rx_offload": "off [fixed]", "tls_hw_tx_offload": "off [fixed]", "tx_checksum_fcoe_crc": "off [fixed]", "tx_checksum_ip_generic": "on", "tx_checksum_ipv4": "off [fixed]", "tx_checksum_ipv6": "off [fixed]", "tx_checksum_sctp": "off [fixed]", "tx_checksumming": "on", "tx_esp_segmentation": "off [fixed]", "tx_fcoe_segmentation": "off [fixed]", "tx_gre_csum_segmentation": "off [fixed]", "tx_gre_segmentation": "off [fixed]", "tx_gso_list": "off [fixed]", "tx_gso_partial": "off [fixed]", "tx_gso_robust": "on [fixed]", "tx_ipxip4_segmentation": "off [fixed]", "tx_ipxip6_segmentation": "off [fixed]", "tx_nocache_copy": "off", 
"tx_scatter_gather": "on", "tx_scatter_gather_fraglist": "off [fixed]", "tx_sctp_segmentation": "off [fixed]", "tx_tcp6_segmentation": "on", "tx_tcp_ecn_segmentation": "on", "tx_tcp_mangleid_segmentation": "off", "tx_tcp_segmentation": "on", "tx_tunnel_remcsum_segmentation": "off [fixed]", "tx_udp_segmentation": "off [fixed]", "tx_udp_tnl_csum_segmentation": "off [fixed]", "tx_udp_tnl_segmentation": "off [fixed]", "tx_vlan_offload": "off [fixed]", "tx_vlan_stag_hw_insert": "off [fixed]", "vlan_challenged": "off [fixed]" }, "hw_timestamp_filters": [], "ipv4": { "address": "192.168.122.11", "broadcast": "192.168.122.255", "netmask": "255.255.255.0", "network": "192.168.122.0", "prefix": "24" }, "macaddress": "fa:16:3e:fc:47:4f", "module": "virtio_net", "mtu": 1500, "pciid": "virtio5", "promisc": false, "speed": -1, "timestamping": [], "type": "ether" }, "ansible_fibre_channel_wwn": [], "ansible_fips": false, "ansible_form_factor": "Other", "ansible_fqdn": "controller", "ansible_hostname": "controller", "ansible_hostnqn": "nqn.2014-08.org.nvmexpress:uuid:2f7d2450-18ac-43a6-80ee-9caa4a7736e0", "ansible_interfaces": [ "lo", "eth1", "eth0" ], "ansible_is_chroot": false, "ansible_iscsi_iqn": "", "ansible_kernel": "5.14.0-620.el9.x86_64", "ansible_kernel_version": "#1 SMP PREEMPT_DYNAMIC Fri Sep 26 01:13:23 UTC 2025", "ansible_lo": { "active": true, "device": "lo", "features": { "esp_hw_offload": "off [fixed]", "esp_tx_csum_hw_offload": "off [fixed]", "generic_receive_offload": "on", "generic_segmentation_offload": "on", "highdma": "on [fixed]", "hsr_dup_offload": "off [fixed]", "hsr_fwd_offload": "off [fixed]", "hsr_tag_ins_offload": "off [fixed]", "hsr_tag_rm_offload": "off [fixed]", "hw_tc_offload": "off [fixed]", "l2_fwd_offload": "off [fixed]", "large_receive_offload": "off [fixed]", "loopback": "on [fixed]", "macsec_hw_offload": "off [fixed]", "ntuple_filters": "off [fixed]", "receive_hashing": "off [fixed]", "rx_all": "off [fixed]", "rx_checksumming": "on [fixed]", "rx_fcs": "off [fixed]", "rx_gro_hw": "off [fixed]", "rx_gro_list": "off", "rx_udp_gro_forwarding": "off", "rx_udp_tunnel_port_offload": "off [fixed]", "rx_vlan_filter": "off [fixed]", "rx_vlan_offload": "off [fixed]", "rx_vlan_stag_filter": "off [fixed]", "rx_vlan_stag_hw_parse": "off [fixed]", "scatter_gather": "on", "tcp_segmentation_offload": "on", "tls_hw_record": "off [fixed]", "tls_hw_rx_offload": "off [fixed]", "tls_hw_tx_offload": "off [fixed]", "tx_checksum_fcoe_crc": "off [fixed]", "tx_checksum_ip_generic": "on [fixed]", "tx_checksum_ipv4": "off [fixed]", "tx_checksum_ipv6": "off [fixed]", "tx_checksum_sctp": "on [fixed]", "tx_checksumming": "on", "tx_esp_segmentation": "off [fixed]", "tx_fcoe_segmentation": "off [fixed]", "tx_gre_csum_segmentation": "off [fixed]", "tx_gre_segmentation": "off [fixed]", "tx_gso_list": "on", "tx_gso_partial": "off [fixed]", "tx_gso_robust": "off [fixed]", "tx_ipxip4_segmentation": "off [fixed]", "tx_ipxip6_segmentation": "off [fixed]", "tx_nocache_copy": "off [fixed]", "tx_scatter_gather": "on [fixed]", "tx_scatter_gather_fraglist": "on [fixed]", "tx_sctp_segmentation": "on", "tx_tcp6_segmentation": "on", "tx_tcp_ecn_segmentation": "on", "tx_tcp_mangleid_segmentation": "on", "tx_tcp_segmentation": "on", "tx_tunnel_remcsum_segmentation": "off [fixed]", "tx_udp_segmentation": "on", "tx_udp_tnl_csum_segmentation": "off [fixed]", "tx_udp_tnl_segmentation": "off [fixed]", "tx_vlan_offload": "off [fixed]", "tx_vlan_stag_hw_insert": "off [fixed]", "vlan_challenged": "on [fixed]" }, 
"hw_timestamp_filters": [], "ipv4": { "address": "127.0.0.1", "broadcast": "", "netmask": "255.0.0.0", "network": "127.0.0.0", "prefix": "8" }, "ipv6": [ { "address": "::1", "prefix": "128", "scope": "host" } ], "mtu": 65536, "promisc": false, "timestamping": [], "type": "loopback" }, "ansible_loadavg": { "15m": 0.47, "1m": 1.23, "5m": 0.87 }, "ansible_local": {}, "ansible_locally_reachable_ips": { "ipv4": [ "38.102.83.51", "127.0.0.0/8", "127.0.0.1", "192.168.122.11" ], "ipv6": [ "::1", "fe80::f816:3eff:fe6f:820" ] }, "ansible_lsb": {}, "ansible_lvm": "N/A", "ansible_machine": "x86_64", "ansible_machine_id": "42833e1b511a402df82cb9cb2fc36491", "ansible_memfree_mb": 1530, "ansible_memory_mb": { "nocache": { "free": 2950, "used": 705 }, "real": { "free": 1530, "total": 3655, "used": 2125 }, "swap": { "cached": 0, "free": 0, "total": 0, "used": 0 } }, "ansible_memtotal_mb": 3655, "ansible_mounts": [ { "block_available": 9531797, "block_size": 4096, "block_total": 10469115, "block_used": 937318, "device": "/dev/vda1", "fstype": "xfs", "inode_available": 20821759, "inode_total": 20970992, "inode_used": 149233, "mount": "/", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 39042240512, "size_total": 42881495040, "uuid": "1631a6ad-43b8-436d-ae76-16fa14b94458" } ], "ansible_nodename": "controller", "ansible_os_family": "RedHat", "ansible_pkg_mgr": "dnf", "ansible_proc_cmdline": { "BOOT_IMAGE": "(hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64", "console": "ttyS0,115200n8", "crashkernel": "1G-2G:192M,2G-64G:256M,64G-:512M", "net.ifnames": "0", "no_timer_check": true, "ro": true, "root": "UUID=1631a6ad-43b8-436d-ae76-16fa14b94458" }, "ansible_processor": [ "0", "AuthenticAMD", "AMD EPYC-Rome Processor", "1", "AuthenticAMD", "AMD EPYC-Rome Processor" ], "ansible_processor_cores": 1, "ansible_processor_count": 2, "ansible_processor_nproc": 2, "ansible_processor_threads_per_core": 1, "ansible_processor_vcpus": 2, "ansible_product_name": "OpenStack Nova", "ansible_product_serial": "NA", "ansible_product_uuid": "NA", "ansible_product_version": "26.2.1", "ansible_python": { "executable": "/usr/bin/python3", "has_sslcontext": true, "type": "cpython", "version": { "major": 3, "micro": 23, "minor": 9, "releaselevel": "final", "serial": 0 }, "version_info": [ 3, 9, 23, "final", 0 ] }, "ansible_python_version": "3.9.23", "ansible_real_group_id": 1000, "ansible_real_user_id": 1000, "ansible_selinux": { "config_mode": "enforcing", "mode": "enforcing", "policyvers": 33, "status": "enabled", "type": "targeted" }, "ansible_selinux_python_present": true, "ansible_service_mgr": "systemd", "ansible_ssh_host_key_ecdsa_public": "AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBDpEwzeDGLwNlfP3Up6vCxCw7kSSu0AiDUvDH/J+EepxMPGLLpzT0wX+lEXL9GArqfNU/UBUmiiwh9dZO9tQ5bk=", "ansible_ssh_host_key_ecdsa_public_keytype": "ecdsa-sha2-nistp256", "ansible_ssh_host_key_ed25519_public": "AAAAC3NzaC1lZDI1NTE5AAAAIPrlPStzDnFCaI6YFfPj0aQKsBPAAZFkT8awb2RrAe7g", "ansible_ssh_host_key_ed25519_public_keytype": "ssh-ed25519", "ansible_ssh_host_key_rsa_public": 
"AAAAB3NzaC1yc2EAAAADAQABAAABgQCsnSYzB9ciMqmgm0l3UC4GvkzqKIRU4HQjm2Wmmz4ONumnWKBZyfZPUd4C1zWgntSd7/HfwyQS5GOnhobA5K/1o855yq/Qr6a2M0JVvxnLdxB089mymIDZ9Z5iXDsVHJNPHKuz3pjoZDbA5XzpQPsDbEeMHpBd0Yz5DQaAPYYN1wg4Wtq6PK86i2jV8qtVH7OnCkn06futt/HtJ4eADwKZV6cutqDHmuTqXwagLJ7PWTm0H9xAYR/Tsgd28krH/EIdcyHBACqdSrk6FWPOdZ1Q5PjVC0ZOHemQeiRhmYW5NgxnnEgSmoTMCyMKRYbVcMYeHKRBg/rXhLSbymoU+eF+Kza486CELgT9KG4Z0NTOmyzNu1ee8G0ZOaowjIQ8Gr6e15WUMLbskShDGqXlAnaRHOAQhBSGCkt0N9KMyGaBdFYVzJOgqi1erPoCN1pLe7Ljr44blAH6Yvp9H8Ji4mLuVYB7PmDHL0Mb4zkjqi/MU9Okx1escBZI4ASrkaXkp18=", "ansible_ssh_host_key_rsa_public_keytype": "ssh-rsa", "ansible_swapfree_mb": 0, "ansible_swaptotal_mb": 0, "ansible_system": "Linux", "ansible_system_capabilities": [ "" ], "ansible_system_capabilities_enforced": "True", "ansible_system_vendor": "OpenStack Foundation", "ansible_uptime_seconds": 791, "ansible_user_dir": "/home/zuul", "ansible_user_gecos": "", "ansible_user_gid": 1000, "ansible_user_id": "zuul", "ansible_user_shell": "/bin/bash", "ansible_user_uid": 1000, "ansible_userspace_architecture": "x86_64", "ansible_userspace_bits": "64", "ansible_virtualization_role": "guest", "ansible_virtualization_tech_guest": [ "openstack" ], "ansible_virtualization_tech_host": [ "kvm" ], "ansible_virtualization_type": "openstack", "cifmw_discovered_hash": "0653754d2af8079f7c677f47fa5006beb1f727ca036526ac8bf07bf53cd43e47", "cifmw_discovered_hash_algorithm": "sha256", "cifmw_discovered_image_name": "CentOS-Stream-GenericCloud-x86_64-9-latest.x86_64.qcow2", "cifmw_discovered_image_url": "https://cloud.centos.org/centos/9-stream/x86_64/images//CentOS-Stream-GenericCloud-x86_64-9-latest.x86_64.qcow2", "cifmw_install_yamls_defaults": { "ADOPTED_EXTERNAL_NETWORK": "172.21.1.0/24", "ADOPTED_INTERNALAPI_NETWORK": "172.17.1.0/24", "ADOPTED_STORAGEMGMT_NETWORK": "172.20.1.0/24", "ADOPTED_STORAGE_NETWORK": "172.18.1.0/24", "ADOPTED_TENANT_NETWORK": "172.9.1.0/24", "ANSIBLEEE": "config/samples/_v1beta1_ansibleee.yaml", "ANSIBLEEE_BRANCH": "main", "ANSIBLEEE_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/config/samples/_v1beta1_ansibleee.yaml", "ANSIBLEEE_IMG": "quay.io/openstack-k8s-operators/openstack-ansibleee-operator-index:latest", "ANSIBLEEE_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/kuttl-test.yaml", "ANSIBLEEE_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/tests/kuttl/tests", "ANSIBLEEE_KUTTL_NAMESPACE": "ansibleee-kuttl-tests", "ANSIBLEEE_REPO": "https://github.com/openstack-k8s-operators/openstack-ansibleee-operator", "ANSIBLEE_COMMIT_HASH": "", "BARBICAN": "config/samples/barbican_v1beta1_barbican.yaml", "BARBICAN_BRANCH": "main", "BARBICAN_COMMIT_HASH": "", "BARBICAN_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/config/samples/barbican_v1beta1_barbican.yaml", "BARBICAN_DEPL_IMG": "unused", "BARBICAN_IMG": "quay.io/openstack-k8s-operators/barbican-operator-index:latest", "BARBICAN_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/kuttl-test.yaml", "BARBICAN_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/tests/kuttl/tests", "BARBICAN_KUTTL_NAMESPACE": "barbican-kuttl-tests", "BARBICAN_REPO": "https://github.com/openstack-k8s-operators/barbican-operator.git", "BARBICAN_SERVICE_ENABLED": "true", "BARBICAN_SIMPLE_CRYPTO_ENCRYPTION_KEY": "sEFmdFjDUqRM2VemYslV5yGNWjokioJXsg8Nrlc3drU=", 
"BAREMETAL_BRANCH": "main", "BAREMETAL_COMMIT_HASH": "", "BAREMETAL_IMG": "quay.io/openstack-k8s-operators/openstack-baremetal-operator-index:latest", "BAREMETAL_OS_CONTAINER_IMG": "", "BAREMETAL_OS_IMG": "", "BAREMETAL_REPO": "https://github.com/openstack-k8s-operators/openstack-baremetal-operator.git", "BAREMETAL_TIMEOUT": "20m", "BASH_IMG": "quay.io/openstack-k8s-operators/bash:latest", "BGP_ASN": "64999", "BGP_LEAF_1": "100.65.4.1", "BGP_LEAF_2": "100.64.4.1", "BGP_OVN_ROUTING": "false", "BGP_PEER_ASN": "64999", "BGP_SOURCE_IP": "172.30.4.2", "BGP_SOURCE_IP6": "f00d:f00d:f00d:f00d:f00d:f00d:f00d:42", "BMAAS_BRIDGE_IPV4_PREFIX": "172.20.1.2/24", "BMAAS_BRIDGE_IPV6_PREFIX": "fd00:bbbb::2/64", "BMAAS_INSTANCE_DISK_SIZE": "20", "BMAAS_INSTANCE_MEMORY": "4096", "BMAAS_INSTANCE_NAME_PREFIX": "crc-bmaas", "BMAAS_INSTANCE_NET_MODEL": "virtio", "BMAAS_INSTANCE_OS_VARIANT": "centos-stream9", "BMAAS_INSTANCE_VCPUS": "2", "BMAAS_INSTANCE_VIRT_TYPE": "kvm", "BMAAS_IPV4": "true", "BMAAS_IPV6": "false", "BMAAS_LIBVIRT_USER": "sushyemu", "BMAAS_METALLB_ADDRESS_POOL": "172.20.1.64/26", "BMAAS_METALLB_POOL_NAME": "baremetal", "BMAAS_NETWORK_IPV4_PREFIX": "172.20.1.1/24", "BMAAS_NETWORK_IPV6_PREFIX": "fd00:bbbb::1/64", "BMAAS_NETWORK_NAME": "crc-bmaas", "BMAAS_NODE_COUNT": "1", "BMAAS_OCP_INSTANCE_NAME": "crc", "BMAAS_REDFISH_PASSWORD": "password", "BMAAS_REDFISH_USERNAME": "admin", "BMAAS_ROUTE_LIBVIRT_NETWORKS": "crc-bmaas,crc,default", "BMAAS_SUSHY_EMULATOR_DRIVER": "libvirt", "BMAAS_SUSHY_EMULATOR_IMAGE": "quay.io/metal3-io/sushy-tools:latest", "BMAAS_SUSHY_EMULATOR_NAMESPACE": "sushy-emulator", "BMAAS_SUSHY_EMULATOR_OS_CLIENT_CONFIG_FILE": "/etc/openstack/clouds.yaml", "BMAAS_SUSHY_EMULATOR_OS_CLOUD": "openstack", "BMH_NAMESPACE": "openstack", "BMO_BRANCH": "release-0.9", "BMO_COMMIT_HASH": "", "BMO_IPA_BRANCH": "stable/2024.1", "BMO_IRONIC_HOST": "192.168.122.10", "BMO_PROVISIONING_INTERFACE": "", "BMO_REPO": "https://github.com/metal3-io/baremetal-operator", "BMO_SETUP": false, "BMO_SETUP_ROUTE_REPLACE": "true", "BM_CTLPLANE_INTERFACE": "enp1s0", "BM_INSTANCE_MEMORY": "8192", "BM_INSTANCE_NAME_PREFIX": "edpm-compute-baremetal", "BM_INSTANCE_NAME_SUFFIX": "0", "BM_NETWORK_NAME": "default", "BM_NODE_COUNT": "1", "BM_ROOT_PASSWORD": "", "BM_ROOT_PASSWORD_SECRET": "", "CEILOMETER_CENTRAL_DEPL_IMG": "unused", "CEILOMETER_NOTIFICATION_DEPL_IMG": "unused", "CEPH_BRANCH": "release-1.15", "CEPH_CLIENT": "/home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/toolbox.yaml", "CEPH_COMMON": "/home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/common.yaml", "CEPH_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/cluster-test.yaml", "CEPH_CRDS": "/home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/crds.yaml", "CEPH_IMG": "quay.io/ceph/demo:latest-squid", "CEPH_OP": "/home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/operator-openshift.yaml", "CEPH_REPO": "https://github.com/rook/rook.git", "CERTMANAGER_TIMEOUT": "300s", "CHECKOUT_FROM_OPENSTACK_REF": "true", "CINDER": "config/samples/cinder_v1beta1_cinder.yaml", "CINDERAPI_DEPL_IMG": "unused", "CINDERBKP_DEPL_IMG": "unused", "CINDERSCH_DEPL_IMG": "unused", "CINDERVOL_DEPL_IMG": "unused", "CINDER_BRANCH": "main", "CINDER_COMMIT_HASH": "", "CINDER_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/config/samples/cinder_v1beta1_cinder.yaml", "CINDER_IMG": 
"quay.io/openstack-k8s-operators/cinder-operator-index:latest", "CINDER_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/kuttl-test.yaml", "CINDER_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/test/kuttl/tests", "CINDER_KUTTL_NAMESPACE": "cinder-kuttl-tests", "CINDER_REPO": "https://github.com/openstack-k8s-operators/cinder-operator.git", "CLEANUP_DIR_CMD": "rm -Rf", "CRC_BGP_NIC_1_MAC": "52:54:00:11:11:11", "CRC_BGP_NIC_2_MAC": "52:54:00:11:11:12", "CRC_HTTPS_PROXY": "", "CRC_HTTP_PROXY": "", "CRC_STORAGE_NAMESPACE": "crc-storage", "CRC_STORAGE_RETRIES": "3", "CRC_URL": "'https://developers.redhat.com/content-gateway/rest/mirror/pub/openshift-v4/clients/crc/latest/crc-linux-amd64.tar.xz'", "CRC_VERSION": "latest", "DATAPLANE_ANSIBLE_SECRET": "dataplane-ansible-ssh-private-key-secret", "DATAPLANE_ANSIBLE_USER": "", "DATAPLANE_COMPUTE_IP": "192.168.122.100", "DATAPLANE_CONTAINER_PREFIX": "openstack", "DATAPLANE_CONTAINER_TAG": "current-podified", "DATAPLANE_CUSTOM_SERVICE_RUNNER_IMG": "quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest", "DATAPLANE_DEFAULT_GW": "192.168.122.1", "DATAPLANE_EXTRA_NOVA_CONFIG_FILE": "/dev/null", "DATAPLANE_GROWVOLS_ARGS": "/=8GB /tmp=1GB /home=1GB /var=100%", "DATAPLANE_KUSTOMIZE_SCENARIO": "preprovisioned", "DATAPLANE_NETWORKER_IP": "192.168.122.200", "DATAPLANE_NETWORK_INTERFACE_NAME": "eth0", "DATAPLANE_NOVA_NFS_PATH": "", "DATAPLANE_NTP_SERVER": "pool.ntp.org", "DATAPLANE_PLAYBOOK": "osp.edpm.download_cache", "DATAPLANE_REGISTRY_URL": "quay.io/podified-antelope-centos9", "DATAPLANE_RUNNER_IMG": "", "DATAPLANE_SERVER_ROLE": "compute", "DATAPLANE_SSHD_ALLOWED_RANGES": "['192.168.122.0/24']", "DATAPLANE_TIMEOUT": "30m", "DATAPLANE_TLS_ENABLED": "true", "DATAPLANE_TOTAL_NETWORKER_NODES": "1", "DATAPLANE_TOTAL_NODES": "1", "DBSERVICE": "galera", "DESIGNATE": "config/samples/designate_v1beta1_designate.yaml", "DESIGNATE_BRANCH": "main", "DESIGNATE_COMMIT_HASH": "", "DESIGNATE_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/config/samples/designate_v1beta1_designate.yaml", "DESIGNATE_IMG": "quay.io/openstack-k8s-operators/designate-operator-index:latest", "DESIGNATE_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/kuttl-test.yaml", "DESIGNATE_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/tests/kuttl/tests", "DESIGNATE_KUTTL_NAMESPACE": "designate-kuttl-tests", "DESIGNATE_REPO": "https://github.com/openstack-k8s-operators/designate-operator.git", "DNSDATA": "config/samples/network_v1beta1_dnsdata.yaml", "DNSDATA_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsdata.yaml", "DNSMASQ": "config/samples/network_v1beta1_dnsmasq.yaml", "DNSMASQ_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsmasq.yaml", "DNS_DEPL_IMG": "unused", "DNS_DOMAIN": "localdomain", "DOWNLOAD_TOOLS_SELECTION": "all", "EDPM_ATTACH_EXTNET": "true", "EDPM_COMPUTE_ADDITIONAL_HOST_ROUTES": "'[]'", "EDPM_COMPUTE_ADDITIONAL_NETWORKS": "'[]'", "EDPM_COMPUTE_CELLS": "1", "EDPM_COMPUTE_CEPH_ENABLED": "true", "EDPM_COMPUTE_CEPH_NOVA": "true", "EDPM_COMPUTE_DHCP_AGENT_ENABLED": "true", "EDPM_COMPUTE_SRIOV_ENABLED": "true", "EDPM_COMPUTE_SUFFIX": "0", "EDPM_CONFIGURE_DEFAULT_ROUTE": "true", "EDPM_CONFIGURE_HUGEPAGES": "false", "EDPM_CONFIGURE_NETWORKING": "true", 
"EDPM_FIRSTBOOT_EXTRA": "/tmp/edpm-firstboot-extra", "EDPM_NETWORKER_SUFFIX": "0", "EDPM_TOTAL_NETWORKERS": "1", "EDPM_TOTAL_NODES": "1", "GALERA_REPLICAS": "", "GENERATE_SSH_KEYS": "true", "GIT_CLONE_OPTS": "", "GLANCE": "config/samples/glance_v1beta1_glance.yaml", "GLANCEAPI_DEPL_IMG": "unused", "GLANCE_BRANCH": "main", "GLANCE_COMMIT_HASH": "", "GLANCE_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/config/samples/glance_v1beta1_glance.yaml", "GLANCE_IMG": "quay.io/openstack-k8s-operators/glance-operator-index:latest", "GLANCE_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/kuttl-test.yaml", "GLANCE_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/test/kuttl/tests", "GLANCE_KUTTL_NAMESPACE": "glance-kuttl-tests", "GLANCE_REPO": "https://github.com/openstack-k8s-operators/glance-operator.git", "HEAT": "config/samples/heat_v1beta1_heat.yaml", "HEATAPI_DEPL_IMG": "unused", "HEATCFNAPI_DEPL_IMG": "unused", "HEATENGINE_DEPL_IMG": "unused", "HEAT_AUTH_ENCRYPTION_KEY": "767c3ed056cbaa3b9dfedb8c6f825bf0", "HEAT_BRANCH": "main", "HEAT_COMMIT_HASH": "", "HEAT_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/config/samples/heat_v1beta1_heat.yaml", "HEAT_IMG": "quay.io/openstack-k8s-operators/heat-operator-index:latest", "HEAT_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/kuttl-test.yaml", "HEAT_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/tests/kuttl/tests", "HEAT_KUTTL_NAMESPACE": "heat-kuttl-tests", "HEAT_REPO": "https://github.com/openstack-k8s-operators/heat-operator.git", "HEAT_SERVICE_ENABLED": "true", "HORIZON": "config/samples/horizon_v1beta1_horizon.yaml", "HORIZON_BRANCH": "main", "HORIZON_COMMIT_HASH": "", "HORIZON_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/config/samples/horizon_v1beta1_horizon.yaml", "HORIZON_DEPL_IMG": "unused", "HORIZON_IMG": "quay.io/openstack-k8s-operators/horizon-operator-index:latest", "HORIZON_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/kuttl-test.yaml", "HORIZON_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/tests/kuttl/tests", "HORIZON_KUTTL_NAMESPACE": "horizon-kuttl-tests", "HORIZON_REPO": "https://github.com/openstack-k8s-operators/horizon-operator.git", "INFRA_BRANCH": "main", "INFRA_COMMIT_HASH": "", "INFRA_IMG": "quay.io/openstack-k8s-operators/infra-operator-index:latest", "INFRA_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/kuttl-test.yaml", "INFRA_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/tests/kuttl/tests", "INFRA_KUTTL_NAMESPACE": "infra-kuttl-tests", "INFRA_REPO": "https://github.com/openstack-k8s-operators/infra-operator.git", "INSTALL_CERT_MANAGER": false, "INSTALL_NMSTATE": "true || false", "INSTALL_NNCP": "true || false", "INTERNALAPI_HOST_ROUTES": "", "IPV6_LAB_IPV4_NETWORK_IPADDRESS": "172.30.0.1/24", "IPV6_LAB_IPV6_NETWORK_IPADDRESS": "fd00:abcd:abcd:fc00::1/64", "IPV6_LAB_LIBVIRT_STORAGE_POOL": "default", "IPV6_LAB_MANAGE_FIREWALLD": "true", "IPV6_LAB_NAT64_HOST_IPV4": "172.30.0.2/24", "IPV6_LAB_NAT64_HOST_IPV6": "fd00:abcd:abcd:fc00::2/64", "IPV6_LAB_NAT64_INSTANCE_NAME": "nat64-router", "IPV6_LAB_NAT64_IPV6_NETWORK": "fd00:abcd:abcd:fc00::/64", "IPV6_LAB_NAT64_TAYGA_DYNAMIC_POOL": "192.168.255.0/24", 
"IPV6_LAB_NAT64_TAYGA_IPV4": "192.168.255.1", "IPV6_LAB_NAT64_TAYGA_IPV6": "fd00:abcd:abcd:fc00::3", "IPV6_LAB_NAT64_TAYGA_IPV6_PREFIX": "fd00:abcd:abcd:fcff::/96", "IPV6_LAB_NAT64_UPDATE_PACKAGES": "false", "IPV6_LAB_NETWORK_NAME": "nat64", "IPV6_LAB_SNO_CLUSTER_NETWORK": "fd00:abcd:0::/48", "IPV6_LAB_SNO_HOST_IP": "fd00:abcd:abcd:fc00::11", "IPV6_LAB_SNO_HOST_PREFIX": "64", "IPV6_LAB_SNO_INSTANCE_NAME": "sno", "IPV6_LAB_SNO_MACHINE_NETWORK": "fd00:abcd:abcd:fc00::/64", "IPV6_LAB_SNO_OCP_MIRROR_URL": "https://mirror.openshift.com/pub/openshift-v4/clients/ocp", "IPV6_LAB_SNO_OCP_VERSION": "latest-4.14", "IPV6_LAB_SNO_SERVICE_NETWORK": "fd00:abcd:abcd:fc03::/112", "IPV6_LAB_SSH_PUB_KEY": "/home/zuul/.ssh/id_rsa.pub", "IPV6_LAB_WORK_DIR": "/home/zuul/.ipv6lab", "IRONIC": "config/samples/ironic_v1beta1_ironic.yaml", "IRONICAPI_DEPL_IMG": "unused", "IRONICCON_DEPL_IMG": "unused", "IRONICINS_DEPL_IMG": "unused", "IRONICNAG_DEPL_IMG": "unused", "IRONICPXE_DEPL_IMG": "unused", "IRONIC_BRANCH": "main", "IRONIC_COMMIT_HASH": "", "IRONIC_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/config/samples/ironic_v1beta1_ironic.yaml", "IRONIC_IMAGE_TAG": "release-24.1", "IRONIC_IMG": "quay.io/openstack-k8s-operators/ironic-operator-index:latest", "IRONIC_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/kuttl-test.yaml", "IRONIC_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/tests/kuttl/tests", "IRONIC_KUTTL_NAMESPACE": "ironic-kuttl-tests", "IRONIC_REPO": "https://github.com/openstack-k8s-operators/ironic-operator.git", "KEYSTONEAPI": "config/samples/keystone_v1beta1_keystoneapi.yaml", "KEYSTONEAPI_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/config/samples/keystone_v1beta1_keystoneapi.yaml", "KEYSTONEAPI_DEPL_IMG": "unused", "KEYSTONE_BRANCH": "main", "KEYSTONE_COMMIT_HASH": "", "KEYSTONE_FEDERATION_CLIENT_SECRET": "COX8bmlKAWn56XCGMrKQJj7dgHNAOl6f", "KEYSTONE_FEDERATION_CRYPTO_PASSPHRASE": "openstack", "KEYSTONE_IMG": "quay.io/openstack-k8s-operators/keystone-operator-index:latest", "KEYSTONE_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/kuttl-test.yaml", "KEYSTONE_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/tests/kuttl/tests", "KEYSTONE_KUTTL_NAMESPACE": "keystone-kuttl-tests", "KEYSTONE_REPO": "https://github.com/openstack-k8s-operators/keystone-operator.git", "KUBEADMIN_PWD": "12345678", "LIBVIRT_SECRET": "libvirt-secret", "LOKI_DEPLOY_MODE": "openshift-network", "LOKI_DEPLOY_NAMESPACE": "netobserv", "LOKI_DEPLOY_SIZE": "1x.demo", "LOKI_NAMESPACE": "openshift-operators-redhat", "LOKI_OPERATOR_GROUP": "openshift-operators-redhat-loki", "LOKI_SUBSCRIPTION": "loki-operator", "LVMS_CR": "1", "MANILA": "config/samples/manila_v1beta1_manila.yaml", "MANILAAPI_DEPL_IMG": "unused", "MANILASCH_DEPL_IMG": "unused", "MANILASHARE_DEPL_IMG": "unused", "MANILA_BRANCH": "main", "MANILA_COMMIT_HASH": "", "MANILA_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/config/samples/manila_v1beta1_manila.yaml", "MANILA_IMG": "quay.io/openstack-k8s-operators/manila-operator-index:latest", "MANILA_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/kuttl-test.yaml", "MANILA_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/test/kuttl/tests", "MANILA_KUTTL_NAMESPACE": "manila-kuttl-tests", 
"MANILA_REPO": "https://github.com/openstack-k8s-operators/manila-operator.git", "MANILA_SERVICE_ENABLED": "true", "MARIADB": "config/samples/mariadb_v1beta1_galera.yaml", "MARIADB_BRANCH": "main", "MARIADB_CHAINSAW_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/tests/chainsaw/config.yaml", "MARIADB_CHAINSAW_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/tests/chainsaw/tests", "MARIADB_CHAINSAW_NAMESPACE": "mariadb-chainsaw-tests", "MARIADB_COMMIT_HASH": "", "MARIADB_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/config/samples/mariadb_v1beta1_galera.yaml", "MARIADB_DEPL_IMG": "unused", "MARIADB_IMG": "quay.io/openstack-k8s-operators/mariadb-operator-index:latest", "MARIADB_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/kuttl-test.yaml", "MARIADB_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/tests/kuttl/tests", "MARIADB_KUTTL_NAMESPACE": "mariadb-kuttl-tests", "MARIADB_REPO": "https://github.com/openstack-k8s-operators/mariadb-operator.git", "MEMCACHED": "config/samples/memcached_v1beta1_memcached.yaml", "MEMCACHED_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/memcached_v1beta1_memcached.yaml", "MEMCACHED_DEPL_IMG": "unused", "METADATA_SHARED_SECRET": "1234567842", "METALLB_IPV6_POOL": "fd00:aaaa::80-fd00:aaaa::90", "METALLB_POOL": "192.168.122.80-192.168.122.90", "MICROSHIFT": "0", "NAMESPACE": "openstack", "NETCONFIG": "config/samples/network_v1beta1_netconfig.yaml", "NETCONFIG_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_netconfig.yaml", "NETCONFIG_DEPL_IMG": "unused", "NETOBSERV_DEPLOY_NAMESPACE": "netobserv", "NETOBSERV_NAMESPACE": "openshift-netobserv-operator", "NETOBSERV_OPERATOR_GROUP": "openshift-netobserv-operator-net", "NETOBSERV_SUBSCRIPTION": "netobserv-operator", "NETWORK_BGP": "false", "NETWORK_DESIGNATE_ADDRESS_PREFIX": "172.28.0", "NETWORK_DESIGNATE_EXT_ADDRESS_PREFIX": "172.50.0", "NETWORK_INTERNALAPI_ADDRESS_PREFIX": "172.17.0", "NETWORK_ISOLATION": "true", "NETWORK_ISOLATION_INSTANCE_NAME": "crc", "NETWORK_ISOLATION_IPV4": "true", "NETWORK_ISOLATION_IPV4_ADDRESS": "172.16.1.1/24", "NETWORK_ISOLATION_IPV4_NAT": "true", "NETWORK_ISOLATION_IPV6": "false", "NETWORK_ISOLATION_IPV6_ADDRESS": "fd00:aaaa::1/64", "NETWORK_ISOLATION_IP_ADDRESS": "192.168.122.10", "NETWORK_ISOLATION_MAC": "52:54:00:11:11:10", "NETWORK_ISOLATION_NETWORK_NAME": "net-iso", "NETWORK_ISOLATION_NET_NAME": "default", "NETWORK_ISOLATION_USE_DEFAULT_NETWORK": "true", "NETWORK_MTU": "1500", "NETWORK_STORAGEMGMT_ADDRESS_PREFIX": "172.20.0", "NETWORK_STORAGE_ADDRESS_PREFIX": "172.18.0", "NETWORK_STORAGE_MACVLAN": "", "NETWORK_TENANT_ADDRESS_PREFIX": "172.19.0", "NETWORK_VLAN_START": "20", "NETWORK_VLAN_STEP": "1", "NEUTRONAPI": "config/samples/neutron_v1beta1_neutronapi.yaml", "NEUTRONAPI_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/config/samples/neutron_v1beta1_neutronapi.yaml", "NEUTRONAPI_DEPL_IMG": "unused", "NEUTRON_BRANCH": "main", "NEUTRON_COMMIT_HASH": "", "NEUTRON_IMG": "quay.io/openstack-k8s-operators/neutron-operator-index:latest", "NEUTRON_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/kuttl-test.yaml", "NEUTRON_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/test/kuttl/tests", 
"NEUTRON_KUTTL_NAMESPACE": "neutron-kuttl-tests", "NEUTRON_REPO": "https://github.com/openstack-k8s-operators/neutron-operator.git", "NFS_HOME": "/home/nfs", "NMSTATE_NAMESPACE": "openshift-nmstate", "NMSTATE_OPERATOR_GROUP": "openshift-nmstate-tn6k8", "NMSTATE_SUBSCRIPTION": "kubernetes-nmstate-operator", "NNCP_ADDITIONAL_HOST_ROUTES": "", "NNCP_BGP_1_INTERFACE": "enp7s0", "NNCP_BGP_1_IP_ADDRESS": "100.65.4.2", "NNCP_BGP_2_INTERFACE": "enp8s0", "NNCP_BGP_2_IP_ADDRESS": "100.64.4.2", "NNCP_BRIDGE": "ospbr", "NNCP_CLEANUP_TIMEOUT": "120s", "NNCP_CTLPLANE_IPV6_ADDRESS_PREFIX": "fd00:aaaa::", "NNCP_CTLPLANE_IPV6_ADDRESS_SUFFIX": "10", "NNCP_CTLPLANE_IP_ADDRESS_PREFIX": "192.168.122", "NNCP_CTLPLANE_IP_ADDRESS_SUFFIX": "10", "NNCP_DNS_SERVER": "192.168.122.1", "NNCP_DNS_SERVER_IPV6": "fd00:aaaa::1", "NNCP_GATEWAY": "192.168.122.1", "NNCP_GATEWAY_IPV6": "fd00:aaaa::1", "NNCP_INTERFACE": "enp6s0", "NNCP_NODES": "", "NNCP_TIMEOUT": "240s", "NOVA": "config/samples/nova_v1beta1_nova_collapsed_cell.yaml", "NOVA_BRANCH": "main", "NOVA_COMMIT_HASH": "", "NOVA_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/nova-operator/config/samples/nova_v1beta1_nova_collapsed_cell.yaml", "NOVA_IMG": "quay.io/openstack-k8s-operators/nova-operator-index:latest", "NOVA_REPO": "https://github.com/openstack-k8s-operators/nova-operator.git", "NUMBER_OF_INSTANCES": "1", "OCP_NETWORK_NAME": "crc", "OCTAVIA": "config/samples/octavia_v1beta1_octavia.yaml", "OCTAVIA_BRANCH": "main", "OCTAVIA_COMMIT_HASH": "", "OCTAVIA_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/config/samples/octavia_v1beta1_octavia.yaml", "OCTAVIA_IMG": "quay.io/openstack-k8s-operators/octavia-operator-index:latest", "OCTAVIA_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/kuttl-test.yaml", "OCTAVIA_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/tests/kuttl/tests", "OCTAVIA_KUTTL_NAMESPACE": "octavia-kuttl-tests", "OCTAVIA_REPO": "https://github.com/openstack-k8s-operators/octavia-operator.git", "OKD": "false", "OPENSTACK_BRANCH": "main", "OPENSTACK_BUNDLE_IMG": "quay.io/openstack-k8s-operators/openstack-operator-bundle:latest", "OPENSTACK_COMMIT_HASH": "", "OPENSTACK_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml", "OPENSTACK_CRDS_DIR": "openstack_crds", "OPENSTACK_CTLPLANE": "config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml", "OPENSTACK_IMG": "quay.io/openstack-k8s-operators/openstack-operator-index:latest", "OPENSTACK_K8S_BRANCH": "main", "OPENSTACK_K8S_TAG": "latest", "OPENSTACK_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/kuttl-test.yaml", "OPENSTACK_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/tests/kuttl/tests", "OPENSTACK_KUTTL_NAMESPACE": "openstack-kuttl-tests", "OPENSTACK_NEUTRON_CUSTOM_CONF": "", "OPENSTACK_REPO": "https://github.com/openstack-k8s-operators/openstack-operator.git", "OPENSTACK_STORAGE_BUNDLE_IMG": "quay.io/openstack-k8s-operators/openstack-operator-storage-bundle:latest", "OPERATOR_BASE_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator", "OPERATOR_CHANNEL": "", "OPERATOR_NAMESPACE": "openstack-operators", "OPERATOR_SOURCE": "", "OPERATOR_SOURCE_NAMESPACE": "", "OUT": "/home/zuul/ci-framework-data/artifacts/manifests", "OUTPUT_DIR": 
"/home/zuul/ci-framework-data/artifacts/edpm", "OVNCONTROLLER": "config/samples/ovn_v1beta1_ovncontroller.yaml", "OVNCONTROLLER_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovncontroller.yaml", "OVNCONTROLLER_NMAP": "true", "OVNDBS": "config/samples/ovn_v1beta1_ovndbcluster.yaml", "OVNDBS_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovndbcluster.yaml", "OVNNORTHD": "config/samples/ovn_v1beta1_ovnnorthd.yaml", "OVNNORTHD_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovnnorthd.yaml", "OVN_BRANCH": "main", "OVN_COMMIT_HASH": "", "OVN_IMG": "quay.io/openstack-k8s-operators/ovn-operator-index:latest", "OVN_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/kuttl-test.yaml", "OVN_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/tests/kuttl/tests", "OVN_KUTTL_NAMESPACE": "ovn-kuttl-tests", "OVN_REPO": "https://github.com/openstack-k8s-operators/ovn-operator.git", "PASSWORD": "12345678", "PLACEMENTAPI": "config/samples/placement_v1beta1_placementapi.yaml", "PLACEMENTAPI_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/config/samples/placement_v1beta1_placementapi.yaml", "PLACEMENTAPI_DEPL_IMG": "unused", "PLACEMENT_BRANCH": "main", "PLACEMENT_COMMIT_HASH": "", "PLACEMENT_IMG": "quay.io/openstack-k8s-operators/placement-operator-index:latest", "PLACEMENT_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/kuttl-test.yaml", "PLACEMENT_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/tests/kuttl/tests", "PLACEMENT_KUTTL_NAMESPACE": "placement-kuttl-tests", "PLACEMENT_REPO": "https://github.com/openstack-k8s-operators/placement-operator.git", "PULL_SECRET": "/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/pull-secret.txt", "RABBITMQ": "docs/examples/default-security-context/rabbitmq.yaml", "RABBITMQ_BRANCH": "patches", "RABBITMQ_COMMIT_HASH": "", "RABBITMQ_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/rabbitmq-operator/docs/examples/default-security-context/rabbitmq.yaml", "RABBITMQ_DEPL_IMG": "unused", "RABBITMQ_IMG": "quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest", "RABBITMQ_REPO": "https://github.com/openstack-k8s-operators/rabbitmq-cluster-operator.git", "REDHAT_OPERATORS": "false", "REDIS": "config/samples/redis_v1beta1_redis.yaml", "REDIS_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator-redis/config/samples/redis_v1beta1_redis.yaml", "REDIS_DEPL_IMG": "unused", "RH_REGISTRY_PWD": "", "RH_REGISTRY_USER": "", "SECRET": "osp-secret", "SG_CORE_DEPL_IMG": "unused", "STANDALONE_COMPUTE_DRIVER": "libvirt", "STANDALONE_EXTERNAL_NET_PREFFIX": "172.21.0", "STANDALONE_INTERNALAPI_NET_PREFIX": "172.17.0", "STANDALONE_STORAGEMGMT_NET_PREFIX": "172.20.0", "STANDALONE_STORAGE_NET_PREFIX": "172.18.0", "STANDALONE_TENANT_NET_PREFIX": "172.19.0", "STORAGEMGMT_HOST_ROUTES": "", "STORAGE_CLASS": "local-storage", "STORAGE_HOST_ROUTES": "", "SWIFT": "config/samples/swift_v1beta1_swift.yaml", "SWIFT_BRANCH": "main", "SWIFT_COMMIT_HASH": "", "SWIFT_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/config/samples/swift_v1beta1_swift.yaml", "SWIFT_IMG": "quay.io/openstack-k8s-operators/swift-operator-index:latest", "SWIFT_KUTTL_CONF": 
"/home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/kuttl-test.yaml", "SWIFT_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/tests/kuttl/tests", "SWIFT_KUTTL_NAMESPACE": "swift-kuttl-tests", "SWIFT_REPO": "https://github.com/openstack-k8s-operators/swift-operator.git", "TELEMETRY": "config/samples/telemetry_v1beta1_telemetry.yaml", "TELEMETRY_BRANCH": "main", "TELEMETRY_COMMIT_HASH": "", "TELEMETRY_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/config/samples/telemetry_v1beta1_telemetry.yaml", "TELEMETRY_IMG": "quay.io/openstack-k8s-operators/telemetry-operator-index:latest", "TELEMETRY_KUTTL_BASEDIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator", "TELEMETRY_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/kuttl-test.yaml", "TELEMETRY_KUTTL_NAMESPACE": "telemetry-kuttl-tests", "TELEMETRY_KUTTL_RELPATH": "tests/kuttl/suites", "TELEMETRY_REPO": "https://github.com/openstack-k8s-operators/telemetry-operator.git", "TENANT_HOST_ROUTES": "", "TIMEOUT": "300s", "TLS_ENABLED": "false", "WATCHER_BRANCH": "", "WATCHER_REPO": "/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator", "tripleo_deploy": "export REGISTRY_USER:" }, "cifmw_install_yamls_environment": { "BMO_SETUP": false, "CHECKOUT_FROM_OPENSTACK_REF": "true", "INSTALL_CERT_MANAGER": false, "KUBECONFIG": "/home/zuul/.crc/machines/crc/kubeconfig", "OPENSTACK_K8S_BRANCH": "main", "OUT": "/home/zuul/ci-framework-data/artifacts/manifests", "OUTPUT_DIR": "/home/zuul/ci-framework-data/artifacts/edpm", "WATCHER_BRANCH": "", "WATCHER_REPO": "/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator" }, "cifmw_openshift_api": "https://api.crc.testing:6443", "cifmw_openshift_context": "default/api-crc-testing:6443/kubeadmin", "cifmw_openshift_kubeconfig": "/home/zuul/.crc/machines/crc/kubeconfig", "cifmw_openshift_login_api": "https://api.crc.testing:6443", "cifmw_openshift_login_cert_login": false, "cifmw_openshift_login_context": "default/api-crc-testing:6443/kubeadmin", "cifmw_openshift_login_kubeconfig": "/home/zuul/.crc/machines/crc/kubeconfig", "cifmw_openshift_login_password": 123456789, "cifmw_openshift_login_token": "sha256~P3wSbAcnlmEhrPzxrn050R94H8AJvFSnNcYf8bPOA3c", "cifmw_openshift_login_user": "kubeadmin", "cifmw_openshift_token": "sha256~P3wSbAcnlmEhrPzxrn050R94H8AJvFSnNcYf8bPOA3c", "cifmw_openshift_user": "kubeadmin", "cifmw_path": "/home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin", "cifmw_repo_setup_commit_hash": null, "cifmw_repo_setup_distro_hash": null, "cifmw_repo_setup_dlrn_api_url": "https://trunk.rdoproject.org/api-centos9-antelope", "cifmw_repo_setup_dlrn_url": "https://trunk.rdoproject.org/centos9-antelope/current-podified/delorean.repo.md5", "cifmw_repo_setup_extended_hash": null, "cifmw_repo_setup_full_hash": "b78cfc68a577b1553523c8a70a34e297", "cifmw_repo_setup_release": "antelope", "discovered_interpreter_python": "/usr/bin/python3", "gather_subset": [ "all" ], "module_setup": true }home/zuul/zuul-output/logs/ci-framework-data/artifacts/parameters/0000755000175000017500000000000015071030352024502 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/parameters/openshift-login-params.yml0000644000175000017500000000043015071030171031607 0ustar zuulzuulcifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_context: 
default/api-crc-testing:6443/kubeadmin cifmw_openshift_kubeconfig: /home/zuul/.crc/machines/crc/kubeconfig cifmw_openshift_token: sha256~P3wSbAcnlmEhrPzxrn050R94H8AJvFSnNcYf8bPOA3c cifmw_openshift_user: kubeadmin home/zuul/zuul-output/logs/ci-framework-data/artifacts/parameters/install-yamls-params.yml0000644000175000017500000006677415071030352031323 0ustar zuulzuulcifmw_install_yamls_defaults: ADOPTED_EXTERNAL_NETWORK: 172.21.1.0/24 ADOPTED_INTERNALAPI_NETWORK: 172.17.1.0/24 ADOPTED_STORAGEMGMT_NETWORK: 172.20.1.0/24 ADOPTED_STORAGE_NETWORK: 172.18.1.0/24 ADOPTED_TENANT_NETWORK: 172.9.1.0/24 ANSIBLEEE: config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_BRANCH: main ANSIBLEEE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-operator-index:latest ANSIBLEEE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/kuttl-test.yaml ANSIBLEEE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/tests/kuttl/tests ANSIBLEEE_KUTTL_NAMESPACE: ansibleee-kuttl-tests ANSIBLEEE_REPO: https://github.com/openstack-k8s-operators/openstack-ansibleee-operator ANSIBLEE_COMMIT_HASH: '' BARBICAN: config/samples/barbican_v1beta1_barbican.yaml BARBICAN_BRANCH: main BARBICAN_COMMIT_HASH: '' BARBICAN_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/config/samples/barbican_v1beta1_barbican.yaml BARBICAN_DEPL_IMG: unused BARBICAN_IMG: quay.io/openstack-k8s-operators/barbican-operator-index:latest BARBICAN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/kuttl-test.yaml BARBICAN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/tests/kuttl/tests BARBICAN_KUTTL_NAMESPACE: barbican-kuttl-tests BARBICAN_REPO: https://github.com/openstack-k8s-operators/barbican-operator.git BARBICAN_SERVICE_ENABLED: 'true' BARBICAN_SIMPLE_CRYPTO_ENCRYPTION_KEY: sEFmdFjDUqRM2VemYslV5yGNWjokioJXsg8Nrlc3drU= BAREMETAL_BRANCH: main BAREMETAL_COMMIT_HASH: '' BAREMETAL_IMG: quay.io/openstack-k8s-operators/openstack-baremetal-operator-index:latest BAREMETAL_OS_CONTAINER_IMG: '' BAREMETAL_OS_IMG: '' BAREMETAL_REPO: https://github.com/openstack-k8s-operators/openstack-baremetal-operator.git BAREMETAL_TIMEOUT: 20m BASH_IMG: quay.io/openstack-k8s-operators/bash:latest BGP_ASN: '64999' BGP_LEAF_1: 100.65.4.1 BGP_LEAF_2: 100.64.4.1 BGP_OVN_ROUTING: 'false' BGP_PEER_ASN: '64999' BGP_SOURCE_IP: 172.30.4.2 BGP_SOURCE_IP6: f00d:f00d:f00d:f00d:f00d:f00d:f00d:42 BMAAS_BRIDGE_IPV4_PREFIX: 172.20.1.2/24 BMAAS_BRIDGE_IPV6_PREFIX: fd00:bbbb::2/64 BMAAS_INSTANCE_DISK_SIZE: '20' BMAAS_INSTANCE_MEMORY: '4096' BMAAS_INSTANCE_NAME_PREFIX: crc-bmaas BMAAS_INSTANCE_NET_MODEL: virtio BMAAS_INSTANCE_OS_VARIANT: centos-stream9 BMAAS_INSTANCE_VCPUS: '2' BMAAS_INSTANCE_VIRT_TYPE: kvm BMAAS_IPV4: 'true' BMAAS_IPV6: 'false' BMAAS_LIBVIRT_USER: sushyemu BMAAS_METALLB_ADDRESS_POOL: 172.20.1.64/26 BMAAS_METALLB_POOL_NAME: baremetal BMAAS_NETWORK_IPV4_PREFIX: 172.20.1.1/24 BMAAS_NETWORK_IPV6_PREFIX: fd00:bbbb::1/64 BMAAS_NETWORK_NAME: crc-bmaas BMAAS_NODE_COUNT: '1' BMAAS_OCP_INSTANCE_NAME: crc BMAAS_REDFISH_PASSWORD: password BMAAS_REDFISH_USERNAME: admin BMAAS_ROUTE_LIBVIRT_NETWORKS: crc-bmaas,crc,default BMAAS_SUSHY_EMULATOR_DRIVER: libvirt BMAAS_SUSHY_EMULATOR_IMAGE: quay.io/metal3-io/sushy-tools:latest BMAAS_SUSHY_EMULATOR_NAMESPACE: 
sushy-emulator BMAAS_SUSHY_EMULATOR_OS_CLIENT_CONFIG_FILE: /etc/openstack/clouds.yaml BMAAS_SUSHY_EMULATOR_OS_CLOUD: openstack BMH_NAMESPACE: openstack BMO_BRANCH: release-0.9 BMO_COMMIT_HASH: '' BMO_IPA_BRANCH: stable/2024.1 BMO_IRONIC_HOST: 192.168.122.10 BMO_PROVISIONING_INTERFACE: '' BMO_REPO: https://github.com/metal3-io/baremetal-operator BMO_SETUP: false BMO_SETUP_ROUTE_REPLACE: 'true' BM_CTLPLANE_INTERFACE: enp1s0 BM_INSTANCE_MEMORY: '8192' BM_INSTANCE_NAME_PREFIX: edpm-compute-baremetal BM_INSTANCE_NAME_SUFFIX: '0' BM_NETWORK_NAME: default BM_NODE_COUNT: '1' BM_ROOT_PASSWORD: '' BM_ROOT_PASSWORD_SECRET: '' CEILOMETER_CENTRAL_DEPL_IMG: unused CEILOMETER_NOTIFICATION_DEPL_IMG: unused CEPH_BRANCH: release-1.15 CEPH_CLIENT: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/toolbox.yaml CEPH_COMMON: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/common.yaml CEPH_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/cluster-test.yaml CEPH_CRDS: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/crds.yaml CEPH_IMG: quay.io/ceph/demo:latest-squid CEPH_OP: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/operator-openshift.yaml CEPH_REPO: https://github.com/rook/rook.git CERTMANAGER_TIMEOUT: 300s CHECKOUT_FROM_OPENSTACK_REF: 'true' CINDER: config/samples/cinder_v1beta1_cinder.yaml CINDERAPI_DEPL_IMG: unused CINDERBKP_DEPL_IMG: unused CINDERSCH_DEPL_IMG: unused CINDERVOL_DEPL_IMG: unused CINDER_BRANCH: main CINDER_COMMIT_HASH: '' CINDER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/config/samples/cinder_v1beta1_cinder.yaml CINDER_IMG: quay.io/openstack-k8s-operators/cinder-operator-index:latest CINDER_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/kuttl-test.yaml CINDER_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/test/kuttl/tests CINDER_KUTTL_NAMESPACE: cinder-kuttl-tests CINDER_REPO: https://github.com/openstack-k8s-operators/cinder-operator.git CLEANUP_DIR_CMD: rm -Rf CRC_BGP_NIC_1_MAC: '52:54:00:11:11:11' CRC_BGP_NIC_2_MAC: '52:54:00:11:11:12' CRC_HTTPS_PROXY: '' CRC_HTTP_PROXY: '' CRC_STORAGE_NAMESPACE: crc-storage CRC_STORAGE_RETRIES: '3' CRC_URL: '''https://developers.redhat.com/content-gateway/rest/mirror/pub/openshift-v4/clients/crc/latest/crc-linux-amd64.tar.xz''' CRC_VERSION: latest DATAPLANE_ANSIBLE_SECRET: dataplane-ansible-ssh-private-key-secret DATAPLANE_ANSIBLE_USER: '' DATAPLANE_COMPUTE_IP: 192.168.122.100 DATAPLANE_CONTAINER_PREFIX: openstack DATAPLANE_CONTAINER_TAG: current-podified DATAPLANE_CUSTOM_SERVICE_RUNNER_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest DATAPLANE_DEFAULT_GW: 192.168.122.1 DATAPLANE_EXTRA_NOVA_CONFIG_FILE: /dev/null DATAPLANE_GROWVOLS_ARGS: /=8GB /tmp=1GB /home=1GB /var=100% DATAPLANE_KUSTOMIZE_SCENARIO: preprovisioned DATAPLANE_NETWORKER_IP: 192.168.122.200 DATAPLANE_NETWORK_INTERFACE_NAME: eth0 DATAPLANE_NOVA_NFS_PATH: '' DATAPLANE_NTP_SERVER: pool.ntp.org DATAPLANE_PLAYBOOK: osp.edpm.download_cache DATAPLANE_REGISTRY_URL: quay.io/podified-antelope-centos9 DATAPLANE_RUNNER_IMG: '' DATAPLANE_SERVER_ROLE: compute DATAPLANE_SSHD_ALLOWED_RANGES: '[''192.168.122.0/24'']' DATAPLANE_TIMEOUT: 30m DATAPLANE_TLS_ENABLED: 'true' DATAPLANE_TOTAL_NETWORKER_NODES: '1' DATAPLANE_TOTAL_NODES: '1' DBSERVICE: galera DESIGNATE: config/samples/designate_v1beta1_designate.yaml 
DESIGNATE_BRANCH: main DESIGNATE_COMMIT_HASH: '' DESIGNATE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/config/samples/designate_v1beta1_designate.yaml DESIGNATE_IMG: quay.io/openstack-k8s-operators/designate-operator-index:latest DESIGNATE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/kuttl-test.yaml DESIGNATE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/tests/kuttl/tests DESIGNATE_KUTTL_NAMESPACE: designate-kuttl-tests DESIGNATE_REPO: https://github.com/openstack-k8s-operators/designate-operator.git DNSDATA: config/samples/network_v1beta1_dnsdata.yaml DNSDATA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsdata.yaml DNSMASQ: config/samples/network_v1beta1_dnsmasq.yaml DNSMASQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsmasq.yaml DNS_DEPL_IMG: unused DNS_DOMAIN: localdomain DOWNLOAD_TOOLS_SELECTION: all EDPM_ATTACH_EXTNET: 'true' EDPM_COMPUTE_ADDITIONAL_HOST_ROUTES: '''[]''' EDPM_COMPUTE_ADDITIONAL_NETWORKS: '''[]''' EDPM_COMPUTE_CELLS: '1' EDPM_COMPUTE_CEPH_ENABLED: 'true' EDPM_COMPUTE_CEPH_NOVA: 'true' EDPM_COMPUTE_DHCP_AGENT_ENABLED: 'true' EDPM_COMPUTE_SRIOV_ENABLED: 'true' EDPM_COMPUTE_SUFFIX: '0' EDPM_CONFIGURE_DEFAULT_ROUTE: 'true' EDPM_CONFIGURE_HUGEPAGES: 'false' EDPM_CONFIGURE_NETWORKING: 'true' EDPM_FIRSTBOOT_EXTRA: /tmp/edpm-firstboot-extra EDPM_NETWORKER_SUFFIX: '0' EDPM_TOTAL_NETWORKERS: '1' EDPM_TOTAL_NODES: '1' GALERA_REPLICAS: '' GENERATE_SSH_KEYS: 'true' GIT_CLONE_OPTS: '' GLANCE: config/samples/glance_v1beta1_glance.yaml GLANCEAPI_DEPL_IMG: unused GLANCE_BRANCH: main GLANCE_COMMIT_HASH: '' GLANCE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/config/samples/glance_v1beta1_glance.yaml GLANCE_IMG: quay.io/openstack-k8s-operators/glance-operator-index:latest GLANCE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/kuttl-test.yaml GLANCE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/test/kuttl/tests GLANCE_KUTTL_NAMESPACE: glance-kuttl-tests GLANCE_REPO: https://github.com/openstack-k8s-operators/glance-operator.git HEAT: config/samples/heat_v1beta1_heat.yaml HEATAPI_DEPL_IMG: unused HEATCFNAPI_DEPL_IMG: unused HEATENGINE_DEPL_IMG: unused HEAT_AUTH_ENCRYPTION_KEY: 767c3ed056cbaa3b9dfedb8c6f825bf0 HEAT_BRANCH: main HEAT_COMMIT_HASH: '' HEAT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/config/samples/heat_v1beta1_heat.yaml HEAT_IMG: quay.io/openstack-k8s-operators/heat-operator-index:latest HEAT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/kuttl-test.yaml HEAT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/tests/kuttl/tests HEAT_KUTTL_NAMESPACE: heat-kuttl-tests HEAT_REPO: https://github.com/openstack-k8s-operators/heat-operator.git HEAT_SERVICE_ENABLED: 'true' HORIZON: config/samples/horizon_v1beta1_horizon.yaml HORIZON_BRANCH: main HORIZON_COMMIT_HASH: '' HORIZON_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/config/samples/horizon_v1beta1_horizon.yaml HORIZON_DEPL_IMG: unused HORIZON_IMG: quay.io/openstack-k8s-operators/horizon-operator-index:latest HORIZON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/kuttl-test.yaml HORIZON_KUTTL_DIR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/tests/kuttl/tests HORIZON_KUTTL_NAMESPACE: horizon-kuttl-tests HORIZON_REPO: https://github.com/openstack-k8s-operators/horizon-operator.git INFRA_BRANCH: main INFRA_COMMIT_HASH: '' INFRA_IMG: quay.io/openstack-k8s-operators/infra-operator-index:latest INFRA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/kuttl-test.yaml INFRA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/tests/kuttl/tests INFRA_KUTTL_NAMESPACE: infra-kuttl-tests INFRA_REPO: https://github.com/openstack-k8s-operators/infra-operator.git INSTALL_CERT_MANAGER: false INSTALL_NMSTATE: true || false INSTALL_NNCP: true || false INTERNALAPI_HOST_ROUTES: '' IPV6_LAB_IPV4_NETWORK_IPADDRESS: 172.30.0.1/24 IPV6_LAB_IPV6_NETWORK_IPADDRESS: fd00:abcd:abcd:fc00::1/64 IPV6_LAB_LIBVIRT_STORAGE_POOL: default IPV6_LAB_MANAGE_FIREWALLD: 'true' IPV6_LAB_NAT64_HOST_IPV4: 172.30.0.2/24 IPV6_LAB_NAT64_HOST_IPV6: fd00:abcd:abcd:fc00::2/64 IPV6_LAB_NAT64_INSTANCE_NAME: nat64-router IPV6_LAB_NAT64_IPV6_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_NAT64_TAYGA_DYNAMIC_POOL: 192.168.255.0/24 IPV6_LAB_NAT64_TAYGA_IPV4: 192.168.255.1 IPV6_LAB_NAT64_TAYGA_IPV6: fd00:abcd:abcd:fc00::3 IPV6_LAB_NAT64_TAYGA_IPV6_PREFIX: fd00:abcd:abcd:fcff::/96 IPV6_LAB_NAT64_UPDATE_PACKAGES: 'false' IPV6_LAB_NETWORK_NAME: nat64 IPV6_LAB_SNO_CLUSTER_NETWORK: fd00:abcd:0::/48 IPV6_LAB_SNO_HOST_IP: fd00:abcd:abcd:fc00::11 IPV6_LAB_SNO_HOST_PREFIX: '64' IPV6_LAB_SNO_INSTANCE_NAME: sno IPV6_LAB_SNO_MACHINE_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_SNO_OCP_MIRROR_URL: https://mirror.openshift.com/pub/openshift-v4/clients/ocp IPV6_LAB_SNO_OCP_VERSION: latest-4.14 IPV6_LAB_SNO_SERVICE_NETWORK: fd00:abcd:abcd:fc03::/112 IPV6_LAB_SSH_PUB_KEY: /home/zuul/.ssh/id_rsa.pub IPV6_LAB_WORK_DIR: /home/zuul/.ipv6lab IRONIC: config/samples/ironic_v1beta1_ironic.yaml IRONICAPI_DEPL_IMG: unused IRONICCON_DEPL_IMG: unused IRONICINS_DEPL_IMG: unused IRONICNAG_DEPL_IMG: unused IRONICPXE_DEPL_IMG: unused IRONIC_BRANCH: main IRONIC_COMMIT_HASH: '' IRONIC_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/config/samples/ironic_v1beta1_ironic.yaml IRONIC_IMAGE_TAG: release-24.1 IRONIC_IMG: quay.io/openstack-k8s-operators/ironic-operator-index:latest IRONIC_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/kuttl-test.yaml IRONIC_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/tests/kuttl/tests IRONIC_KUTTL_NAMESPACE: ironic-kuttl-tests IRONIC_REPO: https://github.com/openstack-k8s-operators/ironic-operator.git KEYSTONEAPI: config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_DEPL_IMG: unused KEYSTONE_BRANCH: main KEYSTONE_COMMIT_HASH: '' KEYSTONE_FEDERATION_CLIENT_SECRET: 'CO**********6f' KEYSTONE_FEDERATION_CRYPTO_PASSPHRASE: openstack KEYSTONE_IMG: quay.io/openstack-k8s-operators/keystone-operator-index:latest KEYSTONE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/kuttl-test.yaml KEYSTONE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/tests/kuttl/tests KEYSTONE_KUTTL_NAMESPACE: keystone-kuttl-tests KEYSTONE_REPO: https://github.com/openstack-k8s-operators/keystone-operator.git KUBEADMIN_PWD: '12345678' LIBVIRT_SECRET: 
libvirt-secret LOKI_DEPLOY_MODE: openshift-network LOKI_DEPLOY_NAMESPACE: netobserv LOKI_DEPLOY_SIZE: 1x.demo LOKI_NAMESPACE: openshift-operators-redhat LOKI_OPERATOR_GROUP: openshift-operators-redhat-loki LOKI_SUBSCRIPTION: loki-operator LVMS_CR: '1' MANILA: config/samples/manila_v1beta1_manila.yaml MANILAAPI_DEPL_IMG: unused MANILASCH_DEPL_IMG: unused MANILASHARE_DEPL_IMG: unused MANILA_BRANCH: main MANILA_COMMIT_HASH: '' MANILA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/config/samples/manila_v1beta1_manila.yaml MANILA_IMG: quay.io/openstack-k8s-operators/manila-operator-index:latest MANILA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/kuttl-test.yaml MANILA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/test/kuttl/tests MANILA_KUTTL_NAMESPACE: manila-kuttl-tests MANILA_REPO: https://github.com/openstack-k8s-operators/manila-operator.git MANILA_SERVICE_ENABLED: 'true' MARIADB: config/samples/mariadb_v1beta1_galera.yaml MARIADB_BRANCH: main MARIADB_CHAINSAW_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/tests/chainsaw/config.yaml MARIADB_CHAINSAW_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/tests/chainsaw/tests MARIADB_CHAINSAW_NAMESPACE: mariadb-chainsaw-tests MARIADB_COMMIT_HASH: '' MARIADB_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/config/samples/mariadb_v1beta1_galera.yaml MARIADB_DEPL_IMG: unused MARIADB_IMG: quay.io/openstack-k8s-operators/mariadb-operator-index:latest MARIADB_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/kuttl-test.yaml MARIADB_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/tests/kuttl/tests MARIADB_KUTTL_NAMESPACE: mariadb-kuttl-tests MARIADB_REPO: https://github.com/openstack-k8s-operators/mariadb-operator.git MEMCACHED: config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_DEPL_IMG: unused METADATA_SHARED_SECRET: '12**********42' METALLB_IPV6_POOL: fd00:aaaa::80-fd00:aaaa::90 METALLB_POOL: 192.168.122.80-192.168.122.90 MICROSHIFT: '0' NAMESPACE: openstack NETCONFIG: config/samples/network_v1beta1_netconfig.yaml NETCONFIG_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_netconfig.yaml NETCONFIG_DEPL_IMG: unused NETOBSERV_DEPLOY_NAMESPACE: netobserv NETOBSERV_NAMESPACE: openshift-netobserv-operator NETOBSERV_OPERATOR_GROUP: openshift-netobserv-operator-net NETOBSERV_SUBSCRIPTION: netobserv-operator NETWORK_BGP: 'false' NETWORK_DESIGNATE_ADDRESS_PREFIX: 172.28.0 NETWORK_DESIGNATE_EXT_ADDRESS_PREFIX: 172.50.0 NETWORK_INTERNALAPI_ADDRESS_PREFIX: 172.17.0 NETWORK_ISOLATION: 'true' NETWORK_ISOLATION_INSTANCE_NAME: crc NETWORK_ISOLATION_IPV4: 'true' NETWORK_ISOLATION_IPV4_ADDRESS: 172.16.1.1/24 NETWORK_ISOLATION_IPV4_NAT: 'true' NETWORK_ISOLATION_IPV6: 'false' NETWORK_ISOLATION_IPV6_ADDRESS: fd00:aaaa::1/64 NETWORK_ISOLATION_IP_ADDRESS: 192.168.122.10 NETWORK_ISOLATION_MAC: '52:54:00:11:11:10' NETWORK_ISOLATION_NETWORK_NAME: net-iso NETWORK_ISOLATION_NET_NAME: default NETWORK_ISOLATION_USE_DEFAULT_NETWORK: 'true' NETWORK_MTU: '1500' NETWORK_STORAGEMGMT_ADDRESS_PREFIX: 172.20.0 NETWORK_STORAGE_ADDRESS_PREFIX: 172.18.0 NETWORK_STORAGE_MACVLAN: '' NETWORK_TENANT_ADDRESS_PREFIX: 172.19.0 
NETWORK_VLAN_START: '20' NETWORK_VLAN_STEP: '1' NEUTRONAPI: config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_DEPL_IMG: unused NEUTRON_BRANCH: main NEUTRON_COMMIT_HASH: '' NEUTRON_IMG: quay.io/openstack-k8s-operators/neutron-operator-index:latest NEUTRON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/kuttl-test.yaml NEUTRON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/test/kuttl/tests NEUTRON_KUTTL_NAMESPACE: neutron-kuttl-tests NEUTRON_REPO: https://github.com/openstack-k8s-operators/neutron-operator.git NFS_HOME: /home/nfs NMSTATE_NAMESPACE: openshift-nmstate NMSTATE_OPERATOR_GROUP: openshift-nmstate-tn6k8 NMSTATE_SUBSCRIPTION: kubernetes-nmstate-operator NNCP_ADDITIONAL_HOST_ROUTES: '' NNCP_BGP_1_INTERFACE: enp7s0 NNCP_BGP_1_IP_ADDRESS: 100.65.4.2 NNCP_BGP_2_INTERFACE: enp8s0 NNCP_BGP_2_IP_ADDRESS: 100.64.4.2 NNCP_BRIDGE: ospbr NNCP_CLEANUP_TIMEOUT: 120s NNCP_CTLPLANE_IPV6_ADDRESS_PREFIX: 'fd00:aaaa::' NNCP_CTLPLANE_IPV6_ADDRESS_SUFFIX: '10' NNCP_CTLPLANE_IP_ADDRESS_PREFIX: 192.168.122 NNCP_CTLPLANE_IP_ADDRESS_SUFFIX: '10' NNCP_DNS_SERVER: 192.168.122.1 NNCP_DNS_SERVER_IPV6: fd00:aaaa::1 NNCP_GATEWAY: 192.168.122.1 NNCP_GATEWAY_IPV6: fd00:aaaa::1 NNCP_INTERFACE: enp6s0 NNCP_NODES: '' NNCP_TIMEOUT: 240s NOVA: config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_BRANCH: main NOVA_COMMIT_HASH: '' NOVA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/nova-operator/config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_IMG: quay.io/openstack-k8s-operators/nova-operator-index:latest NOVA_REPO: https://github.com/openstack-k8s-operators/nova-operator.git NUMBER_OF_INSTANCES: '1' OCP_NETWORK_NAME: crc OCTAVIA: config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_BRANCH: main OCTAVIA_COMMIT_HASH: '' OCTAVIA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_IMG: quay.io/openstack-k8s-operators/octavia-operator-index:latest OCTAVIA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/kuttl-test.yaml OCTAVIA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/tests/kuttl/tests OCTAVIA_KUTTL_NAMESPACE: octavia-kuttl-tests OCTAVIA_REPO: https://github.com/openstack-k8s-operators/octavia-operator.git OKD: 'false' OPENSTACK_BRANCH: main OPENSTACK_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-bundle:latest OPENSTACK_COMMIT_HASH: '' OPENSTACK_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_CRDS_DIR: openstack_crds OPENSTACK_CTLPLANE: config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_IMG: quay.io/openstack-k8s-operators/openstack-operator-index:latest OPENSTACK_K8S_BRANCH: main OPENSTACK_K8S_TAG: latest OPENSTACK_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/kuttl-test.yaml OPENSTACK_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/tests/kuttl/tests OPENSTACK_KUTTL_NAMESPACE: openstack-kuttl-tests OPENSTACK_NEUTRON_CUSTOM_CONF: '' OPENSTACK_REPO: https://github.com/openstack-k8s-operators/openstack-operator.git OPENSTACK_STORAGE_BUNDLE_IMG: 
quay.io/openstack-k8s-operators/openstack-operator-storage-bundle:latest OPERATOR_BASE_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator OPERATOR_CHANNEL: '' OPERATOR_NAMESPACE: openstack-operators OPERATOR_SOURCE: '' OPERATOR_SOURCE_NAMESPACE: '' OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm OVNCONTROLLER: config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_NMAP: 'true' OVNDBS: config/samples/ovn_v1beta1_ovndbcluster.yaml OVNDBS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovndbcluster.yaml OVNNORTHD: config/samples/ovn_v1beta1_ovnnorthd.yaml OVNNORTHD_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovnnorthd.yaml OVN_BRANCH: main OVN_COMMIT_HASH: '' OVN_IMG: quay.io/openstack-k8s-operators/ovn-operator-index:latest OVN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/kuttl-test.yaml OVN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/tests/kuttl/tests OVN_KUTTL_NAMESPACE: ovn-kuttl-tests OVN_REPO: https://github.com/openstack-k8s-operators/ovn-operator.git PASSWORD: '12**********78' PLACEMENTAPI: config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_DEPL_IMG: unused PLACEMENT_BRANCH: main PLACEMENT_COMMIT_HASH: '' PLACEMENT_IMG: quay.io/openstack-k8s-operators/placement-operator-index:latest PLACEMENT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/kuttl-test.yaml PLACEMENT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/tests/kuttl/tests PLACEMENT_KUTTL_NAMESPACE: placement-kuttl-tests PLACEMENT_REPO: https://github.com/openstack-k8s-operators/placement-operator.git PULL_SECRET: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/pull-secret.txt RABBITMQ: docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_BRANCH: patches RABBITMQ_COMMIT_HASH: '' RABBITMQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rabbitmq-operator/docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_DEPL_IMG: unused RABBITMQ_IMG: quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest RABBITMQ_REPO: https://github.com/openstack-k8s-operators/rabbitmq-cluster-operator.git REDHAT_OPERATORS: 'false' REDIS: config/samples/redis_v1beta1_redis.yaml REDIS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator-redis/config/samples/redis_v1beta1_redis.yaml REDIS_DEPL_IMG: unused RH_REGISTRY_PWD: '' RH_REGISTRY_USER: '' SECRET: 'os**********et' SG_CORE_DEPL_IMG: unused STANDALONE_COMPUTE_DRIVER: libvirt STANDALONE_EXTERNAL_NET_PREFFIX: 172.21.0 STANDALONE_INTERNALAPI_NET_PREFIX: 172.17.0 STANDALONE_STORAGEMGMT_NET_PREFIX: 172.20.0 STANDALONE_STORAGE_NET_PREFIX: 172.18.0 STANDALONE_TENANT_NET_PREFIX: 172.19.0 STORAGEMGMT_HOST_ROUTES: '' STORAGE_CLASS: local-storage STORAGE_HOST_ROUTES: '' SWIFT: config/samples/swift_v1beta1_swift.yaml SWIFT_BRANCH: main SWIFT_COMMIT_HASH: '' SWIFT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/config/samples/swift_v1beta1_swift.yaml SWIFT_IMG: 
quay.io/openstack-k8s-operators/swift-operator-index:latest SWIFT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/kuttl-test.yaml SWIFT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/tests/kuttl/tests SWIFT_KUTTL_NAMESPACE: swift-kuttl-tests SWIFT_REPO: https://github.com/openstack-k8s-operators/swift-operator.git TELEMETRY: config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_BRANCH: main TELEMETRY_COMMIT_HASH: '' TELEMETRY_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_IMG: quay.io/openstack-k8s-operators/telemetry-operator-index:latest TELEMETRY_KUTTL_BASEDIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator TELEMETRY_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/kuttl-test.yaml TELEMETRY_KUTTL_NAMESPACE: telemetry-kuttl-tests TELEMETRY_KUTTL_RELPATH: tests/kuttl/suites TELEMETRY_REPO: https://github.com/openstack-k8s-operators/telemetry-operator.git TENANT_HOST_ROUTES: '' TIMEOUT: 300s TLS_ENABLED: 'false' WATCHER_BRANCH: '' WATCHER_REPO: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator tripleo_deploy: 'export REGISTRY_USER:' cifmw_install_yamls_environment: BMO_SETUP: false CHECKOUT_FROM_OPENSTACK_REF: 'true' INSTALL_CERT_MANAGER: false KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm WATCHER_BRANCH: '' WATCHER_REPO: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator home/zuul/zuul-output/logs/ci-framework-data/artifacts/parameters/custom-params.yml0000644000175000017500000002057415071030125030026 0ustar zuulzuulcifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_basedir: /home/zuul/ci-framework-data cifmw_build_images_output: {} cifmw_config_certmanager: true cifmw_deploy_edpm: true cifmw_dlrn_report_result: false cifmw_edpm_prepare_kustomizations: - apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization namespace: openstack patches: - patch: "apiVersion: core.openstack.org/v1beta1\nkind: OpenStackControlPlane\nmetadata:\n \ name: controlplane\nspec:\n telemetry:\n enabled: true\n template:\n \ ceilometer:\n enabled: true\n metricStorage:\n enabled: true\n customMonitoringStack:\n alertmanagerConfig:\n \ disabled: true\n prometheusConfig:\n enableRemoteWriteReceiver: true\n persistentVolumeClaim:\n resources:\n requests:\n \ storage: 20G\n replicas: 1\n scrapeInterval: 30s\n resourceSelector:\n matchLabels:\n service: metricStorage\n retention: 24h" target: kind: OpenStackControlPlane - patch: "apiVersion: core.openstack.org/v1beta1\nkind: OpenStackControlPlane\nmetadata:\n \ name: controlplane\nspec:\n telemetry:\n template:\n metricStorage:\n \ monitoringStack: null" target: kind: OpenStackControlPlane - patch: "apiVersion: core.openstack.org/v1beta1\nkind: OpenStackControlPlane\nmetadata:\n \ name: controlplane\nspec:\n watcher:\n enabled: true\n template:\n \ decisionengineServiceTemplate:\n customServiceConfig: |\n \ [watcher_cluster_data_model_collectors.compute]\n period = 60\n [watcher_cluster_data_model_collectors.storage]\n period = 60" target: kind: OpenStackControlPlane cifmw_edpm_prepare_skip_crc_storage_creation: true cifmw_edpm_prepare_timeout: 60 cifmw_edpm_telemetry_enabled_exporters: - podman_exporter - openstack_network_exporter cifmw_extras: - 
'@/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/scenarios/centos-9/multinode-ci.yml' - '@/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/scenarios/centos-9/horizon.yml' - '@/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/scenarios/edpm-no-notifications.yml' - '@/home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/tests/watcher-tempest.yml' cifmw_installyamls_repos: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_password: '123456789' cifmw_openshift_setup_skip_internal_registry: true cifmw_openshift_setup_skip_internal_registry_tls_verify: true cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_operator_build_meta_name: openstack-operator cifmw_operator_build_output: operators: openstack-operator: git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9 git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator image: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9 image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9 image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9 watcher-operator: git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d cifmw_path: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin cifmw_repo_setup_dist_major_version: 9 cifmw_repo_setup_os_release: centos cifmw_run_test_role: test_operator cifmw_run_tests: true cifmw_test_operator_tempest_concurrency: 1 cifmw_test_operator_tempest_exclude_list: 'watcher_tempest_plugin.*client_functional.* watcher_tempest_plugin.tests.scenario.test_execute_strategies.TestExecuteStrategies.test_execute_storage_capacity_balance_strategy watcher_tempest_plugin.*\[.*\breal_load\b.*\].* watcher_tempest_plugin.tests.scenario.test_execute_zone_migration.TestExecuteZoneMigrationStrategy.test_execute_zone_migration_without_destination_host watcher_tempest_plugin.*\[.*\bvolume_migration\b.*\].* ' cifmw_test_operator_tempest_external_plugin: - changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin repository: https://opendev.org/openstack/watcher-tempest-plugin.git cifmw_test_operator_tempest_image_tag: watcher_latest cifmw_test_operator_tempest_include_list: 'watcher_tempest_plugin.* ' cifmw_test_operator_tempest_namespace: podified-epoxy-centos9 cifmw_test_operator_tempest_registry: 38.102.83.53:5001 cifmw_test_operator_tempest_tempestconf_config: overrides: 'compute.min_microversion 2.56 compute.min_compute_nodes 2 placement.min_microversion 1.29 compute-feature-enabled.live_migration true compute-feature-enabled.block_migration_for_live_migration true service_available.sg_core true telemetry_services.metric_backends prometheus telemetry.disable_ssl_certificate_validation true 
telemetry.ceilometer_polling_interval 15 optimize.min_microversion 1.0 optimize.max_microversion 1.4 optimize.datasource prometheus optimize.openstack_type podified optimize.proxy_host_address 38.102.83.51 optimize.proxy_host_user zuul optimize.prometheus_host metric-storage-prometheus.openstack.svc optimize.prometheus_ssl_enabled true optimize.prometheus_ssl_cert_dir /etc/prometheus/secrets/combined-ca-bundle optimize.podified_kubeconfig_path /home/zuul/.crc/machines/crc/kubeconfig optimize.podified_namespace openstack optimize.run_continuous_audit_tests true ' cifmw_update_containers: true cifmw_update_containers_openstack: false cifmw_update_containers_org: podified-epoxy-centos9 cifmw_update_containers_registry: 38.102.83.53:5001 cifmw_update_containers_tag: watcher_latest cifmw_update_containers_watcher: true cifmw_use_crc: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller post_ctlplane_deploy: - name: Tune rabbitmq resources source: rabbitmq_tuning.yml type: playbook post_deploy: - inventory: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/hosts name: Download needed tools source: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/download_tools.yaml type: playbook - name: Patch Openstack Prometheus to enable admin API source: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/playbooks/prometheus_admin_api.yaml type: playbook post_infra: - inventory: /home/zuul/ci-framework-data/artifacts/zuul_inventory.yml name: Fetch nodes facts and save them as parameters source: fetch_compute_facts.yml type: playbook pre_deploy: - name: 80 Kustomize OpenStack CR source: control_plane_horizon.yml type: playbook pre_deploy_create_coo_subscription: - name: Deploy cluster-observability-operator source: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator/ci/playbooks/deploy_cluster_observability_operator.yaml type: playbook pre_infra: - connection: local inventory: localhost, name: Download needed tools source: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/download_tools.yaml type: playbook pre_update: - inventory: /home/zuul/ci-framework-data/artifacts/zuul_inventory.yml name: Fetch nodes facts and save them as parameters source: fetch_compute_facts.yml type: playbook home/zuul/zuul-output/logs/ci-framework-data/artifacts/parameters/zuul-params.yml0000644000175000017500000004652015071030030027505 0ustar zuulzuulcifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_build_images_output: {} cifmw_dlrn_report_result: false cifmw_edpm_telemetry_enabled_exporters: - podman_exporter - openstack_network_exporter cifmw_extras: - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/multinode-ci.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/horizon.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. src_dir }}/ci/scenarios/{{ watcher_scenario }}.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. 
src_dir }}/ci/tests/watcher-tempest.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '123456789' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_operator_build_output: operators: openstack-operator: git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9 git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator image: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9 image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9 image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9 watcher-operator: git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d cifmw_test_operator_tempest_external_plugin: - changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin repository: https://opendev.org/openstack/watcher-tempest-plugin.git cifmw_test_operator_tempest_image_tag: watcher_latest cifmw_test_operator_tempest_namespace: '{{ content_provider_os_registry_url | split(''/'') | last }}' cifmw_test_operator_tempest_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_openstack: false cifmw_update_containers_org: podified-epoxy-centos9 cifmw_update_containers_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_tag: watcher_latest cifmw_update_containers_watcher: true cifmw_use_libvirt: false cifmw_zuul_target_host: controller content_provider_dlrn_md5_hash: '' content_provider_os_registry_url: 38.102.83.53:5001/podified-epoxy-centos9 content_provider_registry_ip: 38.102.83.53 crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: compute-0: networks: default: ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 tenant: config_nm: false ip: 172.19.0.100 compute-1: networks: default: ip: 192.168.122.101 internal-api: config_nm: false ip: 172.17.0.101 storage: config_nm: false ip: 172.18.0.101 tenant: config_nm: false ip: 172.19.0.101 controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 enable_ramdisk: true fetch_dlrn_hash: false push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true watcher_scenario: edpm-no-notifications watcher_services_tag: watcher_latest watcher_tempest_max_microversion: '1.4' zuul: _inheritance_path: - 
'' - '' - '' - '' - '' - '' - '' - '' - '' - '' ansible_version: '8' attempts: 1 branch: main build: 9ce4c11f9f6a4904bf6148a8276a3232 build_refs: - branch: main change: '287' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null buildset: f9416ac601264548b137ce1f44fe627c buildset_refs: - branch: main change: '287' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null change: '287' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 child_jobs: [] commit_id: 14377136e67c9cd67507a059bfde2f19f140387d event_id: 7dde6e80-a2f2-11f0-83f1-b4af7183f5ac executor: hostname: ze01.softwarefactory-project.io inventory_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/inventory.yaml log_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/logs result_data_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/results.json src_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/src work_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work items: - branch: main change: '287' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null job: watcher-operator-validation-epoxy-ocp4-16 jobtags: [] max_attempts: 1 message: W1dJUF0gTW92ZSByYWJiaXRtcSBub3RpZmljYXRpb25zIHF1ZXVlIHRvIGNvbnRyb2xwbGFuZSBsZXZlbAoKTW92ZSByYWJiaXRtcSBub3RpZmljYXRpb25zIHF1ZXVlIGZyb20gZW5hYmxpbmcgYXQgbm92YS9jaW5kZXIvd2F0Y2hlciBsZXZlbCB0byBvcGVuc3RhY2sgY29udHJvbHBsYW5lIGxldmVsIGFmdGVyIHRoYXQgdXNhZ2UgaXMgYXZhaWxhYmxlIHNpbmNlIGh0dHBzOi8vZ2l0aHViLmNvbS9vcGVuc3RhY2stazhzLW9wZXJhdG9ycy9vcGVuc3RhY2stb3BlcmF0b3IvcHVsbC8xNTkx patchset: 14377136e67c9cd67507a059bfde2f19f140387d pipeline: github-check playbook_context: playbook_projects: trusted/project_0/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 941f6f7666fdff0145523beb29ceda8db25c234c trusted/project_1/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df trusted/project_2/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d207d5ad1c5824d6db58c2eb5935a8b36674cbe4 trusted/project_3/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 
untrusted/project_0/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 untrusted/project_1/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 941f6f7666fdff0145523beb29ceda8db25c234c untrusted/project_2/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df untrusted/project_3/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d207d5ad1c5824d6db58c2eb5935a8b36674cbe4 playbooks: - path: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks/edpm/run.yml roles: - checkout: main checkout_description: playbook branch link_name: ansible/playbook_0/role_0/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_0/role_0/ci-framework/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_1/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_0/role_1/config/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_2/zuul-jobs link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_0/role_2/zuul-jobs/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_3/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_0/role_3/rdo-jobs/roles post_review: false project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator projects: github.com/crc-org/crc-cloud: canonical_hostname: github.com canonical_name: github.com/crc-org/crc-cloud checkout: main checkout_description: project override ref commit: f6ed2f2d118884a075895bbf954ff6000e540430 name: crc-org/crc-cloud required: true short_name: crc-cloud src_dir: src/github.com/crc-org/crc-cloud github.com/openstack-k8s-operators/ci-framework: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main checkout_description: zuul branch commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 name: openstack-k8s-operators/ci-framework required: true short_name: ci-framework src_dir: src/github.com/openstack-k8s-operators/ci-framework github.com/openstack-k8s-operators/edpm-ansible: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/edpm-ansible checkout: main checkout_description: zuul branch commit: 95aa63de3182faad63a69301d101debad3efc936 name: openstack-k8s-operators/edpm-ansible required: true short_name: edpm-ansible src_dir: src/github.com/openstack-k8s-operators/edpm-ansible github.com/openstack-k8s-operators/infra-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/infra-operator checkout: main checkout_description: zuul branch commit: 2b5048bbcae44dfeaacbb43830318ca45c13f182 name: openstack-k8s-operators/infra-operator required: true short_name: infra-operator src_dir: src/github.com/openstack-k8s-operators/infra-operator github.com/openstack-k8s-operators/install_yamls: canonical_hostname: github.com 
canonical_name: github.com/openstack-k8s-operators/install_yamls checkout: main checkout_description: zuul branch commit: bb26118ddc70016cbd2118a0b0a35d5f6ab9c343 name: openstack-k8s-operators/install_yamls required: true short_name: install_yamls src_dir: src/github.com/openstack-k8s-operators/install_yamls github.com/openstack-k8s-operators/openstack-baremetal-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-baremetal-operator checkout: main checkout_description: zuul branch commit: 3bf7652f010ead15ac2d2fec7e3b71c442b8fb8d name: openstack-k8s-operators/openstack-baremetal-operator required: true short_name: openstack-baremetal-operator src_dir: src/github.com/openstack-k8s-operators/openstack-baremetal-operator github.com/openstack-k8s-operators/openstack-must-gather: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-must-gather checkout: main checkout_description: zuul branch commit: 748dff8508cbb49e00426d46a4487b9f4c0b0096 name: openstack-k8s-operators/openstack-must-gather required: true short_name: openstack-must-gather src_dir: src/github.com/openstack-k8s-operators/openstack-must-gather github.com/openstack-k8s-operators/openstack-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-operator checkout: main checkout_description: zuul branch commit: 245af87e94976809f2023f59c19dffb95df97ed9 name: openstack-k8s-operators/openstack-operator required: true short_name: openstack-operator src_dir: src/github.com/openstack-k8s-operators/openstack-operator github.com/openstack-k8s-operators/repo-setup: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/repo-setup checkout: main checkout_description: zuul branch commit: 37b10946c6a10f9fa26c13305f06bfd6867e723f name: openstack-k8s-operators/repo-setup required: true short_name: repo-setup src_dir: src/github.com/openstack-k8s-operators/repo-setup github.com/openstack-k8s-operators/watcher-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator checkout: main checkout_description: zuul branch commit: 14377136e67c9cd67507a059bfde2f19f140387d name: openstack-k8s-operators/watcher-operator required: false short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator opendev.org/zuul/zuul-jobs: canonical_hostname: opendev.org canonical_name: opendev.org/zuul/zuul-jobs checkout: master checkout_description: project default branch commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df name: zuul/zuul-jobs required: true short_name: zuul-jobs src_dir: src/opendev.org/zuul/zuul-jobs review.rdoproject.org/config: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/config checkout: master checkout_description: project default branch commit: 941f6f7666fdff0145523beb29ceda8db25c234c name: config required: true short_name: config src_dir: src/review.rdoproject.org/config ref: refs/pull/287/head resources: {} tenant: rdoproject.org timeout: 10800 topic: null voting: true zuul_log_collection: false home/zuul/zuul-output/logs/ci-framework-data/artifacts/installed-packages.yml0000644000175000017500000023207415071030306026624 0ustar zuulzuulNetworkManager: - arch: x86_64 epoch: 1 name: NetworkManager release: 1.el9 source: rpm version: 1.54.1 NetworkManager-libnm: - arch: x86_64 epoch: 1 name: NetworkManager-libnm release: 1.el9 source: rpm version: 1.54.1 NetworkManager-team: - arch: 
x86_64 epoch: 1 name: NetworkManager-team release: 1.el9 source: rpm version: 1.54.1 NetworkManager-tui: - arch: x86_64 epoch: 1 name: NetworkManager-tui release: 1.el9 source: rpm version: 1.54.1 PackageKit: - arch: x86_64 epoch: null name: PackageKit release: 1.el9 source: rpm version: 1.2.6 PackageKit-glib: - arch: x86_64 epoch: null name: PackageKit-glib release: 1.el9 source: rpm version: 1.2.6 aardvark-dns: - arch: x86_64 epoch: 2 name: aardvark-dns release: 1.el9 source: rpm version: 1.16.0 abattis-cantarell-fonts: - arch: noarch epoch: null name: abattis-cantarell-fonts release: 4.el9 source: rpm version: '0.301' acl: - arch: x86_64 epoch: null name: acl release: 4.el9 source: rpm version: 2.3.1 adobe-source-code-pro-fonts: - arch: noarch epoch: null name: adobe-source-code-pro-fonts release: 12.el9.1 source: rpm version: 2.030.1.050 alternatives: - arch: x86_64 epoch: null name: alternatives release: 2.el9 source: rpm version: '1.24' annobin: - arch: x86_64 epoch: null name: annobin release: 1.el9 source: rpm version: '12.98' ansible-core: - arch: x86_64 epoch: 1 name: ansible-core release: 1.el9 source: rpm version: 2.14.18 apr: - arch: x86_64 epoch: null name: apr release: 12.el9 source: rpm version: 1.7.0 apr-util: - arch: x86_64 epoch: null name: apr-util release: 23.el9 source: rpm version: 1.6.1 apr-util-bdb: - arch: x86_64 epoch: null name: apr-util-bdb release: 23.el9 source: rpm version: 1.6.1 apr-util-openssl: - arch: x86_64 epoch: null name: apr-util-openssl release: 23.el9 source: rpm version: 1.6.1 attr: - arch: x86_64 epoch: null name: attr release: 3.el9 source: rpm version: 2.5.1 audit: - arch: x86_64 epoch: null name: audit release: 7.el9 source: rpm version: 3.1.5 audit-libs: - arch: x86_64 epoch: null name: audit-libs release: 7.el9 source: rpm version: 3.1.5 authselect: - arch: x86_64 epoch: null name: authselect release: 3.el9 source: rpm version: 1.2.6 authselect-compat: - arch: x86_64 epoch: null name: authselect-compat release: 3.el9 source: rpm version: 1.2.6 authselect-libs: - arch: x86_64 epoch: null name: authselect-libs release: 3.el9 source: rpm version: 1.2.6 avahi-libs: - arch: x86_64 epoch: null name: avahi-libs release: 23.el9 source: rpm version: '0.8' basesystem: - arch: noarch epoch: null name: basesystem release: 13.el9 source: rpm version: '11' bash: - arch: x86_64 epoch: null name: bash release: 9.el9 source: rpm version: 5.1.8 bash-completion: - arch: noarch epoch: 1 name: bash-completion release: 5.el9 source: rpm version: '2.11' binutils: - arch: x86_64 epoch: null name: binutils release: 67.el9 source: rpm version: 2.35.2 binutils-gold: - arch: x86_64 epoch: null name: binutils-gold release: 67.el9 source: rpm version: 2.35.2 buildah: - arch: x86_64 epoch: 2 name: buildah release: 1.el9 source: rpm version: 1.41.3 bzip2: - arch: x86_64 epoch: null name: bzip2 release: 10.el9 source: rpm version: 1.0.8 bzip2-libs: - arch: x86_64 epoch: null name: bzip2-libs release: 10.el9 source: rpm version: 1.0.8 c-ares: - arch: x86_64 epoch: null name: c-ares release: 2.el9 source: rpm version: 1.19.1 ca-certificates: - arch: noarch epoch: null name: ca-certificates release: 91.4.el9 source: rpm version: 2024.2.69_v8.0.303 centos-gpg-keys: - arch: noarch epoch: null name: centos-gpg-keys release: 30.el9 source: rpm version: '9.0' centos-logos: - arch: x86_64 epoch: null name: centos-logos release: 3.el9 source: rpm version: '90.8' centos-stream-release: - arch: noarch epoch: null name: centos-stream-release release: 30.el9 source: rpm version: '9.0' 
centos-stream-repos: - arch: noarch epoch: null name: centos-stream-repos release: 30.el9 source: rpm version: '9.0' checkpolicy: - arch: x86_64 epoch: null name: checkpolicy release: 1.el9 source: rpm version: '3.6' chrony: - arch: x86_64 epoch: null name: chrony release: 2.el9 source: rpm version: 4.6.1 cloud-init: - arch: noarch epoch: null name: cloud-init release: 7.el9 source: rpm version: '24.4' cloud-utils-growpart: - arch: x86_64 epoch: null name: cloud-utils-growpart release: 1.el9 source: rpm version: '0.33' cmake-filesystem: - arch: x86_64 epoch: null name: cmake-filesystem release: 2.el9 source: rpm version: 3.26.5 cockpit-bridge: - arch: noarch epoch: null name: cockpit-bridge release: 1.el9 source: rpm version: '347' cockpit-system: - arch: noarch epoch: null name: cockpit-system release: 1.el9 source: rpm version: '347' cockpit-ws: - arch: x86_64 epoch: null name: cockpit-ws release: 1.el9 source: rpm version: '347' cockpit-ws-selinux: - arch: x86_64 epoch: null name: cockpit-ws-selinux release: 1.el9 source: rpm version: '347' conmon: - arch: x86_64 epoch: 3 name: conmon release: 1.el9 source: rpm version: 2.1.13 container-selinux: - arch: noarch epoch: 4 name: container-selinux release: 1.el9 source: rpm version: 2.242.0 containers-common: - arch: x86_64 epoch: 4 name: containers-common release: 134.el9 source: rpm version: '1' containers-common-extra: - arch: x86_64 epoch: 4 name: containers-common-extra release: 134.el9 source: rpm version: '1' coreutils: - arch: x86_64 epoch: null name: coreutils release: 39.el9 source: rpm version: '8.32' coreutils-common: - arch: x86_64 epoch: null name: coreutils-common release: 39.el9 source: rpm version: '8.32' cpio: - arch: x86_64 epoch: null name: cpio release: 16.el9 source: rpm version: '2.13' cpp: - arch: x86_64 epoch: null name: cpp release: 11.el9 source: rpm version: 11.5.0 cracklib: - arch: x86_64 epoch: null name: cracklib release: 27.el9 source: rpm version: 2.9.6 cracklib-dicts: - arch: x86_64 epoch: null name: cracklib-dicts release: 27.el9 source: rpm version: 2.9.6 createrepo_c: - arch: x86_64 epoch: null name: createrepo_c release: 4.el9 source: rpm version: 0.20.1 createrepo_c-libs: - arch: x86_64 epoch: null name: createrepo_c-libs release: 4.el9 source: rpm version: 0.20.1 criu: - arch: x86_64 epoch: null name: criu release: 3.el9 source: rpm version: '3.19' criu-libs: - arch: x86_64 epoch: null name: criu-libs release: 3.el9 source: rpm version: '3.19' cronie: - arch: x86_64 epoch: null name: cronie release: 14.el9 source: rpm version: 1.5.7 cronie-anacron: - arch: x86_64 epoch: null name: cronie-anacron release: 14.el9 source: rpm version: 1.5.7 crontabs: - arch: noarch epoch: null name: crontabs release: 26.20190603git.el9 source: rpm version: '1.11' crun: - arch: x86_64 epoch: null name: crun release: 1.el9 source: rpm version: '1.24' crypto-policies: - arch: noarch epoch: null name: crypto-policies release: 1.git377cc42.el9 source: rpm version: '20250905' crypto-policies-scripts: - arch: noarch epoch: null name: crypto-policies-scripts release: 1.git377cc42.el9 source: rpm version: '20250905' cryptsetup-libs: - arch: x86_64 epoch: null name: cryptsetup-libs release: 2.el9 source: rpm version: 2.8.1 curl: - arch: x86_64 epoch: null name: curl release: 34.el9 source: rpm version: 7.76.1 cyrus-sasl: - arch: x86_64 epoch: null name: cyrus-sasl release: 21.el9 source: rpm version: 2.1.27 cyrus-sasl-devel: - arch: x86_64 epoch: null name: cyrus-sasl-devel release: 21.el9 source: rpm version: 2.1.27 
cyrus-sasl-gssapi: - arch: x86_64 epoch: null name: cyrus-sasl-gssapi release: 21.el9 source: rpm version: 2.1.27 cyrus-sasl-lib: - arch: x86_64 epoch: null name: cyrus-sasl-lib release: 21.el9 source: rpm version: 2.1.27 dbus: - arch: x86_64 epoch: 1 name: dbus release: 8.el9 source: rpm version: 1.12.20 dbus-broker: - arch: x86_64 epoch: null name: dbus-broker release: 7.el9 source: rpm version: '28' dbus-common: - arch: noarch epoch: 1 name: dbus-common release: 8.el9 source: rpm version: 1.12.20 dbus-libs: - arch: x86_64 epoch: 1 name: dbus-libs release: 8.el9 source: rpm version: 1.12.20 dbus-tools: - arch: x86_64 epoch: 1 name: dbus-tools release: 8.el9 source: rpm version: 1.12.20 debugedit: - arch: x86_64 epoch: null name: debugedit release: 11.el9 source: rpm version: '5.0' dejavu-sans-fonts: - arch: noarch epoch: null name: dejavu-sans-fonts release: 18.el9 source: rpm version: '2.37' desktop-file-utils: - arch: x86_64 epoch: null name: desktop-file-utils release: 6.el9 source: rpm version: '0.26' device-mapper: - arch: x86_64 epoch: 9 name: device-mapper release: 2.el9 source: rpm version: 1.02.206 device-mapper-libs: - arch: x86_64 epoch: 9 name: device-mapper-libs release: 2.el9 source: rpm version: 1.02.206 dhcp-client: - arch: x86_64 epoch: 12 name: dhcp-client release: 19.b1.el9 source: rpm version: 4.4.2 dhcp-common: - arch: noarch epoch: 12 name: dhcp-common release: 19.b1.el9 source: rpm version: 4.4.2 diffutils: - arch: x86_64 epoch: null name: diffutils release: 12.el9 source: rpm version: '3.7' dnf: - arch: noarch epoch: null name: dnf release: 31.el9 source: rpm version: 4.14.0 dnf-data: - arch: noarch epoch: null name: dnf-data release: 31.el9 source: rpm version: 4.14.0 dnf-plugins-core: - arch: noarch epoch: null name: dnf-plugins-core release: 23.el9 source: rpm version: 4.3.0 dracut: - arch: x86_64 epoch: null name: dracut release: 102.git20250818.el9 source: rpm version: '057' dracut-config-generic: - arch: x86_64 epoch: null name: dracut-config-generic release: 102.git20250818.el9 source: rpm version: '057' dracut-network: - arch: x86_64 epoch: null name: dracut-network release: 102.git20250818.el9 source: rpm version: '057' dracut-squash: - arch: x86_64 epoch: null name: dracut-squash release: 102.git20250818.el9 source: rpm version: '057' dwz: - arch: x86_64 epoch: null name: dwz release: 1.el9 source: rpm version: '0.16' e2fsprogs: - arch: x86_64 epoch: null name: e2fsprogs release: 8.el9 source: rpm version: 1.46.5 e2fsprogs-libs: - arch: x86_64 epoch: null name: e2fsprogs-libs release: 8.el9 source: rpm version: 1.46.5 ed: - arch: x86_64 epoch: null name: ed release: 12.el9 source: rpm version: 1.14.2 efi-srpm-macros: - arch: noarch epoch: null name: efi-srpm-macros release: 4.el9 source: rpm version: '6' elfutils: - arch: x86_64 epoch: null name: elfutils release: 1.el9 source: rpm version: '0.193' elfutils-debuginfod-client: - arch: x86_64 epoch: null name: elfutils-debuginfod-client release: 1.el9 source: rpm version: '0.193' elfutils-default-yama-scope: - arch: noarch epoch: null name: elfutils-default-yama-scope release: 1.el9 source: rpm version: '0.193' elfutils-libelf: - arch: x86_64 epoch: null name: elfutils-libelf release: 1.el9 source: rpm version: '0.193' elfutils-libs: - arch: x86_64 epoch: null name: elfutils-libs release: 1.el9 source: rpm version: '0.193' emacs-filesystem: - arch: noarch epoch: 1 name: emacs-filesystem release: 18.el9 source: rpm version: '27.2' enchant: - arch: x86_64 epoch: 1 name: enchant release: 30.el9 source: rpm 
version: 1.6.0 ethtool: - arch: x86_64 epoch: 2 name: ethtool release: 2.el9 source: rpm version: '6.15' expat: - arch: x86_64 epoch: null name: expat release: 5.el9 source: rpm version: 2.5.0 expect: - arch: x86_64 epoch: null name: expect release: 16.el9 source: rpm version: 5.45.4 file: - arch: x86_64 epoch: null name: file release: 16.el9 source: rpm version: '5.39' file-libs: - arch: x86_64 epoch: null name: file-libs release: 16.el9 source: rpm version: '5.39' filesystem: - arch: x86_64 epoch: null name: filesystem release: 5.el9 source: rpm version: '3.16' findutils: - arch: x86_64 epoch: 1 name: findutils release: 7.el9 source: rpm version: 4.8.0 fonts-filesystem: - arch: noarch epoch: 1 name: fonts-filesystem release: 7.el9.1 source: rpm version: 2.0.5 fonts-srpm-macros: - arch: noarch epoch: 1 name: fonts-srpm-macros release: 7.el9.1 source: rpm version: 2.0.5 fuse-common: - arch: x86_64 epoch: null name: fuse-common release: 9.el9 source: rpm version: 3.10.2 fuse-libs: - arch: x86_64 epoch: null name: fuse-libs release: 17.el9 source: rpm version: 2.9.9 fuse-overlayfs: - arch: x86_64 epoch: null name: fuse-overlayfs release: 1.el9 source: rpm version: '1.15' fuse3: - arch: x86_64 epoch: null name: fuse3 release: 9.el9 source: rpm version: 3.10.2 fuse3-libs: - arch: x86_64 epoch: null name: fuse3-libs release: 9.el9 source: rpm version: 3.10.2 gawk: - arch: x86_64 epoch: null name: gawk release: 6.el9 source: rpm version: 5.1.0 gawk-all-langpacks: - arch: x86_64 epoch: null name: gawk-all-langpacks release: 6.el9 source: rpm version: 5.1.0 gcc: - arch: x86_64 epoch: null name: gcc release: 11.el9 source: rpm version: 11.5.0 gcc-c++: - arch: x86_64 epoch: null name: gcc-c++ release: 11.el9 source: rpm version: 11.5.0 gcc-plugin-annobin: - arch: x86_64 epoch: null name: gcc-plugin-annobin release: 11.el9 source: rpm version: 11.5.0 gdb-minimal: - arch: x86_64 epoch: null name: gdb-minimal release: 2.el9 source: rpm version: '16.3' gdbm-libs: - arch: x86_64 epoch: 1 name: gdbm-libs release: 1.el9 source: rpm version: '1.23' gdisk: - arch: x86_64 epoch: null name: gdisk release: 5.el9 source: rpm version: 1.0.7 gdk-pixbuf2: - arch: x86_64 epoch: null name: gdk-pixbuf2 release: 6.el9 source: rpm version: 2.42.6 geolite2-city: - arch: noarch epoch: null name: geolite2-city release: 6.el9 source: rpm version: '20191217' geolite2-country: - arch: noarch epoch: null name: geolite2-country release: 6.el9 source: rpm version: '20191217' gettext: - arch: x86_64 epoch: null name: gettext release: 8.el9 source: rpm version: '0.21' gettext-libs: - arch: x86_64 epoch: null name: gettext-libs release: 8.el9 source: rpm version: '0.21' ghc-srpm-macros: - arch: noarch epoch: null name: ghc-srpm-macros release: 6.el9 source: rpm version: 1.5.0 git: - arch: x86_64 epoch: null name: git release: 1.el9 source: rpm version: 2.47.3 git-core: - arch: x86_64 epoch: null name: git-core release: 1.el9 source: rpm version: 2.47.3 git-core-doc: - arch: noarch epoch: null name: git-core-doc release: 1.el9 source: rpm version: 2.47.3 glib-networking: - arch: x86_64 epoch: null name: glib-networking release: 3.el9 source: rpm version: 2.68.3 glib2: - arch: x86_64 epoch: null name: glib2 release: 16.el9 source: rpm version: 2.68.4 glibc: - arch: x86_64 epoch: null name: glibc release: 232.el9 source: rpm version: '2.34' glibc-common: - arch: x86_64 epoch: null name: glibc-common release: 232.el9 source: rpm version: '2.34' glibc-devel: - arch: x86_64 epoch: null name: glibc-devel release: 232.el9 source: rpm 
version: '2.34' glibc-gconv-extra: - arch: x86_64 epoch: null name: glibc-gconv-extra release: 232.el9 source: rpm version: '2.34' glibc-headers: - arch: x86_64 epoch: null name: glibc-headers release: 232.el9 source: rpm version: '2.34' glibc-langpack-en: - arch: x86_64 epoch: null name: glibc-langpack-en release: 232.el9 source: rpm version: '2.34' gmp: - arch: x86_64 epoch: 1 name: gmp release: 13.el9 source: rpm version: 6.2.0 gnupg2: - arch: x86_64 epoch: null name: gnupg2 release: 4.el9 source: rpm version: 2.3.3 gnutls: - arch: x86_64 epoch: null name: gnutls release: 9.el9 source: rpm version: 3.8.3 go-srpm-macros: - arch: noarch epoch: null name: go-srpm-macros release: 11.el9 source: rpm version: 3.6.0 gobject-introspection: - arch: x86_64 epoch: null name: gobject-introspection release: 11.el9 source: rpm version: 1.68.0 gpg-pubkey: - arch: null epoch: null name: gpg-pubkey release: 5ccc5b19 source: rpm version: 8483c65d gpgme: - arch: x86_64 epoch: null name: gpgme release: 6.el9 source: rpm version: 1.15.1 grep: - arch: x86_64 epoch: null name: grep release: 5.el9 source: rpm version: '3.6' groff-base: - arch: x86_64 epoch: null name: groff-base release: 10.el9 source: rpm version: 1.22.4 grub2-common: - arch: noarch epoch: 1 name: grub2-common release: 115.el9 source: rpm version: '2.06' grub2-pc: - arch: x86_64 epoch: 1 name: grub2-pc release: 115.el9 source: rpm version: '2.06' grub2-pc-modules: - arch: noarch epoch: 1 name: grub2-pc-modules release: 115.el9 source: rpm version: '2.06' grub2-tools: - arch: x86_64 epoch: 1 name: grub2-tools release: 115.el9 source: rpm version: '2.06' grub2-tools-minimal: - arch: x86_64 epoch: 1 name: grub2-tools-minimal release: 115.el9 source: rpm version: '2.06' grubby: - arch: x86_64 epoch: null name: grubby release: 69.el9 source: rpm version: '8.40' gsettings-desktop-schemas: - arch: x86_64 epoch: null name: gsettings-desktop-schemas release: 7.el9 source: rpm version: '40.0' gssproxy: - arch: x86_64 epoch: null name: gssproxy release: 7.el9 source: rpm version: 0.8.4 gzip: - arch: x86_64 epoch: null name: gzip release: 1.el9 source: rpm version: '1.12' hostname: - arch: x86_64 epoch: null name: hostname release: 6.el9 source: rpm version: '3.23' httpd-tools: - arch: x86_64 epoch: null name: httpd-tools release: 7.el9 source: rpm version: 2.4.62 hunspell: - arch: x86_64 epoch: null name: hunspell release: 11.el9 source: rpm version: 1.7.0 hunspell-en-GB: - arch: noarch epoch: null name: hunspell-en-GB release: 20.el9 source: rpm version: 0.20140811.1 hunspell-en-US: - arch: noarch epoch: null name: hunspell-en-US release: 20.el9 source: rpm version: 0.20140811.1 hunspell-filesystem: - arch: x86_64 epoch: null name: hunspell-filesystem release: 11.el9 source: rpm version: 1.7.0 hwdata: - arch: noarch epoch: null name: hwdata release: 9.20.el9 source: rpm version: '0.348' ima-evm-utils: - arch: x86_64 epoch: null name: ima-evm-utils release: 2.el9 source: rpm version: 1.6.2 info: - arch: x86_64 epoch: null name: info release: 15.el9 source: rpm version: '6.7' inih: - arch: x86_64 epoch: null name: inih release: 6.el9 source: rpm version: '49' initscripts-rename-device: - arch: x86_64 epoch: null name: initscripts-rename-device release: 4.el9 source: rpm version: 10.11.8 initscripts-service: - arch: noarch epoch: null name: initscripts-service release: 4.el9 source: rpm version: 10.11.8 ipcalc: - arch: x86_64 epoch: null name: ipcalc release: 5.el9 source: rpm version: 1.0.0 iproute: - arch: x86_64 epoch: null name: iproute release: 2.el9 
source: rpm version: 6.14.0 iproute-tc: - arch: x86_64 epoch: null name: iproute-tc release: 2.el9 source: rpm version: 6.14.0 iptables-libs: - arch: x86_64 epoch: null name: iptables-libs release: 11.el9 source: rpm version: 1.8.10 iptables-nft: - arch: x86_64 epoch: null name: iptables-nft release: 11.el9 source: rpm version: 1.8.10 iptables-nft-services: - arch: noarch epoch: null name: iptables-nft-services release: 11.el9 source: rpm version: 1.8.10 iputils: - arch: x86_64 epoch: null name: iputils release: 15.el9 source: rpm version: '20210202' irqbalance: - arch: x86_64 epoch: 2 name: irqbalance release: 4.el9 source: rpm version: 1.9.4 jansson: - arch: x86_64 epoch: null name: jansson release: 1.el9 source: rpm version: '2.14' jq: - arch: x86_64 epoch: null name: jq release: 19.el9 source: rpm version: '1.6' json-c: - arch: x86_64 epoch: null name: json-c release: 11.el9 source: rpm version: '0.14' json-glib: - arch: x86_64 epoch: null name: json-glib release: 1.el9 source: rpm version: 1.6.6 kbd: - arch: x86_64 epoch: null name: kbd release: 11.el9 source: rpm version: 2.4.0 kbd-legacy: - arch: noarch epoch: null name: kbd-legacy release: 11.el9 source: rpm version: 2.4.0 kbd-misc: - arch: noarch epoch: null name: kbd-misc release: 11.el9 source: rpm version: 2.4.0 kernel: - arch: x86_64 epoch: null name: kernel release: 620.el9 source: rpm version: 5.14.0 kernel-core: - arch: x86_64 epoch: null name: kernel-core release: 620.el9 source: rpm version: 5.14.0 kernel-headers: - arch: x86_64 epoch: null name: kernel-headers release: 620.el9 source: rpm version: 5.14.0 kernel-modules: - arch: x86_64 epoch: null name: kernel-modules release: 620.el9 source: rpm version: 5.14.0 kernel-modules-core: - arch: x86_64 epoch: null name: kernel-modules-core release: 620.el9 source: rpm version: 5.14.0 kernel-srpm-macros: - arch: noarch epoch: null name: kernel-srpm-macros release: 14.el9 source: rpm version: '1.0' kernel-tools: - arch: x86_64 epoch: null name: kernel-tools release: 620.el9 source: rpm version: 5.14.0 kernel-tools-libs: - arch: x86_64 epoch: null name: kernel-tools-libs release: 620.el9 source: rpm version: 5.14.0 kexec-tools: - arch: x86_64 epoch: null name: kexec-tools release: 10.el9 source: rpm version: 2.0.29 keyutils: - arch: x86_64 epoch: null name: keyutils release: 1.el9 source: rpm version: 1.6.3 keyutils-libs: - arch: x86_64 epoch: null name: keyutils-libs release: 1.el9 source: rpm version: 1.6.3 kmod: - arch: x86_64 epoch: null name: kmod release: 11.el9 source: rpm version: '28' kmod-libs: - arch: x86_64 epoch: null name: kmod-libs release: 11.el9 source: rpm version: '28' kpartx: - arch: x86_64 epoch: null name: kpartx release: 39.el9 source: rpm version: 0.8.7 krb5-libs: - arch: x86_64 epoch: null name: krb5-libs release: 8.el9 source: rpm version: 1.21.1 langpacks-core-en_GB: - arch: noarch epoch: null name: langpacks-core-en_GB release: 16.el9 source: rpm version: '3.0' langpacks-core-font-en: - arch: noarch epoch: null name: langpacks-core-font-en release: 16.el9 source: rpm version: '3.0' langpacks-en_GB: - arch: noarch epoch: null name: langpacks-en_GB release: 16.el9 source: rpm version: '3.0' less: - arch: x86_64 epoch: null name: less release: 6.el9 source: rpm version: '590' libacl: - arch: x86_64 epoch: null name: libacl release: 4.el9 source: rpm version: 2.3.1 libappstream-glib: - arch: x86_64 epoch: null name: libappstream-glib release: 5.el9 source: rpm version: 0.7.18 libarchive: - arch: x86_64 epoch: null name: libarchive release: 6.el9 source: 
rpm version: 3.5.3 libassuan: - arch: x86_64 epoch: null name: libassuan release: 3.el9 source: rpm version: 2.5.5 libatomic: - arch: x86_64 epoch: null name: libatomic release: 11.el9 source: rpm version: 11.5.0 libattr: - arch: x86_64 epoch: null name: libattr release: 3.el9 source: rpm version: 2.5.1 libbasicobjects: - arch: x86_64 epoch: null name: libbasicobjects release: 53.el9 source: rpm version: 0.1.1 libblkid: - arch: x86_64 epoch: null name: libblkid release: 21.el9 source: rpm version: 2.37.4 libbpf: - arch: x86_64 epoch: 2 name: libbpf release: 2.el9 source: rpm version: 1.5.0 libbrotli: - arch: x86_64 epoch: null name: libbrotli release: 7.el9 source: rpm version: 1.0.9 libburn: - arch: x86_64 epoch: null name: libburn release: 5.el9 source: rpm version: 1.5.4 libcap: - arch: x86_64 epoch: null name: libcap release: 10.el9 source: rpm version: '2.48' libcap-ng: - arch: x86_64 epoch: null name: libcap-ng release: 7.el9 source: rpm version: 0.8.2 libcbor: - arch: x86_64 epoch: null name: libcbor release: 5.el9 source: rpm version: 0.7.0 libcollection: - arch: x86_64 epoch: null name: libcollection release: 53.el9 source: rpm version: 0.7.0 libcom_err: - arch: x86_64 epoch: null name: libcom_err release: 8.el9 source: rpm version: 1.46.5 libcomps: - arch: x86_64 epoch: null name: libcomps release: 1.el9 source: rpm version: 0.1.18 libcurl: - arch: x86_64 epoch: null name: libcurl release: 34.el9 source: rpm version: 7.76.1 libdaemon: - arch: x86_64 epoch: null name: libdaemon release: 23.el9 source: rpm version: '0.14' libdb: - arch: x86_64 epoch: null name: libdb release: 57.el9 source: rpm version: 5.3.28 libdhash: - arch: x86_64 epoch: null name: libdhash release: 53.el9 source: rpm version: 0.5.0 libdnf: - arch: x86_64 epoch: null name: libdnf release: 16.el9 source: rpm version: 0.69.0 libeconf: - arch: x86_64 epoch: null name: libeconf release: 4.el9 source: rpm version: 0.4.1 libedit: - arch: x86_64 epoch: null name: libedit release: 38.20210216cvs.el9 source: rpm version: '3.1' libestr: - arch: x86_64 epoch: null name: libestr release: 4.el9 source: rpm version: 0.1.11 libev: - arch: x86_64 epoch: null name: libev release: 6.el9 source: rpm version: '4.33' libevent: - arch: x86_64 epoch: null name: libevent release: 8.el9 source: rpm version: 2.1.12 libfastjson: - arch: x86_64 epoch: null name: libfastjson release: 5.el9 source: rpm version: 0.99.9 libfdisk: - arch: x86_64 epoch: null name: libfdisk release: 21.el9 source: rpm version: 2.37.4 libffi: - arch: x86_64 epoch: null name: libffi release: 8.el9 source: rpm version: 3.4.2 libffi-devel: - arch: x86_64 epoch: null name: libffi-devel release: 8.el9 source: rpm version: 3.4.2 libfido2: - arch: x86_64 epoch: null name: libfido2 release: 2.el9 source: rpm version: 1.13.0 libgcc: - arch: x86_64 epoch: null name: libgcc release: 11.el9 source: rpm version: 11.5.0 libgcrypt: - arch: x86_64 epoch: null name: libgcrypt release: 11.el9 source: rpm version: 1.10.0 libgomp: - arch: x86_64 epoch: null name: libgomp release: 11.el9 source: rpm version: 11.5.0 libgpg-error: - arch: x86_64 epoch: null name: libgpg-error release: 5.el9 source: rpm version: '1.42' libgpg-error-devel: - arch: x86_64 epoch: null name: libgpg-error-devel release: 5.el9 source: rpm version: '1.42' libibverbs: - arch: x86_64 epoch: null name: libibverbs release: 2.el9 source: rpm version: '57.0' libicu: - arch: x86_64 epoch: null name: libicu release: 10.el9 source: rpm version: '67.1' libidn2: - arch: x86_64 epoch: null name: libidn2 release: 7.el9 
source: rpm version: 2.3.0 libini_config: - arch: x86_64 epoch: null name: libini_config release: 53.el9 source: rpm version: 1.3.1 libisoburn: - arch: x86_64 epoch: null name: libisoburn release: 5.el9 source: rpm version: 1.5.4 libisofs: - arch: x86_64 epoch: null name: libisofs release: 4.el9 source: rpm version: 1.5.4 libjpeg-turbo: - arch: x86_64 epoch: null name: libjpeg-turbo release: 7.el9 source: rpm version: 2.0.90 libkcapi: - arch: x86_64 epoch: null name: libkcapi release: 2.el9 source: rpm version: 1.4.0 libkcapi-hmaccalc: - arch: x86_64 epoch: null name: libkcapi-hmaccalc release: 2.el9 source: rpm version: 1.4.0 libksba: - arch: x86_64 epoch: null name: libksba release: 7.el9 source: rpm version: 1.5.1 libldb: - arch: x86_64 epoch: 0 name: libldb release: 6.el9 source: rpm version: 4.22.4 libmaxminddb: - arch: x86_64 epoch: null name: libmaxminddb release: 4.el9 source: rpm version: 1.5.2 libmnl: - arch: x86_64 epoch: null name: libmnl release: 16.el9 source: rpm version: 1.0.4 libmodulemd: - arch: x86_64 epoch: null name: libmodulemd release: 2.el9 source: rpm version: 2.13.0 libmount: - arch: x86_64 epoch: null name: libmount release: 21.el9 source: rpm version: 2.37.4 libmpc: - arch: x86_64 epoch: null name: libmpc release: 4.el9 source: rpm version: 1.2.1 libndp: - arch: x86_64 epoch: null name: libndp release: 1.el9 source: rpm version: '1.9' libnet: - arch: x86_64 epoch: null name: libnet release: 7.el9 source: rpm version: '1.2' libnetfilter_conntrack: - arch: x86_64 epoch: null name: libnetfilter_conntrack release: 1.el9 source: rpm version: 1.0.9 libnfnetlink: - arch: x86_64 epoch: null name: libnfnetlink release: 23.el9 source: rpm version: 1.0.1 libnfsidmap: - arch: x86_64 epoch: 1 name: libnfsidmap release: 39.el9 source: rpm version: 2.5.4 libnftnl: - arch: x86_64 epoch: null name: libnftnl release: 4.el9 source: rpm version: 1.2.6 libnghttp2: - arch: x86_64 epoch: null name: libnghttp2 release: 6.el9 source: rpm version: 1.43.0 libnl3: - arch: x86_64 epoch: null name: libnl3 release: 1.el9 source: rpm version: 3.11.0 libnl3-cli: - arch: x86_64 epoch: null name: libnl3-cli release: 1.el9 source: rpm version: 3.11.0 libosinfo: - arch: x86_64 epoch: null name: libosinfo release: 1.el9 source: rpm version: 1.10.0 libpath_utils: - arch: x86_64 epoch: null name: libpath_utils release: 53.el9 source: rpm version: 0.2.1 libpcap: - arch: x86_64 epoch: 14 name: libpcap release: 4.el9 source: rpm version: 1.10.0 libpipeline: - arch: x86_64 epoch: null name: libpipeline release: 4.el9 source: rpm version: 1.5.3 libpkgconf: - arch: x86_64 epoch: null name: libpkgconf release: 10.el9 source: rpm version: 1.7.3 libpng: - arch: x86_64 epoch: 2 name: libpng release: 12.el9 source: rpm version: 1.6.37 libproxy: - arch: x86_64 epoch: null name: libproxy release: 35.el9 source: rpm version: 0.4.15 libproxy-webkitgtk4: - arch: x86_64 epoch: null name: libproxy-webkitgtk4 release: 35.el9 source: rpm version: 0.4.15 libpsl: - arch: x86_64 epoch: null name: libpsl release: 5.el9 source: rpm version: 0.21.1 libpwquality: - arch: x86_64 epoch: null name: libpwquality release: 8.el9 source: rpm version: 1.4.4 libref_array: - arch: x86_64 epoch: null name: libref_array release: 53.el9 source: rpm version: 0.1.5 librepo: - arch: x86_64 epoch: null name: librepo release: 3.el9 source: rpm version: 1.14.5 libreport-filesystem: - arch: noarch epoch: null name: libreport-filesystem release: 6.el9 source: rpm version: 2.15.2 libseccomp: - arch: x86_64 epoch: null name: libseccomp release: 2.el9 
source: rpm version: 2.5.2 libselinux: - arch: x86_64 epoch: null name: libselinux release: 3.el9 source: rpm version: '3.6' libselinux-utils: - arch: x86_64 epoch: null name: libselinux-utils release: 3.el9 source: rpm version: '3.6' libsemanage: - arch: x86_64 epoch: null name: libsemanage release: 5.el9 source: rpm version: '3.6' libsepol: - arch: x86_64 epoch: null name: libsepol release: 3.el9 source: rpm version: '3.6' libsigsegv: - arch: x86_64 epoch: null name: libsigsegv release: 4.el9 source: rpm version: '2.13' libslirp: - arch: x86_64 epoch: null name: libslirp release: 8.el9 source: rpm version: 4.4.0 libsmartcols: - arch: x86_64 epoch: null name: libsmartcols release: 21.el9 source: rpm version: 2.37.4 libsolv: - arch: x86_64 epoch: null name: libsolv release: 3.el9 source: rpm version: 0.7.24 libsoup: - arch: x86_64 epoch: null name: libsoup release: 10.el9 source: rpm version: 2.72.0 libss: - arch: x86_64 epoch: null name: libss release: 8.el9 source: rpm version: 1.46.5 libssh: - arch: x86_64 epoch: null name: libssh release: 13.el9 source: rpm version: 0.10.4 libssh-config: - arch: noarch epoch: null name: libssh-config release: 13.el9 source: rpm version: 0.10.4 libsss_certmap: - arch: x86_64 epoch: null name: libsss_certmap release: 4.el9 source: rpm version: 2.9.7 libsss_idmap: - arch: x86_64 epoch: null name: libsss_idmap release: 4.el9 source: rpm version: 2.9.7 libsss_nss_idmap: - arch: x86_64 epoch: null name: libsss_nss_idmap release: 4.el9 source: rpm version: 2.9.7 libsss_sudo: - arch: x86_64 epoch: null name: libsss_sudo release: 4.el9 source: rpm version: 2.9.7 libstdc++: - arch: x86_64 epoch: null name: libstdc++ release: 11.el9 source: rpm version: 11.5.0 libstdc++-devel: - arch: x86_64 epoch: null name: libstdc++-devel release: 11.el9 source: rpm version: 11.5.0 libstemmer: - arch: x86_64 epoch: null name: libstemmer release: 18.585svn.el9 source: rpm version: '0' libsysfs: - arch: x86_64 epoch: null name: libsysfs release: 11.el9 source: rpm version: 2.1.1 libtalloc: - arch: x86_64 epoch: null name: libtalloc release: 1.el9 source: rpm version: 2.4.3 libtasn1: - arch: x86_64 epoch: null name: libtasn1 release: 9.el9 source: rpm version: 4.16.0 libtdb: - arch: x86_64 epoch: null name: libtdb release: 1.el9 source: rpm version: 1.4.13 libteam: - arch: x86_64 epoch: null name: libteam release: 16.el9 source: rpm version: '1.31' libtevent: - arch: x86_64 epoch: null name: libtevent release: 1.el9 source: rpm version: 0.16.2 libtirpc: - arch: x86_64 epoch: null name: libtirpc release: 9.el9 source: rpm version: 1.3.3 libtool-ltdl: - arch: x86_64 epoch: null name: libtool-ltdl release: 46.el9 source: rpm version: 2.4.6 libunistring: - arch: x86_64 epoch: null name: libunistring release: 15.el9 source: rpm version: 0.9.10 liburing: - arch: x86_64 epoch: null name: liburing release: 1.el9 source: rpm version: '2.5' libuser: - arch: x86_64 epoch: null name: libuser release: 17.el9 source: rpm version: '0.63' libutempter: - arch: x86_64 epoch: null name: libutempter release: 6.el9 source: rpm version: 1.2.1 libuuid: - arch: x86_64 epoch: null name: libuuid release: 21.el9 source: rpm version: 2.37.4 libverto: - arch: x86_64 epoch: null name: libverto release: 3.el9 source: rpm version: 0.3.2 libverto-libev: - arch: x86_64 epoch: null name: libverto-libev release: 3.el9 source: rpm version: 0.3.2 libvirt-client: - arch: x86_64 epoch: null name: libvirt-client release: 15.el9 source: rpm version: 10.10.0 libvirt-libs: - arch: x86_64 epoch: null name: libvirt-libs 
release: 15.el9 source: rpm version: 10.10.0 libwbclient: - arch: x86_64 epoch: 0 name: libwbclient release: 6.el9 source: rpm version: 4.22.4 libxcrypt: - arch: x86_64 epoch: null name: libxcrypt release: 3.el9 source: rpm version: 4.4.18 libxcrypt-compat: - arch: x86_64 epoch: null name: libxcrypt-compat release: 3.el9 source: rpm version: 4.4.18 libxcrypt-devel: - arch: x86_64 epoch: null name: libxcrypt-devel release: 3.el9 source: rpm version: 4.4.18 libxml2: - arch: x86_64 epoch: null name: libxml2 release: 12.el9 source: rpm version: 2.9.13 libxml2-devel: - arch: x86_64 epoch: null name: libxml2-devel release: 12.el9 source: rpm version: 2.9.13 libxslt: - arch: x86_64 epoch: null name: libxslt release: 12.el9 source: rpm version: 1.1.34 libxslt-devel: - arch: x86_64 epoch: null name: libxslt-devel release: 12.el9 source: rpm version: 1.1.34 libyaml: - arch: x86_64 epoch: null name: libyaml release: 7.el9 source: rpm version: 0.2.5 libzstd: - arch: x86_64 epoch: null name: libzstd release: 1.el9 source: rpm version: 1.5.5 llvm-filesystem: - arch: x86_64 epoch: null name: llvm-filesystem release: 3.el9 source: rpm version: 20.1.8 llvm-libs: - arch: x86_64 epoch: null name: llvm-libs release: 3.el9 source: rpm version: 20.1.8 lmdb-libs: - arch: x86_64 epoch: null name: lmdb-libs release: 3.el9 source: rpm version: 0.9.29 logrotate: - arch: x86_64 epoch: null name: logrotate release: 12.el9 source: rpm version: 3.18.0 lshw: - arch: x86_64 epoch: null name: lshw release: 2.el9 source: rpm version: B.02.20 lsscsi: - arch: x86_64 epoch: null name: lsscsi release: 6.el9 source: rpm version: '0.32' lua-libs: - arch: x86_64 epoch: null name: lua-libs release: 4.el9 source: rpm version: 5.4.4 lua-srpm-macros: - arch: noarch epoch: null name: lua-srpm-macros release: 6.el9 source: rpm version: '1' lz4-libs: - arch: x86_64 epoch: null name: lz4-libs release: 5.el9 source: rpm version: 1.9.3 lzo: - arch: x86_64 epoch: null name: lzo release: 7.el9 source: rpm version: '2.10' make: - arch: x86_64 epoch: 1 name: make release: 8.el9 source: rpm version: '4.3' man-db: - arch: x86_64 epoch: null name: man-db release: 9.el9 source: rpm version: 2.9.3 microcode_ctl: - arch: noarch epoch: 4 name: microcode_ctl release: 1.el9 source: rpm version: '20250812' mpfr: - arch: x86_64 epoch: null name: mpfr release: 7.el9 source: rpm version: 4.1.0 ncurses: - arch: x86_64 epoch: null name: ncurses release: 12.20210508.el9 source: rpm version: '6.2' ncurses-base: - arch: noarch epoch: null name: ncurses-base release: 12.20210508.el9 source: rpm version: '6.2' ncurses-c++-libs: - arch: x86_64 epoch: null name: ncurses-c++-libs release: 12.20210508.el9 source: rpm version: '6.2' ncurses-devel: - arch: x86_64 epoch: null name: ncurses-devel release: 12.20210508.el9 source: rpm version: '6.2' ncurses-libs: - arch: x86_64 epoch: null name: ncurses-libs release: 12.20210508.el9 source: rpm version: '6.2' netavark: - arch: x86_64 epoch: 2 name: netavark release: 1.el9 source: rpm version: 1.16.0 nettle: - arch: x86_64 epoch: null name: nettle release: 1.el9 source: rpm version: 3.10.1 newt: - arch: x86_64 epoch: null name: newt release: 11.el9 source: rpm version: 0.52.21 nfs-utils: - arch: x86_64 epoch: 1 name: nfs-utils release: 39.el9 source: rpm version: 2.5.4 nftables: - arch: x86_64 epoch: 1 name: nftables release: 4.el9 source: rpm version: 1.0.9 npth: - arch: x86_64 epoch: null name: npth release: 8.el9 source: rpm version: '1.6' numactl-libs: - arch: x86_64 epoch: null name: numactl-libs release: 3.el9 source: 
rpm version: 2.0.19 ocaml-srpm-macros: - arch: noarch epoch: null name: ocaml-srpm-macros release: 6.el9 source: rpm version: '6' oddjob: - arch: x86_64 epoch: null name: oddjob release: 7.el9 source: rpm version: 0.34.7 oddjob-mkhomedir: - arch: x86_64 epoch: null name: oddjob-mkhomedir release: 7.el9 source: rpm version: 0.34.7 oniguruma: - arch: x86_64 epoch: null name: oniguruma release: 1.el9.6 source: rpm version: 6.9.6 openblas-srpm-macros: - arch: noarch epoch: null name: openblas-srpm-macros release: 11.el9 source: rpm version: '2' openldap: - arch: x86_64 epoch: null name: openldap release: 4.el9 source: rpm version: 2.6.8 openldap-devel: - arch: x86_64 epoch: null name: openldap-devel release: 4.el9 source: rpm version: 2.6.8 openssh: - arch: x86_64 epoch: null name: openssh release: 1.el9 source: rpm version: 9.9p1 openssh-clients: - arch: x86_64 epoch: null name: openssh-clients release: 1.el9 source: rpm version: 9.9p1 openssh-server: - arch: x86_64 epoch: null name: openssh-server release: 1.el9 source: rpm version: 9.9p1 openssl: - arch: x86_64 epoch: 1 name: openssl release: 5.el9 source: rpm version: 3.5.1 openssl-devel: - arch: x86_64 epoch: 1 name: openssl-devel release: 5.el9 source: rpm version: 3.5.1 openssl-fips-provider: - arch: x86_64 epoch: 1 name: openssl-fips-provider release: 5.el9 source: rpm version: 3.5.1 openssl-libs: - arch: x86_64 epoch: 1 name: openssl-libs release: 5.el9 source: rpm version: 3.5.1 os-prober: - arch: x86_64 epoch: null name: os-prober release: 12.el9 source: rpm version: '1.77' osinfo-db: - arch: noarch epoch: null name: osinfo-db release: 1.el9 source: rpm version: '20250606' osinfo-db-tools: - arch: x86_64 epoch: null name: osinfo-db-tools release: 1.el9 source: rpm version: 1.10.0 p11-kit: - arch: x86_64 epoch: null name: p11-kit release: 1.el9 source: rpm version: 0.25.10 p11-kit-trust: - arch: x86_64 epoch: null name: p11-kit-trust release: 1.el9 source: rpm version: 0.25.10 pam: - arch: x86_64 epoch: null name: pam release: 26.el9 source: rpm version: 1.5.1 parted: - arch: x86_64 epoch: null name: parted release: 3.el9 source: rpm version: '3.5' passt: - arch: x86_64 epoch: null name: passt release: 2.el9 source: rpm version: 0^20250512.g8ec1341 passt-selinux: - arch: noarch epoch: null name: passt-selinux release: 2.el9 source: rpm version: 0^20250512.g8ec1341 passwd: - arch: x86_64 epoch: null name: passwd release: 12.el9 source: rpm version: '0.80' patch: - arch: x86_64 epoch: null name: patch release: 16.el9 source: rpm version: 2.7.6 pciutils-libs: - arch: x86_64 epoch: null name: pciutils-libs release: 7.el9 source: rpm version: 3.7.0 pcre: - arch: x86_64 epoch: null name: pcre release: 4.el9 source: rpm version: '8.44' pcre2: - arch: x86_64 epoch: null name: pcre2 release: 6.el9 source: rpm version: '10.40' pcre2-syntax: - arch: noarch epoch: null name: pcre2-syntax release: 6.el9 source: rpm version: '10.40' perl-AutoLoader: - arch: noarch epoch: 0 name: perl-AutoLoader release: 483.el9 source: rpm version: '5.74' perl-B: - arch: x86_64 epoch: 0 name: perl-B release: 483.el9 source: rpm version: '1.80' perl-Carp: - arch: noarch epoch: null name: perl-Carp release: 460.el9 source: rpm version: '1.50' perl-Class-Struct: - arch: noarch epoch: 0 name: perl-Class-Struct release: 483.el9 source: rpm version: '0.66' perl-Data-Dumper: - arch: x86_64 epoch: null name: perl-Data-Dumper release: 462.el9 source: rpm version: '2.174' perl-Digest: - arch: noarch epoch: null name: perl-Digest release: 4.el9 source: rpm version: '1.19' 
perl-Digest-MD5: - arch: x86_64 epoch: null name: perl-Digest-MD5 release: 4.el9 source: rpm version: '2.58' perl-DynaLoader: - arch: x86_64 epoch: 0 name: perl-DynaLoader release: 483.el9 source: rpm version: '1.47' perl-Encode: - arch: x86_64 epoch: 4 name: perl-Encode release: 462.el9 source: rpm version: '3.08' perl-Errno: - arch: x86_64 epoch: 0 name: perl-Errno release: 483.el9 source: rpm version: '1.30' perl-Error: - arch: noarch epoch: 1 name: perl-Error release: 7.el9 source: rpm version: '0.17029' perl-Exporter: - arch: noarch epoch: null name: perl-Exporter release: 461.el9 source: rpm version: '5.74' perl-Fcntl: - arch: x86_64 epoch: 0 name: perl-Fcntl release: 483.el9 source: rpm version: '1.13' perl-File-Basename: - arch: noarch epoch: 0 name: perl-File-Basename release: 483.el9 source: rpm version: '2.85' perl-File-Find: - arch: noarch epoch: 0 name: perl-File-Find release: 483.el9 source: rpm version: '1.37' perl-File-Path: - arch: noarch epoch: null name: perl-File-Path release: 4.el9 source: rpm version: '2.18' perl-File-Temp: - arch: noarch epoch: 1 name: perl-File-Temp release: 4.el9 source: rpm version: 0.231.100 perl-File-stat: - arch: noarch epoch: 0 name: perl-File-stat release: 483.el9 source: rpm version: '1.09' perl-FileHandle: - arch: noarch epoch: 0 name: perl-FileHandle release: 483.el9 source: rpm version: '2.03' perl-Getopt-Long: - arch: noarch epoch: 1 name: perl-Getopt-Long release: 4.el9 source: rpm version: '2.52' perl-Getopt-Std: - arch: noarch epoch: 0 name: perl-Getopt-Std release: 483.el9 source: rpm version: '1.12' perl-Git: - arch: noarch epoch: null name: perl-Git release: 1.el9 source: rpm version: 2.47.3 perl-HTTP-Tiny: - arch: noarch epoch: null name: perl-HTTP-Tiny release: 462.el9 source: rpm version: '0.076' perl-IO: - arch: x86_64 epoch: 0 name: perl-IO release: 483.el9 source: rpm version: '1.43' perl-IO-Socket-IP: - arch: noarch epoch: null name: perl-IO-Socket-IP release: 5.el9 source: rpm version: '0.41' perl-IO-Socket-SSL: - arch: noarch epoch: null name: perl-IO-Socket-SSL release: 2.el9 source: rpm version: '2.073' perl-IPC-Open3: - arch: noarch epoch: 0 name: perl-IPC-Open3 release: 483.el9 source: rpm version: '1.21' perl-MIME-Base64: - arch: x86_64 epoch: null name: perl-MIME-Base64 release: 4.el9 source: rpm version: '3.16' perl-Mozilla-CA: - arch: noarch epoch: null name: perl-Mozilla-CA release: 6.el9 source: rpm version: '20200520' perl-NDBM_File: - arch: x86_64 epoch: 0 name: perl-NDBM_File release: 483.el9 source: rpm version: '1.15' perl-Net-SSLeay: - arch: x86_64 epoch: null name: perl-Net-SSLeay release: 3.el9 source: rpm version: '1.94' perl-POSIX: - arch: x86_64 epoch: 0 name: perl-POSIX release: 483.el9 source: rpm version: '1.94' perl-PathTools: - arch: x86_64 epoch: null name: perl-PathTools release: 461.el9 source: rpm version: '3.78' perl-Pod-Escapes: - arch: noarch epoch: 1 name: perl-Pod-Escapes release: 460.el9 source: rpm version: '1.07' perl-Pod-Perldoc: - arch: noarch epoch: null name: perl-Pod-Perldoc release: 461.el9 source: rpm version: 3.28.01 perl-Pod-Simple: - arch: noarch epoch: 1 name: perl-Pod-Simple release: 4.el9 source: rpm version: '3.42' perl-Pod-Usage: - arch: noarch epoch: 4 name: perl-Pod-Usage release: 4.el9 source: rpm version: '2.01' perl-Scalar-List-Utils: - arch: x86_64 epoch: 4 name: perl-Scalar-List-Utils release: 462.el9 source: rpm version: '1.56' perl-SelectSaver: - arch: noarch epoch: 0 name: perl-SelectSaver release: 483.el9 source: rpm version: '1.02' perl-Socket: - arch: x86_64 
epoch: 4 name: perl-Socket release: 4.el9 source: rpm version: '2.031' perl-Storable: - arch: x86_64 epoch: 1 name: perl-Storable release: 460.el9 source: rpm version: '3.21' perl-Symbol: - arch: noarch epoch: 0 name: perl-Symbol release: 483.el9 source: rpm version: '1.08' perl-Term-ANSIColor: - arch: noarch epoch: null name: perl-Term-ANSIColor release: 461.el9 source: rpm version: '5.01' perl-Term-Cap: - arch: noarch epoch: null name: perl-Term-Cap release: 460.el9 source: rpm version: '1.17' perl-TermReadKey: - arch: x86_64 epoch: null name: perl-TermReadKey release: 11.el9 source: rpm version: '2.38' perl-Text-ParseWords: - arch: noarch epoch: null name: perl-Text-ParseWords release: 460.el9 source: rpm version: '3.30' perl-Text-Tabs+Wrap: - arch: noarch epoch: null name: perl-Text-Tabs+Wrap release: 460.el9 source: rpm version: '2013.0523' perl-Time-Local: - arch: noarch epoch: 2 name: perl-Time-Local release: 7.el9 source: rpm version: '1.300' perl-URI: - arch: noarch epoch: null name: perl-URI release: 3.el9 source: rpm version: '5.09' perl-base: - arch: noarch epoch: 0 name: perl-base release: 483.el9 source: rpm version: '2.27' perl-constant: - arch: noarch epoch: null name: perl-constant release: 461.el9 source: rpm version: '1.33' perl-if: - arch: noarch epoch: 0 name: perl-if release: 483.el9 source: rpm version: 0.60.800 perl-interpreter: - arch: x86_64 epoch: 4 name: perl-interpreter release: 483.el9 source: rpm version: 5.32.1 perl-lib: - arch: x86_64 epoch: 0 name: perl-lib release: 483.el9 source: rpm version: '0.65' perl-libnet: - arch: noarch epoch: null name: perl-libnet release: 4.el9 source: rpm version: '3.13' perl-libs: - arch: x86_64 epoch: 4 name: perl-libs release: 483.el9 source: rpm version: 5.32.1 perl-mro: - arch: x86_64 epoch: 0 name: perl-mro release: 483.el9 source: rpm version: '1.23' perl-overload: - arch: noarch epoch: 0 name: perl-overload release: 483.el9 source: rpm version: '1.31' perl-overloading: - arch: noarch epoch: 0 name: perl-overloading release: 483.el9 source: rpm version: '0.02' perl-parent: - arch: noarch epoch: 1 name: perl-parent release: 460.el9 source: rpm version: '0.238' perl-podlators: - arch: noarch epoch: 1 name: perl-podlators release: 460.el9 source: rpm version: '4.14' perl-srpm-macros: - arch: noarch epoch: null name: perl-srpm-macros release: 41.el9 source: rpm version: '1' perl-subs: - arch: noarch epoch: 0 name: perl-subs release: 483.el9 source: rpm version: '1.03' perl-vars: - arch: noarch epoch: 0 name: perl-vars release: 483.el9 source: rpm version: '1.05' pigz: - arch: x86_64 epoch: null name: pigz release: 4.el9 source: rpm version: '2.5' pkgconf: - arch: x86_64 epoch: null name: pkgconf release: 10.el9 source: rpm version: 1.7.3 pkgconf-m4: - arch: noarch epoch: null name: pkgconf-m4 release: 10.el9 source: rpm version: 1.7.3 pkgconf-pkg-config: - arch: x86_64 epoch: null name: pkgconf-pkg-config release: 10.el9 source: rpm version: 1.7.3 podman: - arch: x86_64 epoch: 6 name: podman release: 2.el9 source: rpm version: 5.6.0 policycoreutils: - arch: x86_64 epoch: null name: policycoreutils release: 3.el9 source: rpm version: '3.6' policycoreutils-python-utils: - arch: noarch epoch: null name: policycoreutils-python-utils release: 3.el9 source: rpm version: '3.6' polkit: - arch: x86_64 epoch: null name: polkit release: 14.el9 source: rpm version: '0.117' polkit-libs: - arch: x86_64 epoch: null name: polkit-libs release: 14.el9 source: rpm version: '0.117' polkit-pkla-compat: - arch: x86_64 epoch: null name: 
polkit-pkla-compat release: 21.el9 source: rpm version: '0.1' popt: - arch: x86_64 epoch: null name: popt release: 8.el9 source: rpm version: '1.18' prefixdevname: - arch: x86_64 epoch: null name: prefixdevname release: 8.el9 source: rpm version: 0.1.0 procps-ng: - arch: x86_64 epoch: null name: procps-ng release: 14.el9 source: rpm version: 3.3.17 protobuf-c: - arch: x86_64 epoch: null name: protobuf-c release: 13.el9 source: rpm version: 1.3.3 psmisc: - arch: x86_64 epoch: null name: psmisc release: 3.el9 source: rpm version: '23.4' publicsuffix-list-dafsa: - arch: noarch epoch: null name: publicsuffix-list-dafsa release: 3.el9 source: rpm version: '20210518' pyproject-srpm-macros: - arch: noarch epoch: null name: pyproject-srpm-macros release: 1.el9 source: rpm version: 1.16.2 python-rpm-macros: - arch: noarch epoch: null name: python-rpm-macros release: 54.el9 source: rpm version: '3.9' python-srpm-macros: - arch: noarch epoch: null name: python-srpm-macros release: 54.el9 source: rpm version: '3.9' python-unversioned-command: - arch: noarch epoch: null name: python-unversioned-command release: 2.el9 source: rpm version: 3.9.23 python3: - arch: x86_64 epoch: null name: python3 release: 2.el9 source: rpm version: 3.9.23 python3-argcomplete: - arch: noarch epoch: null name: python3-argcomplete release: 5.el9 source: rpm version: 1.12.0 python3-attrs: - arch: noarch epoch: null name: python3-attrs release: 7.el9 source: rpm version: 20.3.0 python3-audit: - arch: x86_64 epoch: null name: python3-audit release: 7.el9 source: rpm version: 3.1.5 python3-babel: - arch: noarch epoch: null name: python3-babel release: 2.el9 source: rpm version: 2.9.1 python3-cffi: - arch: x86_64 epoch: null name: python3-cffi release: 5.el9 source: rpm version: 1.14.5 python3-chardet: - arch: noarch epoch: null name: python3-chardet release: 5.el9 source: rpm version: 4.0.0 python3-configobj: - arch: noarch epoch: null name: python3-configobj release: 25.el9 source: rpm version: 5.0.6 python3-cryptography: - arch: x86_64 epoch: null name: python3-cryptography release: 5.el9 source: rpm version: 36.0.1 python3-dasbus: - arch: noarch epoch: null name: python3-dasbus release: 1.el9 source: rpm version: '1.7' python3-dateutil: - arch: noarch epoch: 1 name: python3-dateutil release: 7.el9 source: rpm version: 2.8.1 python3-dbus: - arch: x86_64 epoch: null name: python3-dbus release: 2.el9 source: rpm version: 1.2.18 python3-devel: - arch: x86_64 epoch: null name: python3-devel release: 2.el9 source: rpm version: 3.9.23 python3-distro: - arch: noarch epoch: null name: python3-distro release: 7.el9 source: rpm version: 1.5.0 python3-dnf: - arch: noarch epoch: null name: python3-dnf release: 31.el9 source: rpm version: 4.14.0 python3-dnf-plugins-core: - arch: noarch epoch: null name: python3-dnf-plugins-core release: 23.el9 source: rpm version: 4.3.0 python3-enchant: - arch: noarch epoch: null name: python3-enchant release: 5.el9 source: rpm version: 3.2.0 python3-file-magic: - arch: noarch epoch: null name: python3-file-magic release: 16.el9 source: rpm version: '5.39' python3-gobject-base: - arch: x86_64 epoch: null name: python3-gobject-base release: 6.el9 source: rpm version: 3.40.1 python3-gobject-base-noarch: - arch: noarch epoch: null name: python3-gobject-base-noarch release: 6.el9 source: rpm version: 3.40.1 python3-gpg: - arch: x86_64 epoch: null name: python3-gpg release: 6.el9 source: rpm version: 1.15.1 python3-hawkey: - arch: x86_64 epoch: null name: python3-hawkey release: 16.el9 source: rpm version: 
0.69.0 python3-idna: - arch: noarch epoch: null name: python3-idna release: 7.el9.1 source: rpm version: '2.10' python3-jinja2: - arch: noarch epoch: null name: python3-jinja2 release: 8.el9 source: rpm version: 2.11.3 python3-jmespath: - arch: noarch epoch: null name: python3-jmespath release: 11.el9 source: rpm version: 0.9.4 python3-jsonpatch: - arch: noarch epoch: null name: python3-jsonpatch release: 16.el9 source: rpm version: '1.21' python3-jsonpointer: - arch: noarch epoch: null name: python3-jsonpointer release: 4.el9 source: rpm version: '2.0' python3-jsonschema: - arch: noarch epoch: null name: python3-jsonschema release: 13.el9 source: rpm version: 3.2.0 python3-libcomps: - arch: x86_64 epoch: null name: python3-libcomps release: 1.el9 source: rpm version: 0.1.18 python3-libdnf: - arch: x86_64 epoch: null name: python3-libdnf release: 16.el9 source: rpm version: 0.69.0 python3-libs: - arch: x86_64 epoch: null name: python3-libs release: 2.el9 source: rpm version: 3.9.23 python3-libselinux: - arch: x86_64 epoch: null name: python3-libselinux release: 3.el9 source: rpm version: '3.6' python3-libsemanage: - arch: x86_64 epoch: null name: python3-libsemanage release: 5.el9 source: rpm version: '3.6' python3-libvirt: - arch: x86_64 epoch: null name: python3-libvirt release: 1.el9 source: rpm version: 10.10.0 python3-libxml2: - arch: x86_64 epoch: null name: python3-libxml2 release: 12.el9 source: rpm version: 2.9.13 python3-lxml: - arch: x86_64 epoch: null name: python3-lxml release: 3.el9 source: rpm version: 4.6.5 python3-markupsafe: - arch: x86_64 epoch: null name: python3-markupsafe release: 12.el9 source: rpm version: 1.1.1 python3-netaddr: - arch: noarch epoch: null name: python3-netaddr release: 3.el9 source: rpm version: 0.10.1 python3-netifaces: - arch: x86_64 epoch: null name: python3-netifaces release: 15.el9 source: rpm version: 0.10.6 python3-oauthlib: - arch: noarch epoch: null name: python3-oauthlib release: 5.el9 source: rpm version: 3.1.1 python3-packaging: - arch: noarch epoch: null name: python3-packaging release: 5.el9 source: rpm version: '20.9' python3-pexpect: - arch: noarch epoch: null name: python3-pexpect release: 7.el9 source: rpm version: 4.8.0 python3-pip: - arch: noarch epoch: null name: python3-pip release: 1.el9 source: rpm version: 21.3.1 python3-pip-wheel: - arch: noarch epoch: null name: python3-pip-wheel release: 1.el9 source: rpm version: 21.3.1 python3-ply: - arch: noarch epoch: null name: python3-ply release: 14.el9 source: rpm version: '3.11' python3-policycoreutils: - arch: noarch epoch: null name: python3-policycoreutils release: 3.el9 source: rpm version: '3.6' python3-prettytable: - arch: noarch epoch: null name: python3-prettytable release: 27.el9 source: rpm version: 0.7.2 python3-ptyprocess: - arch: noarch epoch: null name: python3-ptyprocess release: 12.el9 source: rpm version: 0.6.0 python3-pycparser: - arch: noarch epoch: null name: python3-pycparser release: 6.el9 source: rpm version: '2.20' python3-pyparsing: - arch: noarch epoch: null name: python3-pyparsing release: 9.el9 source: rpm version: 2.4.7 python3-pyrsistent: - arch: x86_64 epoch: null name: python3-pyrsistent release: 8.el9 source: rpm version: 0.17.3 python3-pyserial: - arch: noarch epoch: null name: python3-pyserial release: 12.el9 source: rpm version: '3.4' python3-pysocks: - arch: noarch epoch: null name: python3-pysocks release: 12.el9 source: rpm version: 1.7.1 python3-pytz: - arch: noarch epoch: null name: python3-pytz release: 5.el9 source: rpm version: '2021.1' 
python3-pyyaml: - arch: x86_64 epoch: null name: python3-pyyaml release: 6.el9 source: rpm version: 5.4.1 python3-requests: - arch: noarch epoch: null name: python3-requests release: 10.el9 source: rpm version: 2.25.1 python3-resolvelib: - arch: noarch epoch: null name: python3-resolvelib release: 5.el9 source: rpm version: 0.5.4 python3-rpm: - arch: x86_64 epoch: null name: python3-rpm release: 39.el9 source: rpm version: 4.16.1.3 python3-rpm-generators: - arch: noarch epoch: null name: python3-rpm-generators release: 9.el9 source: rpm version: '12' python3-rpm-macros: - arch: noarch epoch: null name: python3-rpm-macros release: 54.el9 source: rpm version: '3.9' python3-setools: - arch: x86_64 epoch: null name: python3-setools release: 1.el9 source: rpm version: 4.4.4 python3-setuptools: - arch: noarch epoch: null name: python3-setuptools release: 15.el9 source: rpm version: 53.0.0 python3-setuptools-wheel: - arch: noarch epoch: null name: python3-setuptools-wheel release: 15.el9 source: rpm version: 53.0.0 python3-six: - arch: noarch epoch: null name: python3-six release: 9.el9 source: rpm version: 1.15.0 python3-systemd: - arch: x86_64 epoch: null name: python3-systemd release: 19.el9 source: rpm version: '234' python3-urllib3: - arch: noarch epoch: null name: python3-urllib3 release: 6.el9 source: rpm version: 1.26.5 qemu-guest-agent: - arch: x86_64 epoch: 17 name: qemu-guest-agent release: 29.el9 source: rpm version: 9.1.0 qt5-srpm-macros: - arch: noarch epoch: null name: qt5-srpm-macros release: 1.el9 source: rpm version: 5.15.9 quota: - arch: x86_64 epoch: 1 name: quota release: 4.el9 source: rpm version: '4.09' quota-nls: - arch: noarch epoch: 1 name: quota-nls release: 4.el9 source: rpm version: '4.09' readline: - arch: x86_64 epoch: null name: readline release: 4.el9 source: rpm version: '8.1' readline-devel: - arch: x86_64 epoch: null name: readline-devel release: 4.el9 source: rpm version: '8.1' redhat-rpm-config: - arch: noarch epoch: null name: redhat-rpm-config release: 1.el9 source: rpm version: '210' rootfiles: - arch: noarch epoch: null name: rootfiles release: 35.el9 source: rpm version: '8.1' rpcbind: - arch: x86_64 epoch: null name: rpcbind release: 7.el9 source: rpm version: 1.2.6 rpm: - arch: x86_64 epoch: null name: rpm release: 39.el9 source: rpm version: 4.16.1.3 rpm-build: - arch: x86_64 epoch: null name: rpm-build release: 39.el9 source: rpm version: 4.16.1.3 rpm-build-libs: - arch: x86_64 epoch: null name: rpm-build-libs release: 39.el9 source: rpm version: 4.16.1.3 rpm-libs: - arch: x86_64 epoch: null name: rpm-libs release: 39.el9 source: rpm version: 4.16.1.3 rpm-plugin-audit: - arch: x86_64 epoch: null name: rpm-plugin-audit release: 39.el9 source: rpm version: 4.16.1.3 rpm-plugin-selinux: - arch: x86_64 epoch: null name: rpm-plugin-selinux release: 39.el9 source: rpm version: 4.16.1.3 rpm-plugin-systemd-inhibit: - arch: x86_64 epoch: null name: rpm-plugin-systemd-inhibit release: 39.el9 source: rpm version: 4.16.1.3 rpm-sign: - arch: x86_64 epoch: null name: rpm-sign release: 39.el9 source: rpm version: 4.16.1.3 rpm-sign-libs: - arch: x86_64 epoch: null name: rpm-sign-libs release: 39.el9 source: rpm version: 4.16.1.3 rpmlint: - arch: noarch epoch: null name: rpmlint release: 19.el9 source: rpm version: '1.11' rsync: - arch: x86_64 epoch: null name: rsync release: 3.el9 source: rpm version: 3.2.5 rsyslog: - arch: x86_64 epoch: null name: rsyslog release: 2.el9 source: rpm version: 8.2506.0 rsyslog-logrotate: - arch: x86_64 epoch: null name: 
rsyslog-logrotate release: 2.el9 source: rpm version: 8.2506.0 ruby: - arch: x86_64 epoch: null name: ruby release: 165.el9 source: rpm version: 3.0.7 ruby-default-gems: - arch: noarch epoch: null name: ruby-default-gems release: 165.el9 source: rpm version: 3.0.7 ruby-devel: - arch: x86_64 epoch: null name: ruby-devel release: 165.el9 source: rpm version: 3.0.7 ruby-libs: - arch: x86_64 epoch: null name: ruby-libs release: 165.el9 source: rpm version: 3.0.7 rubygem-bigdecimal: - arch: x86_64 epoch: null name: rubygem-bigdecimal release: 165.el9 source: rpm version: 3.0.0 rubygem-bundler: - arch: noarch epoch: null name: rubygem-bundler release: 165.el9 source: rpm version: 2.2.33 rubygem-io-console: - arch: x86_64 epoch: null name: rubygem-io-console release: 165.el9 source: rpm version: 0.5.7 rubygem-json: - arch: x86_64 epoch: null name: rubygem-json release: 165.el9 source: rpm version: 2.5.1 rubygem-psych: - arch: x86_64 epoch: null name: rubygem-psych release: 165.el9 source: rpm version: 3.3.2 rubygem-rdoc: - arch: noarch epoch: null name: rubygem-rdoc release: 165.el9 source: rpm version: 6.3.4.1 rubygems: - arch: noarch epoch: null name: rubygems release: 165.el9 source: rpm version: 3.2.33 rust-srpm-macros: - arch: noarch epoch: null name: rust-srpm-macros release: 4.el9 source: rpm version: '17' samba-client-libs: - arch: x86_64 epoch: 0 name: samba-client-libs release: 6.el9 source: rpm version: 4.22.4 samba-common: - arch: noarch epoch: 0 name: samba-common release: 6.el9 source: rpm version: 4.22.4 samba-common-libs: - arch: x86_64 epoch: 0 name: samba-common-libs release: 6.el9 source: rpm version: 4.22.4 sed: - arch: x86_64 epoch: null name: sed release: 9.el9 source: rpm version: '4.8' selinux-policy: - arch: noarch epoch: null name: selinux-policy release: 1.el9 source: rpm version: 38.1.65 selinux-policy-targeted: - arch: noarch epoch: null name: selinux-policy-targeted release: 1.el9 source: rpm version: 38.1.65 setroubleshoot-plugins: - arch: noarch epoch: null name: setroubleshoot-plugins release: 4.el9 source: rpm version: 3.3.14 setroubleshoot-server: - arch: x86_64 epoch: null name: setroubleshoot-server release: 2.el9 source: rpm version: 3.3.35 setup: - arch: noarch epoch: null name: setup release: 10.el9 source: rpm version: 2.13.7 sg3_utils: - arch: x86_64 epoch: null name: sg3_utils release: 10.el9 source: rpm version: '1.47' sg3_utils-libs: - arch: x86_64 epoch: null name: sg3_utils-libs release: 10.el9 source: rpm version: '1.47' shadow-utils: - arch: x86_64 epoch: 2 name: shadow-utils release: 15.el9 source: rpm version: '4.9' shadow-utils-subid: - arch: x86_64 epoch: 2 name: shadow-utils-subid release: 15.el9 source: rpm version: '4.9' shared-mime-info: - arch: x86_64 epoch: null name: shared-mime-info release: 5.el9 source: rpm version: '2.1' skopeo: - arch: x86_64 epoch: 2 name: skopeo release: 1.el9 source: rpm version: 1.20.0 slang: - arch: x86_64 epoch: null name: slang release: 11.el9 source: rpm version: 2.3.2 slirp4netns: - arch: x86_64 epoch: null name: slirp4netns release: 1.el9 source: rpm version: 1.3.3 snappy: - arch: x86_64 epoch: null name: snappy release: 8.el9 source: rpm version: 1.1.8 sos: - arch: noarch epoch: null name: sos release: 2.el9 source: rpm version: 4.10.0 sqlite: - arch: x86_64 epoch: null name: sqlite release: 8.el9 source: rpm version: 3.34.1 sqlite-libs: - arch: x86_64 epoch: null name: sqlite-libs release: 8.el9 source: rpm version: 3.34.1 squashfs-tools: - arch: x86_64 epoch: null name: squashfs-tools release: 
10.git1.el9 source: rpm version: '4.4' sscg: - arch: x86_64 epoch: null name: sscg release: 10.el9 source: rpm version: 3.0.0 sshpass: - arch: x86_64 epoch: null name: sshpass release: 4.el9 source: rpm version: '1.09' sssd-client: - arch: x86_64 epoch: null name: sssd-client release: 4.el9 source: rpm version: 2.9.7 sssd-common: - arch: x86_64 epoch: null name: sssd-common release: 4.el9 source: rpm version: 2.9.7 sssd-kcm: - arch: x86_64 epoch: null name: sssd-kcm release: 4.el9 source: rpm version: 2.9.7 sssd-nfs-idmap: - arch: x86_64 epoch: null name: sssd-nfs-idmap release: 4.el9 source: rpm version: 2.9.7 sudo: - arch: x86_64 epoch: null name: sudo release: 13.el9 source: rpm version: 1.9.5p2 systemd: - arch: x86_64 epoch: null name: systemd release: 55.el9 source: rpm version: '252' systemd-devel: - arch: x86_64 epoch: null name: systemd-devel release: 55.el9 source: rpm version: '252' systemd-libs: - arch: x86_64 epoch: null name: systemd-libs release: 55.el9 source: rpm version: '252' systemd-pam: - arch: x86_64 epoch: null name: systemd-pam release: 55.el9 source: rpm version: '252' systemd-rpm-macros: - arch: noarch epoch: null name: systemd-rpm-macros release: 55.el9 source: rpm version: '252' systemd-udev: - arch: x86_64 epoch: null name: systemd-udev release: 55.el9 source: rpm version: '252' tar: - arch: x86_64 epoch: 2 name: tar release: 7.el9 source: rpm version: '1.34' tcl: - arch: x86_64 epoch: 1 name: tcl release: 7.el9 source: rpm version: 8.6.10 tcpdump: - arch: x86_64 epoch: 14 name: tcpdump release: 9.el9 source: rpm version: 4.99.0 teamd: - arch: x86_64 epoch: null name: teamd release: 16.el9 source: rpm version: '1.31' time: - arch: x86_64 epoch: null name: time release: 18.el9 source: rpm version: '1.9' tmux: - arch: x86_64 epoch: null name: tmux release: 5.el9 source: rpm version: 3.2a tpm2-tss: - arch: x86_64 epoch: null name: tpm2-tss release: 1.el9 source: rpm version: 3.2.3 traceroute: - arch: x86_64 epoch: 3 name: traceroute release: 1.el9 source: rpm version: 2.1.1 tzdata: - arch: noarch epoch: null name: tzdata release: 2.el9 source: rpm version: 2025b unzip: - arch: x86_64 epoch: null name: unzip release: 59.el9 source: rpm version: '6.0' userspace-rcu: - arch: x86_64 epoch: null name: userspace-rcu release: 6.el9 source: rpm version: 0.12.1 util-linux: - arch: x86_64 epoch: null name: util-linux release: 21.el9 source: rpm version: 2.37.4 util-linux-core: - arch: x86_64 epoch: null name: util-linux-core release: 21.el9 source: rpm version: 2.37.4 vim-minimal: - arch: x86_64 epoch: 2 name: vim-minimal release: 22.el9 source: rpm version: 8.2.2637 virt-install: - arch: noarch epoch: null name: virt-install release: 1.el9 source: rpm version: 5.0.0 virt-manager-common: - arch: noarch epoch: null name: virt-manager-common release: 1.el9 source: rpm version: 5.0.0 webkit2gtk3-jsc: - arch: x86_64 epoch: null name: webkit2gtk3-jsc release: 1.el9 source: rpm version: 2.48.5 wget: - arch: x86_64 epoch: null name: wget release: 8.el9 source: rpm version: 1.21.1 which: - arch: x86_64 epoch: null name: which release: 30.el9 source: rpm version: '2.21' xfsprogs: - arch: x86_64 epoch: null name: xfsprogs release: 7.el9 source: rpm version: 6.4.0 xmlstarlet: - arch: x86_64 epoch: null name: xmlstarlet release: 20.el9 source: rpm version: 1.6.1 xorriso: - arch: x86_64 epoch: null name: xorriso release: 5.el9 source: rpm version: 1.5.4 xz: - arch: x86_64 epoch: null name: xz release: 8.el9 source: rpm version: 5.2.5 xz-devel: - arch: x86_64 epoch: null name: xz-devel 
release: 8.el9 source: rpm version: 5.2.5 xz-libs: - arch: x86_64 epoch: null name: xz-libs release: 8.el9 source: rpm version: 5.2.5 yajl: - arch: x86_64 epoch: null name: yajl release: 25.el9 source: rpm version: 2.1.0 yum: - arch: noarch epoch: null name: yum release: 31.el9 source: rpm version: 4.14.0 yum-utils: - arch: noarch epoch: null name: yum-utils release: 23.el9 source: rpm version: 4.3.0 zip: - arch: x86_64 epoch: null name: zip release: 35.el9 source: rpm version: '3.0' zlib: - arch: x86_64 epoch: null name: zlib release: 41.el9 source: rpm version: 1.2.11 zlib-devel: - arch: x86_64 epoch: null name: zlib-devel release: 41.el9 source: rpm version: 1.2.11 zstd: - arch: x86_64 epoch: null name: zstd release: 1.el9 source: rpm version: 1.5.5 home/zuul/zuul-output/logs/ci-framework-data/artifacts/zuul_inventory.yml0000644000175000017500000015402515071030027026204 0ustar zuulzuulall: children: computes: hosts: compute-0: null compute-1: null ocps: hosts: crc: null zuul_unreachable: hosts: {} hosts: compute-0: ansible_connection: ssh ansible_host: 38.102.83.32 ansible_port: 22 ansible_python_interpreter: auto ansible_user: zuul cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_build_images_output: {} cifmw_dlrn_report_result: false cifmw_edpm_telemetry_enabled_exporters: - podman_exporter - openstack_network_exporter cifmw_extras: - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/multinode-ci.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/horizon.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. src_dir }}/ci/scenarios/{{ watcher_scenario }}.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. 
src_dir }}/ci/tests/watcher-tempest.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '123456789' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_operator_build_output: operators: openstack-operator: git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9 git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator image: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9 image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9 image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9 watcher-operator: git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d cifmw_test_operator_tempest_external_plugin: - changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin repository: https://opendev.org/openstack/watcher-tempest-plugin.git cifmw_test_operator_tempest_image_tag: watcher_latest cifmw_test_operator_tempest_namespace: '{{ content_provider_os_registry_url | split(''/'') | last }}' cifmw_test_operator_tempest_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_openstack: false cifmw_update_containers_org: podified-epoxy-centos9 cifmw_update_containers_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_tag: watcher_latest cifmw_update_containers_watcher: true cifmw_use_libvirt: false cifmw_zuul_target_host: controller content_provider_dlrn_md5_hash: '' content_provider_os_registry_url: 38.102.83.53:5001/podified-epoxy-centos9 content_provider_registry_ip: 38.102.83.53 crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: compute-0: networks: default: ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 tenant: config_nm: false ip: 172.19.0.100 compute-1: networks: default: ip: 192.168.122.101 internal-api: config_nm: false ip: 172.17.0.101 storage: config_nm: false ip: 172.18.0.101 tenant: config_nm: false ip: 172.19.0.101 controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 enable_ramdisk: true fetch_dlrn_hash: false nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 660cfe67-cb09-4d1b-96c1-30f05d27cde7 host_id: d19710e37f7b2620eb9f1bc9cfdfc06732b1f0c31221781941dd4533 interface_ip: 38.102.83.32 label: cloud-centos-9-stream-tripleo private_ipv4: 38.102.83.32 
private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.32 public_ipv6: '' region: RegionOne slot: null push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true watcher_scenario: edpm-no-notifications watcher_services_tag: watcher_latest watcher_tempest_max_microversion: '1.4' zuul_log_collection: false compute-1: ansible_connection: ssh ansible_host: 38.102.83.194 ansible_port: 22 ansible_python_interpreter: auto ansible_user: zuul cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_build_images_output: {} cifmw_dlrn_report_result: false cifmw_edpm_telemetry_enabled_exporters: - podman_exporter - openstack_network_exporter cifmw_extras: - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/multinode-ci.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/horizon.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. src_dir }}/ci/scenarios/{{ watcher_scenario }}.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. src_dir }}/ci/tests/watcher-tempest.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '123456789' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_operator_build_output: operators: openstack-operator: git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9 git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator image: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9 image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9 image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9 watcher-operator: git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d cifmw_test_operator_tempest_external_plugin: - changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin repository: https://opendev.org/openstack/watcher-tempest-plugin.git cifmw_test_operator_tempest_image_tag: watcher_latest cifmw_test_operator_tempest_namespace: '{{ content_provider_os_registry_url | split(''/'') | last }}' cifmw_test_operator_tempest_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_openstack: false cifmw_update_containers_org: podified-epoxy-centos9 cifmw_update_containers_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_tag: watcher_latest cifmw_update_containers_watcher: true cifmw_use_libvirt: false cifmw_zuul_target_host: controller content_provider_dlrn_md5_hash: '' content_provider_os_registry_url: 38.102.83.53:5001/podified-epoxy-centos9 
content_provider_registry_ip: 38.102.83.53 crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: compute-0: networks: default: ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 tenant: config_nm: false ip: 172.19.0.100 compute-1: networks: default: ip: 192.168.122.101 internal-api: config_nm: false ip: 172.17.0.101 storage: config_nm: false ip: 172.18.0.101 tenant: config_nm: false ip: 172.19.0.101 controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 enable_ramdisk: true fetch_dlrn_hash: false nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: ff124b62-670a-418b-9791-f4b76e5224ee host_id: b012578aee5370fae73eb6c92c4679617335173cccca05390470f411 interface_ip: 38.102.83.194 label: cloud-centos-9-stream-tripleo private_ipv4: 38.102.83.194 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.194 public_ipv6: '' region: RegionOne slot: null push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true watcher_scenario: edpm-no-notifications watcher_services_tag: watcher_latest watcher_tempest_max_microversion: '1.4' zuul_log_collection: false controller: ansible_connection: ssh ansible_host: 38.102.83.51 ansible_port: 22 ansible_python_interpreter: auto ansible_user: zuul cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_build_images_output: {} cifmw_dlrn_report_result: false cifmw_edpm_telemetry_enabled_exporters: - podman_exporter - openstack_network_exporter cifmw_extras: - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/multinode-ci.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/horizon.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. src_dir }}/ci/scenarios/{{ watcher_scenario }}.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. 
src_dir }}/ci/tests/watcher-tempest.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '123456789' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_operator_build_output: operators: openstack-operator: git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9 git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator image: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9 image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9 image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9 watcher-operator: git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d cifmw_test_operator_tempest_external_plugin: - changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin repository: https://opendev.org/openstack/watcher-tempest-plugin.git cifmw_test_operator_tempest_image_tag: watcher_latest cifmw_test_operator_tempest_namespace: '{{ content_provider_os_registry_url | split(''/'') | last }}' cifmw_test_operator_tempest_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_openstack: false cifmw_update_containers_org: podified-epoxy-centos9 cifmw_update_containers_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_tag: watcher_latest cifmw_update_containers_watcher: true cifmw_use_libvirt: false cifmw_zuul_target_host: controller content_provider_dlrn_md5_hash: '' content_provider_os_registry_url: 38.102.83.53:5001/podified-epoxy-centos9 content_provider_registry_ip: 38.102.83.53 crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: compute-0: networks: default: ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 tenant: config_nm: false ip: 172.19.0.100 compute-1: networks: default: ip: 192.168.122.101 internal-api: config_nm: false ip: 172.17.0.101 storage: config_nm: false ip: 172.18.0.101 tenant: config_nm: false ip: 172.19.0.101 controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 enable_ramdisk: true fetch_dlrn_hash: false nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: e795da60-c8d6-4446-ba72-4fddfe4bf7ea host_id: 5519e7a0ee5dc826795d295efc9c908d171b61deb9bf71b1016f861f interface_ip: 38.102.83.51 label: cloud-centos-9-stream-tripleo-medium private_ipv4: 
38.102.83.51 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.51 public_ipv6: '' region: RegionOne slot: null push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true watcher_scenario: edpm-no-notifications watcher_services_tag: watcher_latest watcher_tempest_max_microversion: '1.4' zuul_log_collection: false crc: ansible_connection: ssh ansible_host: 38.102.83.110 ansible_port: 22 ansible_python_interpreter: auto ansible_user: core cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_build_images_output: {} cifmw_dlrn_report_result: false cifmw_edpm_telemetry_enabled_exporters: - podman_exporter - openstack_network_exporter cifmw_extras: - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/multinode-ci.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/horizon.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. src_dir }}/ci/scenarios/{{ watcher_scenario }}.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. src_dir }}/ci/tests/watcher-tempest.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '123456789' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_operator_build_output: operators: openstack-operator: git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9 git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator image: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9 image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9 image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9 watcher-operator: git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d cifmw_test_operator_tempest_external_plugin: - changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin repository: https://opendev.org/openstack/watcher-tempest-plugin.git cifmw_test_operator_tempest_image_tag: watcher_latest cifmw_test_operator_tempest_namespace: '{{ content_provider_os_registry_url | split(''/'') | last }}' cifmw_test_operator_tempest_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_openstack: false cifmw_update_containers_org: podified-epoxy-centos9 cifmw_update_containers_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_tag: watcher_latest cifmw_update_containers_watcher: true cifmw_use_libvirt: false cifmw_zuul_target_host: controller content_provider_dlrn_md5_hash: '' content_provider_os_registry_url: 38.102.83.53:5001/podified-epoxy-centos9 
content_provider_registry_ip: 38.102.83.53 crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: compute-0: networks: default: ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 tenant: config_nm: false ip: 172.19.0.100 compute-1: networks: default: ip: 192.168.122.101 internal-api: config_nm: false ip: 172.17.0.101 storage: config_nm: false ip: 172.18.0.101 tenant: config_nm: false ip: 172.19.0.101 controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 enable_ramdisk: true fetch_dlrn_hash: false nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: bfd057b4-b43d-4dc5-bc10-e91bf10a649b host_id: 7be6eb536a89b1266edff7cfa16e93a8ea0da5df2cfadeeb194a3ffc interface_ip: 38.102.83.110 label: coreos-crc-extracted-2-39-0-3xl private_ipv4: 38.102.83.110 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.110 public_ipv6: '' region: RegionOne slot: null push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true watcher_scenario: edpm-no-notifications watcher_services_tag: watcher_latest watcher_tempest_max_microversion: '1.4' zuul_log_collection: false localhost: ansible_connection: local vars: cifmw_artifacts_crc_sshkey: ~/.ssh/id_cifw cifmw_build_images_output: {} cifmw_dlrn_report_result: false cifmw_edpm_telemetry_enabled_exporters: - podman_exporter - openstack_network_exporter cifmw_extras: - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/multinode-ci.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/ci-framework'']. src_dir }}/scenarios/centos-9/horizon.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. src_dir }}/ci/scenarios/{{ watcher_scenario }}.yml' - '@{{ ansible_user_dir }}/{{ zuul.projects[''github.com/openstack-k8s-operators/watcher-operator'']. 
src_dir }}/ci/tests/watcher-tempest.yml' cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '123456789' cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_operator_build_output: operators: openstack-operator: git_commit_hash: 245af87e94976809f2023f59c19dffb95df97ed9 git_src_dir: ~/src/github.com/openstack-k8s-operators/openstack-operator image: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator:245af87e94976809f2023f59c19dffb95df97ed9 image_bundle: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-bundle:245af87e94976809f2023f59c19dffb95df97ed9 image_catalog: 38.102.83.53:5001/openstack-k8s-operators/openstack-operator-index:245af87e94976809f2023f59c19dffb95df97ed9 watcher-operator: git_commit_hash: 14377136e67c9cd67507a059bfde2f19f140387d git_src_dir: /home/zuul/src/github.com/openstack-k8s-operators/watcher-operator image: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator:14377136e67c9cd67507a059bfde2f19f140387d image_bundle: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-bundle:14377136e67c9cd67507a059bfde2f19f140387d image_catalog: 38.102.83.53:5001/openstack-k8s-operators/watcher-operator-index:14377136e67c9cd67507a059bfde2f19f140387d cifmw_test_operator_tempest_external_plugin: - changeRefspec: 380572db57798530b64dcac14c6b01b0382c5d8e changeRepository: https://review.opendev.org/openstack/watcher-tempest-plugin repository: https://opendev.org/openstack/watcher-tempest-plugin.git cifmw_test_operator_tempest_image_tag: watcher_latest cifmw_test_operator_tempest_namespace: '{{ content_provider_os_registry_url | split(''/'') | last }}' cifmw_test_operator_tempest_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_openstack: false cifmw_update_containers_org: podified-epoxy-centos9 cifmw_update_containers_registry: '{{ content_provider_os_registry_url | split(''/'') | first }}' cifmw_update_containers_tag: watcher_latest cifmw_update_containers_watcher: true cifmw_use_libvirt: false cifmw_zuul_target_host: controller content_provider_dlrn_md5_hash: '' content_provider_os_registry_url: 38.102.83.53:5001/podified-epoxy-centos9 content_provider_registry_ip: 38.102.83.53 crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: compute-0: networks: default: ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 tenant: config_nm: false ip: 172.19.0.100 compute-1: networks: default: ip: 192.168.122.101 internal-api: config_nm: false ip: 172.17.0.101 storage: config_nm: false ip: 172.18.0.101 tenant: config_nm: false ip: 172.19.0.101 controller: networks: default: ip: 192.168.122.11 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 tenant: ip: 172.19.0.5 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 tenant: range: 172.19.0.0/24 vlan: 22 enable_ramdisk: true fetch_dlrn_hash: false push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true watcher_scenario: edpm-no-notifications watcher_services_tag: watcher_latest watcher_tempest_max_microversion: '1.4' zuul: _inheritance_path: - 
'' - '' - '' - '' - '' - '' - '' - '' - '' - '' ansible_version: '8' attempts: 1 branch: main build: 9ce4c11f9f6a4904bf6148a8276a3232 build_refs: - branch: main change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null buildset: f9416ac601264548b137ce1f44fe627c buildset_refs: - branch: main change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 child_jobs: [] commit_id: 14377136e67c9cd67507a059bfde2f19f140387d event_id: 7dde6e80-a2f2-11f0-83f1-b4af7183f5ac executor: hostname: ze01.softwarefactory-project.io inventory_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/ansible/inventory.yaml log_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/logs result_data_file: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/results.json src_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work/src work_root: /var/lib/zuul/builds/9ce4c11f9f6a4904bf6148a8276a3232/work items: - branch: main change: '287' change_message: '[WIP] Move rabbitmq notifications queue to controlplane level Move rabbitmq notifications queue from enabling at nova/cinder/watcher level to openstack controlplane level after that usage is available since https://github.com/openstack-k8s-operators/openstack-operator/pull/1591' change_url: https://github.com/openstack-k8s-operators/watcher-operator/pull/287 commit_id: 14377136e67c9cd67507a059bfde2f19f140387d patchset: 14377136e67c9cd67507a059bfde2f19f140387d project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator topic: null job: watcher-operator-validation-epoxy-ocp4-16 jobtags: [] max_attempts: 
1 message: W1dJUF0gTW92ZSByYWJiaXRtcSBub3RpZmljYXRpb25zIHF1ZXVlIHRvIGNvbnRyb2xwbGFuZSBsZXZlbAoKTW92ZSByYWJiaXRtcSBub3RpZmljYXRpb25zIHF1ZXVlIGZyb20gZW5hYmxpbmcgYXQgbm92YS9jaW5kZXIvd2F0Y2hlciBsZXZlbCB0byBvcGVuc3RhY2sgY29udHJvbHBsYW5lIGxldmVsIGFmdGVyIHRoYXQgdXNhZ2UgaXMgYXZhaWxhYmxlIHNpbmNlIGh0dHBzOi8vZ2l0aHViLmNvbS9vcGVuc3RhY2stazhzLW9wZXJhdG9ycy9vcGVuc3RhY2stb3BlcmF0b3IvcHVsbC8xNTkx patchset: 14377136e67c9cd67507a059bfde2f19f140387d pipeline: github-check playbook_context: playbook_projects: trusted/project_0/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 941f6f7666fdff0145523beb29ceda8db25c234c trusted/project_1/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df trusted/project_2/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d207d5ad1c5824d6db58c2eb5935a8b36674cbe4 trusted/project_3/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 untrusted/project_0/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 untrusted/project_1/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 941f6f7666fdff0145523beb29ceda8db25c234c untrusted/project_2/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df untrusted/project_3/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d207d5ad1c5824d6db58c2eb5935a8b36674cbe4 playbooks: - path: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks/edpm/run.yml roles: - checkout: main checkout_description: playbook branch link_name: ansible/playbook_0/role_0/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_0/role_0/ci-framework/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_1/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_0/role_1/config/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_2/zuul-jobs link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_0/role_2/zuul-jobs/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_3/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_0/role_3/rdo-jobs/roles post_review: false project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator name: openstack-k8s-operators/watcher-operator short_name: watcher-operator src_dir: src/github.com/openstack-k8s-operators/watcher-operator projects: github.com/crc-org/crc-cloud: canonical_hostname: github.com canonical_name: github.com/crc-org/crc-cloud checkout: main checkout_description: project override ref commit: f6ed2f2d118884a075895bbf954ff6000e540430 name: crc-org/crc-cloud required: true short_name: crc-cloud src_dir: src/github.com/crc-org/crc-cloud github.com/openstack-k8s-operators/ci-framework: canonical_hostname: github.com 
canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main checkout_description: zuul branch commit: 35b8986b014c5316d873d58c20dfc131ae44aa83 name: openstack-k8s-operators/ci-framework required: true short_name: ci-framework src_dir: src/github.com/openstack-k8s-operators/ci-framework github.com/openstack-k8s-operators/edpm-ansible: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/edpm-ansible checkout: main checkout_description: zuul branch commit: 95aa63de3182faad63a69301d101debad3efc936 name: openstack-k8s-operators/edpm-ansible required: true short_name: edpm-ansible src_dir: src/github.com/openstack-k8s-operators/edpm-ansible github.com/openstack-k8s-operators/infra-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/infra-operator checkout: main checkout_description: zuul branch commit: 2b5048bbcae44dfeaacbb43830318ca45c13f182 name: openstack-k8s-operators/infra-operator required: true short_name: infra-operator src_dir: src/github.com/openstack-k8s-operators/infra-operator github.com/openstack-k8s-operators/install_yamls: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/install_yamls checkout: main checkout_description: zuul branch commit: bb26118ddc70016cbd2118a0b0a35d5f6ab9c343 name: openstack-k8s-operators/install_yamls required: true short_name: install_yamls src_dir: src/github.com/openstack-k8s-operators/install_yamls github.com/openstack-k8s-operators/openstack-baremetal-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-baremetal-operator checkout: main checkout_description: zuul branch commit: 3bf7652f010ead15ac2d2fec7e3b71c442b8fb8d name: openstack-k8s-operators/openstack-baremetal-operator required: true short_name: openstack-baremetal-operator src_dir: src/github.com/openstack-k8s-operators/openstack-baremetal-operator github.com/openstack-k8s-operators/openstack-must-gather: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-must-gather checkout: main checkout_description: zuul branch commit: 748dff8508cbb49e00426d46a4487b9f4c0b0096 name: openstack-k8s-operators/openstack-must-gather required: true short_name: openstack-must-gather src_dir: src/github.com/openstack-k8s-operators/openstack-must-gather github.com/openstack-k8s-operators/openstack-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/openstack-operator checkout: main checkout_description: zuul branch commit: 245af87e94976809f2023f59c19dffb95df97ed9 name: openstack-k8s-operators/openstack-operator required: true short_name: openstack-operator src_dir: src/github.com/openstack-k8s-operators/openstack-operator github.com/openstack-k8s-operators/repo-setup: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/repo-setup checkout: main checkout_description: zuul branch commit: 37b10946c6a10f9fa26c13305f06bfd6867e723f name: openstack-k8s-operators/repo-setup required: true short_name: repo-setup src_dir: src/github.com/openstack-k8s-operators/repo-setup github.com/openstack-k8s-operators/watcher-operator: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/watcher-operator checkout: main checkout_description: zuul branch commit: 14377136e67c9cd67507a059bfde2f19f140387d name: openstack-k8s-operators/watcher-operator required: false short_name: watcher-operator src_dir: 
src/github.com/openstack-k8s-operators/watcher-operator opendev.org/zuul/zuul-jobs: canonical_hostname: opendev.org canonical_name: opendev.org/zuul/zuul-jobs checkout: master checkout_description: project default branch commit: 3f62739c27168ebe05c65ba9b26a90fe6a6268df name: zuul/zuul-jobs required: true short_name: zuul-jobs src_dir: src/opendev.org/zuul/zuul-jobs review.rdoproject.org/config: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/config checkout: master checkout_description: project default branch commit: 941f6f7666fdff0145523beb29ceda8db25c234c name: config required: true short_name: config src_dir: src/review.rdoproject.org/config ref: refs/pull/287/head resources: {} tenant: rdoproject.org timeout: 10800 topic: null voting: true zuul_log_collection: false home/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/0000755000175000017500000000000015071030056025067 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/delorean.repo.md50000644000175000017500000000004115071030055030225 0ustar zuulzuulb78cfc68a577b1553523c8a70a34e297 home/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/delorean-antelope-testing.repo0000644000175000017500000000316315071030056033032 0ustar zuulzuul[delorean-antelope-testing] name=dlrn-antelope-testing baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/deps/latest/ enabled=1 gpgcheck=0 module_hotfixes=1 [delorean-antelope-build-deps] name=dlrn-antelope-build-deps baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/build-deps/latest/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-rabbitmq] name=centos9-rabbitmq baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/messaging/$basearch/rabbitmq-38/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-storage] name=centos9-storage baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/storage/$basearch/ceph-reef/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-opstools] name=centos9-opstools baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/opstools/$basearch/collectd-5/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-nfv-ovs] name=NFV SIG OpenvSwitch baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/nfv/$basearch/openvswitch-2/ gpgcheck=0 enabled=1 module_hotfixes=1 # epel is required for Ceph Reef [epel-low-priority] name=Extra Packages for Enterprise Linux $releasever - $basearch metalink=https://mirrors.fedoraproject.org/metalink?repo=epel-$releasever&arch=$basearch&infra=$infra&content=$contentdir enabled=1 gpgcheck=0 countme=1 priority=100 includepkgs=libarrow*,parquet*,python3-asyncssh,re2,python3-grpcio,grpc*,abseil*,thrift* home/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/delorean.repo0000644000175000017500000001336715071030056027561 0ustar zuulzuul[delorean-component-barbican] name=delorean-openstack-barbican-42b4c41831408a8e323fec3c8983b5c793b64874 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/barbican/42/b4/42b4c41831408a8e323fec3c8983b5c793b64874_08052e9d enabled=1 gpgcheck=0 priority=1 [delorean-component-baremetal] name=delorean-python-glean-10df0bd91b9bc5c9fd9cc02d75c0084cd4da29a7 
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/baremetal/10/df/10df0bd91b9bc5c9fd9cc02d75c0084cd4da29a7_36137eb3
enabled=1
gpgcheck=0
priority=1

[delorean-component-cinder]
name=delorean-openstack-cinder-1c00d6490d88e436f26efb71f2ac96e75252e97c
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/cinder/1c/00/1c00d6490d88e436f26efb71f2ac96e75252e97c_f716f000
enabled=1
gpgcheck=0
priority=1

[delorean-component-clients]
name=delorean-python-stevedore-c4acc5639fd2329372142e39464fcca0209b0018
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/clients/c4/ac/c4acc5639fd2329372142e39464fcca0209b0018_d3ef8337
enabled=1
gpgcheck=0
priority=1

[delorean-component-cloudops]
name=delorean-python-cloudkitty-tests-tempest-3961dcddb873b1ff6710d7df0739c4285dd71f8c
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/cloudops/39/61/3961dcddb873b1ff6710d7df0739c4285dd71f8c_33e4dd93
enabled=1
gpgcheck=0
priority=1

[delorean-component-common]
name=delorean-diskimage-builder-43381184423c185801b5e24f5f3e1e40bb7496f8
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/common/43/38/43381184423c185801b5e24f5f3e1e40bb7496f8_bf6d4aba
enabled=1
gpgcheck=0
priority=1

[delorean-component-compute]
name=delorean-openstack-nova-6f8decf0b4f1aa2e96292b6a2ffc28249fe4af5e
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/compute/6f/8d/6f8decf0b4f1aa2e96292b6a2ffc28249fe4af5e_dc05b899
enabled=1
gpgcheck=0
priority=1

[delorean-component-designate]
name=delorean-python-designate-tests-tempest-347fdbc9b4595a10b726526b3c0b5928e5b7fcf2
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/designate/34/7f/347fdbc9b4595a10b726526b3c0b5928e5b7fcf2_3fd39337
enabled=1
gpgcheck=0
priority=1

[delorean-component-glance]
name=delorean-openstack-glance-1fd12c29b339f30fe823e2b5beba14b5f241e52a
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/glance/1f/d1/1fd12c29b339f30fe823e2b5beba14b5f241e52a_0d693729
enabled=1
gpgcheck=0
priority=1

[delorean-component-keystone]
name=delorean-openstack-keystone-e4b40af0ae3698fbbbbfb8c22468b33aae80e6d7
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/keystone/e4/b4/e4b40af0ae3698fbbbbfb8c22468b33aae80e6d7_264c03cc
enabled=1
gpgcheck=0
priority=1

[delorean-component-manila]
name=delorean-openstack-manila-3c01b7181572c95dac462eb19c3121e36cb0fe95
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/manila/3c/01/3c01b7181572c95dac462eb19c3121e36cb0fe95_912dfd18
enabled=1
gpgcheck=0
priority=1

[delorean-component-network]
name=delorean-python-vmware-nsxlib-458234972d1428ac92bbeff26511edfdc49b6b2f
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/network/45/82/458234972d1428ac92bbeff26511edfdc49b6b2f_1bca6328
enabled=1
gpgcheck=0
priority=1

[delorean-component-octavia]
name=delorean-openstack-octavia-ba397f07a7331190208c93368ee23826ac4e2707
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/octavia/ba/39/ba397f07a7331190208c93368ee23826ac4e2707_9d6e596a
enabled=1
gpgcheck=0
priority=1

[delorean-component-optimize]
name=delorean-openstack-watcher-c014f81a8647287f6dcc339321c1256f5a2e82d5
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/optimize/c0/14/c014f81a8647287f6dcc339321c1256f5a2e82d5_bcbfdccc
enabled=1
gpgcheck=0
priority=1

[delorean-component-podified]
name=delorean-edpm-image-builder-55ba53cf215b14ed95bc80c8e8ed4b29a45fd4ae
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/podified/55/ba/55ba53cf215b14ed95bc80c8e8ed4b29a45fd4ae_419d1901
enabled=1
gpgcheck=0
priority=1

[delorean-component-puppet]
name=delorean-puppet-ceph-b0c245ccde541a63fde0564366c6a8247cf9fb4f
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/puppet/b0/c2/b0c245ccde541a63fde0564366c6a8247cf9fb4f_7cde1ad1
enabled=1
gpgcheck=0
priority=1

[delorean-component-swift]
name=delorean-openstack-swift-dc98a8463506ac520c469adb0ef47d0f7753905a
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/swift/dc/98/dc98a8463506ac520c469adb0ef47d0f7753905a_9d02f069
enabled=1
gpgcheck=0
priority=1

[delorean-component-tempest]
name=delorean-python-tempestconf-8515371b7cceebd4282e09f1d8f0cc842df82855
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/tempest/85/15/8515371b7cceebd4282e09f1d8f0cc842df82855_a1e336c7
enabled=1
gpgcheck=0
priority=1

[delorean-component-ui]
name=delorean-openstack-heat-ui-013accbfd179753bc3f0d1f4e5bed07a4fd9f771
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/ui/01/3a/013accbfd179753bc3f0d1f4e5bed07a4fd9f771_0c88e467
enabled=1
gpgcheck=0
priority=1
home/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/repo-setup-centos-appstream.repo0000644000175000017500000000031615071030056033344 0ustar zuulzuul
[repo-setup-centos-appstream]
name=repo-setup-centos-appstream
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/AppStream/$basearch/os/
gpgcheck=0
enabled=1
home/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/repo-setup-centos-baseos.repo0000644000175000017500000000030415071030056032621 0ustar zuulzuul
[repo-setup-centos-baseos]
name=repo-setup-centos-baseos
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/BaseOS/$basearch/os/
gpgcheck=0
enabled=1
././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/repo-setup-centos-highavailability.repohome/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/repo-setup-centos-highavailabili0000644000175000017500000000034215071030056033340 0ustar zuulzuul
[repo-setup-centos-highavailability]
name=repo-setup-centos-highavailability
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/HighAvailability/$basearch/os/
gpgcheck=0
enabled=1
././@LongLink0000644000000000000000000000014600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/repo-setup-centos-powertools.repohome/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/repo-setup-centos-powertools.rep0000644000175000017500000000031115071030056033401 0ustar zuulzuul
[repo-setup-centos-powertools]
name=repo-setup-centos-powertools
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/CRB/$basearch/os/
gpgcheck=0
enabled=1
home/zuul/zuul-output/logs/selinux-listing.log0000644000175000017500000033426115071030414020733 0ustar zuulzuul/home/zuul/ci-framework-data:
total 8
drwxr-xr-x. 10 zuul zuul unconfined_u:object_r:user_home_t:s0 4096 Oct 6 21:13 artifacts
drwxr-xr-x. 5 zuul zuul unconfined_u:object_r:user_home_t:s0 4096 Oct 6 21:13 logs
drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 24 Oct 6 21:09 tmp
drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:10 volumes

/home/zuul/ci-framework-data/artifacts:
total 924
drwxrwxrwx. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 33 Oct 6 21:13 ansible_facts.2025-10-06_21-13
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 21057 Oct 6 21:12 ansible-facts.yml
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 712716 Oct 6 21:12 ansible-vars.yml
drwxr-xr-x. 2 root root unconfined_u:object_r:user_home_t:s0 33 Oct 6 21:12 ci-env
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 135 Oct 6 21:12 ci_script_000_check_for_oc.sh
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 239 Oct 6 21:12 ci_script_000_copy_logs_from_crc.sh
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 659 Oct 6 21:12 ci_script_000_prepare_root_ssh.sh
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 947 Oct 6 21:10 ci_script_000_run_hook_without_retry.sh
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 564 Oct 6 21:12 ci_script_000_run_openstack_must_gather.sh
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 213 Oct 6 21:11 ci_script_001_fetch_openshift.sh
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1110 Oct 6 21:11 ci_script_002_run_hook_without_retry_fetch.sh
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1099 Oct 6 21:12 ci_script_003_run_hook_without_retry_80.sh
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1118 Oct 6 21:12 ci_script_004_run_hook_without_retry_create.sh
-rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 159 Oct 6 21:12 hosts
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 78908 Oct 6 21:12 installed-packages.yml
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 362 Oct 6 21:10 install_yamls.sh
-rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 1644 Oct 6 21:12 ip-network.txt
drwxr-xr-x. 5 zuul zuul unconfined_u:object_r:user_home_t:s0 65 Oct 6 21:11 manifests
drwxr-xr-x. 2 root root unconfined_u:object_r:user_home_t:s0 70 Oct 6 21:12 NetworkManager
drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 120 Oct 6 21:13 parameters
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 345 Oct 6 21:11 post_infra_fetch_nodes_facts_and_save_the.yml
drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 4096 Oct 6 21:10 repositories
-rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 106 Oct 6 21:12 resolv.conf
drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 33 Oct 6 21:10 roles
drwxr-xr-x. 2 root root unconfined_u:object_r:user_home_t:s0 4096 Oct 6 21:12 yum_repos
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 55317 Oct 6 21:09 zuul_inventory.yml

/home/zuul/ci-framework-data/artifacts/ansible_facts.2025-10-06_21-13:
total 0
drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 57 Oct 6 21:13 ansible_facts_cache

/home/zuul/ci-framework-data/artifacts/ansible_facts.2025-10-06_21-13/ansible_facts_cache:
total 104
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 23431 Oct 6 21:13 compute-0
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 23435 Oct 6 21:13 compute-1
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 57331 Oct 6 21:13 localhost

/home/zuul/ci-framework-data/artifacts/ci-env:
total 4
-rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 2806 Oct 6 21:12 networking-info.yml

/home/zuul/ci-framework-data/artifacts/manifests:
total 0
drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 39 Oct 6 21:13 cert-manager
drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 43 Oct 6 21:11 kustomizations
drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 16 Oct 6 21:10 openstack

/home/zuul/ci-framework-data/artifacts/manifests/cert-manager:
total 464
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 472652 Oct 6 21:11 cert_manager_manifest.yml

/home/zuul/ci-framework-data/artifacts/manifests/kustomizations:
total 0
drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 72 Oct 6 21:13 controlplane
drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 35 Oct 6 21:13 dataplane

/home/zuul/ci-framework-data/artifacts/manifests/kustomizations/controlplane:
total 8
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 305 Oct 6 21:12 80-horizon-kustomization.yaml
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 402 Oct 6 21:11 99-kustomization.yaml

/home/zuul/ci-framework-data/artifacts/manifests/kustomizations/dataplane:
total 4
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 4003 Oct 6 21:11 99-kustomization.yaml

/home/zuul/ci-framework-data/artifacts/manifests/openstack:
total 0
drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:10 cr

/home/zuul/ci-framework-data/artifacts/manifests/openstack/cr:
total 0

/home/zuul/ci-framework-data/artifacts/NetworkManager:
total 8
-rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 331 Oct 6 21:12 ci-private-network.nmconnection
-rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 178 Oct 6 21:12 ens3.nmconnection

/home/zuul/ci-framework-data/artifacts/parameters:
total 64
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 8572 Oct 6 21:10 custom-params.yml
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 28156 Oct 6 21:13 install-yamls-params.yml
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 280 Oct 6 21:11 openshift-login-params.yml
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 19792 Oct 6 21:09 zuul-params.yml

/home/zuul/ci-framework-data/artifacts/repositories:
total 32
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1651 Oct 6 21:10 delorean-antelope-testing.repo
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 5879 Oct 6 21:10 delorean.repo
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 33 Oct 6 21:10 delorean.repo.md5
-rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 206 Oct 6 21:10 repo-setup-centos-appstream.repo
-rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 196 Oct 6 21:10 repo-setup-centos-baseos.repo -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 226 Oct 6 21:10 repo-setup-centos-highavailability.repo -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 201 Oct 6 21:10 repo-setup-centos-powertools.repo /home/zuul/ci-framework-data/artifacts/roles: total 0 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Oct 6 21:10 install_yamls_makes /home/zuul/ci-framework-data/artifacts/roles/install_yamls_makes: total 20 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 16384 Oct 6 21:13 tasks /home/zuul/ci-framework-data/artifacts/roles/install_yamls_makes/tasks: total 1256 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 790 Oct 6 21:10 make_all.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Oct 6 21:10 make_ansibleee_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1090 Oct 6 21:10 make_ansibleee_kuttl_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1045 Oct 6 21:10 make_ansibleee_kuttl_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Oct 6 21:10 make_ansibleee_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Oct 6 21:10 make_ansibleee_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_ansibleee_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Oct 6 21:10 make_ansibleee.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1234 Oct 6 21:10 make_attach_default_interface_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1114 Oct 6 21:10 make_attach_default_interface.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_barbican_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1090 Oct 6 21:10 make_barbican_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1045 Oct 6 21:10 make_barbican_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1105 Oct 6 21:10 make_barbican_deploy_validate.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Oct 6 21:10 make_barbican_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1015 Oct 6 21:10 make_barbican_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_barbican_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Oct 6 21:10 make_barbican_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 865 Oct 6 21:10 make_barbican.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Oct 6 21:10 make_baremetal_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_baremetal_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Oct 6 21:10 make_baremetal.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1219 Oct 6 21:10 make_bmaas_baremetal_net_nad_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1099 Oct 6 21:10 make_bmaas_baremetal_net_nad.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 949 Oct 6 21:10 make_bmaas_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1234 Oct 6 21:10 make_bmaas_crc_attach_network_cleanup.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 1114 Oct 6 21:10 make_bmaas_crc_attach_network.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1264 Oct 6 21:10 make_bmaas_crc_baremetal_bridge_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1144 Oct 6 21:10 make_bmaas_crc_baremetal_bridge.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1129 Oct 6 21:10 make_bmaas_generate_nodes_yaml.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1069 Oct 6 21:10 make_bmaas_metallb_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 949 Oct 6 21:10 make_bmaas_metallb.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1069 Oct 6 21:10 make_bmaas_network_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 949 Oct 6 21:10 make_bmaas_network.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1444 Oct 6 21:10 make_bmaas_route_crc_and_crc_bmaas_networks_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1324 Oct 6 21:10 make_bmaas_route_crc_and_crc_bmaas_networks.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1174 Oct 6 21:10 make_bmaas_sushy_emulator_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1129 Oct 6 21:10 make_bmaas_sushy_emulator_wait.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1054 Oct 6 21:10 make_bmaas_sushy_emulator.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1129 Oct 6 21:10 make_bmaas_virtual_bms_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1009 Oct 6 21:10 make_bmaas_virtual_bms.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 829 Oct 6 21:10 make_bmaas.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Oct 6 21:10 make_ceph_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Oct 6 21:10 make_ceph_help.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 805 Oct 6 21:10 make_ceph.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Oct 6 21:10 make_certmanager_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Oct 6 21:10 make_certmanager.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 949 Oct 6 21:10 make_cifmw_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 949 Oct 6 21:10 make_cifmw_prepare.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_cinder_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Oct 6 21:10 make_cinder_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1015 Oct 6 21:10 make_cinder_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Oct 6 21:10 make_cinder_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_cinder_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Oct 6 21:10 make_cinder_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Oct 6 21:10 make_cinder_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 835 Oct 6 21:10 make_cinder.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 850 Oct 6 21:10 make_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1294 Oct 6 21:10 make_crc_attach_default_interface_cleanup.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 1174 Oct 6 21:10 make_crc_attach_default_interface.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Oct 6 21:10 make_crc_bmo_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Oct 6 21:10 make_crc_bmo_setup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 919 Oct 6 21:10 make_crc_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 889 Oct 6 21:10 make_crc_scrub.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1225 Oct 6 21:10 make_crc_storage_cleanup_with_retries.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Oct 6 21:10 make_crc_storage_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Oct 6 21:10 make_crc_storage_release.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1105 Oct 6 21:10 make_crc_storage_with_retries.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Oct 6 21:10 make_crc_storage.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 799 Oct 6 21:10 make_crc.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Oct 6 21:10 make_designate_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1105 Oct 6 21:10 make_designate_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Oct 6 21:10 make_designate_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_designate_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Oct 6 21:10 make_designate_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Oct 6 21:10 make_designate_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_designate_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Oct 6 21:10 make_designate.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1015 Oct 6 21:10 make_dns_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Oct 6 21:10 make_dns_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 895 Oct 6 21:10 make_dns_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 964 Oct 6 21:10 make_download_tools.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1039 Oct 6 21:10 make_edpm_ansible_runner.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1084 Oct 6 21:10 make_edpm_baremetal_compute.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1024 Oct 6 21:10 make_edpm_compute_bootc.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1054 Oct 6 21:10 make_edpm_compute_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1024 Oct 6 21:10 make_edpm_compute_repos.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1009 Oct 6 21:10 make_edpm_computes_bgp.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 934 Oct 6 21:10 make_edpm_compute.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1135 Oct 6 21:10 make_edpm_deploy_baremetal_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Oct 6 21:10 make_edpm_deploy_baremetal.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Oct 6 21:10 make_edpm_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1120 Oct 6 21:10 make_edpm_deploy_generate_keys.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1054 Oct 6 21:10 make_edpm_deploy_instance.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1180 Oct 6 21:10 make_edpm_deploy_networker_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1135 Oct 6 21:10 make_edpm_deploy_networker_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Oct 6 21:10 make_edpm_deploy_networker.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_edpm_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Oct 6 21:10 make_edpm_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1084 Oct 6 21:10 make_edpm_networker_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 964 Oct 6 21:10 make_edpm_networker.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1105 Oct 6 21:10 make_edpm_nova_discover_hosts.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1210 Oct 6 21:10 make_edpm_patch_ansible_runner_image.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Oct 6 21:10 make_edpm_register_dns.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1135 Oct 6 21:10 make_edpm_wait_deploy_baremetal.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_edpm_wait_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_glance_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Oct 6 21:10 make_glance_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1015 Oct 6 21:10 make_glance_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Oct 6 21:10 make_glance_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_glance_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Oct 6 21:10 make_glance_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Oct 6 21:10 make_glance_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 835 Oct 6 21:10 make_glance.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Oct 6 21:10 make_heat_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Oct 6 21:10 make_heat_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_heat_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Oct 6 21:10 make_heat_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_heat_kuttl_crc.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_heat_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 895 Oct 6 21:10 make_heat_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Oct 6 21:10 make_heat_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 805 Oct 6 21:10 make_heat.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 814 Oct 6 21:10 make_help.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Oct 6 21:10 make_horizon_cleanup.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 1075 Oct 6 21:10 make_horizon_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Oct 6 21:10 make_horizon_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_horizon_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Oct 6 21:10 make_horizon_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Oct 6 21:10 make_horizon_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Oct 6 21:10 make_horizon_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 850 Oct 6 21:10 make_horizon.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Oct 6 21:10 make_infra_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Oct 6 21:10 make_infra_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Oct 6 21:10 make_infra_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 895 Oct 6 21:10 make_infra_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 820 Oct 6 21:10 make_infra.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Oct 6 21:10 make_input_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 820 Oct 6 21:10 make_input.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 994 Oct 6 21:10 make_ipv6_lab_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1189 Oct 6 21:10 make_ipv6_lab_nat64_router_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1069 Oct 6 21:10 make_ipv6_lab_nat64_router.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1114 Oct 6 21:10 make_ipv6_lab_network_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 994 Oct 6 21:10 make_ipv6_lab_network.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1054 Oct 6 21:10 make_ipv6_lab_sno_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 934 Oct 6 21:10 make_ipv6_lab_sno.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 874 Oct 6 21:10 make_ipv6_lab.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_ironic_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Oct 6 21:10 make_ironic_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1015 Oct 6 21:10 make_ironic_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Oct 6 21:10 make_ironic_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_ironic_kuttl_crc.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_ironic_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Oct 6 21:10 make_ironic_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Oct 6 21:10 make_ironic_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 835 Oct 6 21:10 make_ironic.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_keystone_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1090 Oct 6 21:10 make_keystone_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1045 Oct 6 21:10 make_keystone_deploy_prep.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Oct 6 21:10 make_keystone_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1015 Oct 6 21:10 make_keystone_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_keystone_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Oct 6 21:10 make_keystone_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 865 Oct 6 21:10 make_keystone.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1045 Oct 6 21:10 make_kuttl_common_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Oct 6 21:10 make_kuttl_common_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_kuttl_db_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Oct 6 21:10 make_kuttl_db_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Oct 6 21:10 make_loki_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Oct 6 21:10 make_loki_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Oct 6 21:10 make_loki_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 805 Oct 6 21:10 make_loki.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 805 Oct 6 21:10 make_lvms.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_manila_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Oct 6 21:10 make_manila_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1015 Oct 6 21:10 make_manila_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Oct 6 21:10 make_manila_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_manila_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Oct 6 21:10 make_manila_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Oct 6 21:10 make_manila_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 835 Oct 6 21:10 make_manila.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1045 Oct 6 21:10 make_mariadb_chainsaw_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_mariadb_chainsaw.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Oct 6 21:10 make_mariadb_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1075 Oct 6 21:10 make_mariadb_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Oct 6 21:10 make_mariadb_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_mariadb_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Oct 6 21:10 make_mariadb_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Oct 6 21:10 make_mariadb_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 850 Oct 6 21:10 make_mariadb.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1105 Oct 6 21:10 make_memcached_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Oct 6 21:10 make_memcached_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_memcached_deploy.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Oct 6 21:10 make_metallb_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1075 Oct 6 21:10 make_metallb_config_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_metallb_config.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 850 Oct 6 21:10 make_metallb.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Oct 6 21:10 make_namespace_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Oct 6 21:10 make_namespace.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Oct 6 21:10 make_netattach_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Oct 6 21:10 make_netattach.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1105 Oct 6 21:10 make_netconfig_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Oct 6 21:10 make_netconfig_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_netconfig_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Oct 6 21:10 make_netobserv_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1105 Oct 6 21:10 make_netobserv_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_netobserv_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Oct 6 21:10 make_netobserv.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1234 Oct 6 21:10 make_network_isolation_bridge_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1114 Oct 6 21:10 make_network_isolation_bridge.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Oct 6 21:10 make_neutron_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1075 Oct 6 21:10 make_neutron_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Oct 6 21:10 make_neutron_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_neutron_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Oct 6 21:10 make_neutron_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Oct 6 21:10 make_neutron_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Oct 6 21:10 make_neutron_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 850 Oct 6 21:10 make_neutron.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 919 Oct 6 21:10 make_nfs_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 799 Oct 6 21:10 make_nfs.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 850 Oct 6 21:10 make_nmstate.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Oct 6 21:10 make_nncp_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 805 Oct 6 21:10 make_nncp.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Oct 6 21:10 make_nova_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Oct 6 21:10 make_nova_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_nova_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Oct 6 21:10 make_nova_deploy.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Oct 6 21:10 make_nova_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 805 Oct 6 21:10 make_nova.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Oct 6 21:10 make_octavia_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1075 Oct 6 21:10 make_octavia_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Oct 6 21:10 make_octavia_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_octavia_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Oct 6 21:10 make_octavia_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Oct 6 21:10 make_octavia_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Oct 6 21:10 make_octavia_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 850 Oct 6 21:10 make_octavia.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Oct 6 21:10 make_openstack_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1075 Oct 6 21:10 make_openstack_crds_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_openstack_crds.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1105 Oct 6 21:10 make_openstack_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Oct 6 21:10 make_openstack_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_openstack_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_openstack_init.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Oct 6 21:10 make_openstack_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Oct 6 21:10 make_openstack_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1090 Oct 6 21:10 make_openstack_patch_version.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_openstack_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_openstack_repo.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1045 Oct 6 21:10 make_openstack_update_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Oct 6 21:10 make_openstack_wait_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_openstack_wait.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Oct 6 21:10 make_openstack.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1015 Oct 6 21:10 make_operator_namespace.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Oct 6 21:10 make_ovn_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1015 Oct 6 21:10 make_ovn_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Oct 6 21:10 make_ovn_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 895 Oct 6 21:10 make_ovn_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Oct 6 21:10 make_ovn_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Oct 6 21:10 make_ovn_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 865 Oct 6 21:10 make_ovn_prep.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 790 Oct 6 21:10 make_ovn.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Oct 6 21:10 make_placement_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1105 Oct 6 21:10 make_placement_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Oct 6 21:10 make_placement_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_placement_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Oct 6 21:10 make_placement_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Oct 6 21:10 make_placement_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_placement_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Oct 6 21:10 make_placement.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_rabbitmq_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1090 Oct 6 21:10 make_rabbitmq_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1045 Oct 6 21:10 make_rabbitmq_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Oct 6 21:10 make_rabbitmq_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Oct 6 21:10 make_rabbitmq_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 865 Oct 6 21:10 make_rabbitmq.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1045 Oct 6 21:10 make_redis_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Oct 6 21:10 make_redis_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Oct 6 21:10 make_redis_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Oct 6 21:10 make_rook_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Oct 6 21:10 make_rook_crc_disk.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_rook_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Oct 6 21:10 make_rook_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Oct 6 21:10 make_rook_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 805 Oct 6 21:10 make_rook.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1090 Oct 6 21:10 make_set_slower_etcd_profile.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1024 Oct 6 21:10 make_standalone_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1009 Oct 6 21:10 make_standalone_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1009 Oct 6 21:10 make_standalone_revert.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1039 Oct 6 21:10 make_standalone_snapshot.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 979 Oct 6 21:10 make_standalone_sync.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 904 Oct 6 21:10 make_standalone.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Oct 6 21:10 make_swift_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1045 Oct 6 21:10 make_swift_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Oct 6 21:10 make_swift_deploy_prep.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 925 Oct 6 21:10 make_swift_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Oct 6 21:10 make_swift_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 910 Oct 6 21:10 make_swift_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 895 Oct 6 21:10 make_swift_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 820 Oct 6 21:10 make_swift.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1000 Oct 6 21:10 make_telemetry_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1105 Oct 6 21:10 make_telemetry_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1060 Oct 6 21:10 make_telemetry_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 985 Oct 6 21:10 make_telemetry_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1030 Oct 6 21:10 make_telemetry_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Oct 6 21:10 make_telemetry_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 955 Oct 6 21:10 make_telemetry_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 880 Oct 6 21:10 make_telemetry.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 964 Oct 6 21:10 make_tripleo_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 970 Oct 6 21:10 make_update_services.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 940 Oct 6 21:10 make_update_system.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1045 Oct 6 21:10 make_validate_marketplace.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 805 Oct 6 21:10 make_wait.yml /home/zuul/ci-framework-data/artifacts/yum_repos: total 32 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 1651 Oct 6 21:12 delorean-antelope-testing.repo -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 5879 Oct 6 21:12 delorean.repo -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 33 Oct 6 21:12 delorean.repo.md5 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 206 Oct 6 21:12 repo-setup-centos-appstream.repo -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 196 Oct 6 21:12 repo-setup-centos-baseos.repo -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 226 Oct 6 21:12 repo-setup-centos-highavailability.repo -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 201 Oct 6 21:12 repo-setup-centos-powertools.repo /home/zuul/ci-framework-data/logs: total 560 drwxrwxr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 25 Oct 6 21:13 2025-10-06_21-12 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 164531 Oct 6 21:12 ansible.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 164024 Oct 6 21:12 ci_script_000_copy_logs_from_crc.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 154306 Oct 6 21:12 ci_script_000_prepare_root_ssh.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 7930 Oct 6 21:11 ci_script_000_run_hook_without_retry.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 4888 Oct 6 21:12 ci_script_000_run_openstack_must_gather.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 234 Oct 6 21:11 ci_script_001_fetch_openshift.log -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 11879 Oct 6 21:11 ci_script_002_run_hook_without_retry_fetch.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1498 Oct 6 21:12 ci_script_003_run_hook_without_retry_80.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 2742 Oct 6 21:12 ci_script_004_run_hook_without_retry_create.log drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 32 Oct 6 21:12 crc drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 72 Oct 6 21:12 openstack-k8s-operators-openstack-must-gather -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 15637 Oct 6 21:11 post_infra_fetch_nodes_facts_and_save_the.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 2208 Oct 6 21:12 pre_deploy_80_kustomize_openstack_cr.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 3163 Oct 6 21:12 pre_deploy_create_coo_subscription.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 12550 Oct 6 21:11 pre_infra_download_needed_tools.log /home/zuul/ci-framework-data/logs/2025-10-06_21-12: total 164 -rw-rw-rw-. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 164531 Oct 6 21:12 ansible.log /home/zuul/ci-framework-data/logs/crc: total 0 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 18 Oct 6 21:12 crc-logs-artifacts /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts: total 16 drwxr-xr-x. 86 zuul zuul unconfined_u:object_r:user_home_t:s0 12288 Oct 6 21:12 pods /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods: total 12 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 37 Oct 6 21:12 cert-manager_cert-manager-67c98b89c8-4rplv_c282850f-1ba4-47b7-ae36-8e423c6cc9c2 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 37 Oct 6 21:12 cert-manager_cert-manager-cainjector-5c5695d979-h44lv_a24f2023-bbe9-44a0-b17c-b662dacc6f34 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 34 Oct 6 21:12 cert-manager_cert-manager-webhook-7f9f8648b9-lnppn_a088e670-59a9-490f-b5df-321b48308e10 drwxr-xr-x. 6 zuul zuul unconfined_u:object_r:user_home_t:s0 108 Oct 6 21:12 hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787 drwxr-xr-x. 5 zuul zuul unconfined_u:object_r:user_home_t:s0 105 Oct 6 21:12 openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 42 Oct 6 21:12 openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 29 Oct 6 21:12 openshift-authentication_oauth-openshift-74fc7c67cc-xqf8b_01feb2e0-a0f4-4573-8335-34e364e0ef40 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 37 Oct 6 21:12 openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 64 Oct 6 21:12 openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 76 Oct 6 21:12 openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 38 Oct 6 21:12 openshift-cluster-version_cluster-version-operator-6d5d9649f6-x6d46_9fb762d1-812f-43f1-9eac-68034c1ecec7 drwxr-xr-x. 
4 zuul zuul unconfined_u:object_r:user_home_t:s0 60 Oct 6 21:12 openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 21 Oct 6 21:12 openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 29 Oct 6 21:12 openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 39 Oct 6 21:12 openshift-console-operator_console-conversion-webhook-595f9969b-l6z49_59748b9b-c309-4712-aa85-bb38d71c4915 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 30 Oct 6 21:12 openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 32 Oct 6 21:12 openshift-controller-manager_controller-manager-778975cc4f-x5vcf_1a3e81c3-c292-4130-9436-f94062c91efd drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 51 Oct 6 21:12 openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 40 Oct 6 21:12 openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 31 Oct 6 21:12 openshift-dns_node-resolver-dn27q_6a23c0ee-5648-448c-b772-83dced2891ce drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 49 Oct 6 21:12 openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb drwxr-xr-x. 9 zuul zuul unconfined_u:object_r:user_home_t:s0 140 Oct 6 21:12 openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 27 Oct 6 21:12 openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 45 Oct 6 21:12 openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 22 Oct 6 21:12 openshift-image-registry_image-registry-75b7bb6564-f79cx_58adb7b7-cf6f-4b5a-a15c-00368dc5d229 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 21 Oct 6 21:12 openshift-image-registry_node-ca-l92hr_f8175ef1-0983-4bfe-a64e-fc6f5c5f7d2e drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 38 Oct 6 21:12 openshift-ingress-canary_ingress-canary-2vhcn_0b5d722a-1123-4935-9740-52a08d018bc9 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 53 Oct 6 21:12 openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 20 Oct 6 21:12 openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 23 Oct 6 21:12 openshift-kube-apiserver_installer-12-crc_3557248c-8f70-4165-aa66-8df983e7e01a drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 23 Oct 6 21:12 openshift-kube-apiserver_installer-13-crc_e4ccb32c-914e-4f5c-9d1d-50cee1da7ce8 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 23 Oct 6 21:12 openshift-kube-apiserver_installer-9-crc_2ad657a4-8b02-4373-8d0d-b0e25345dc90 drwxr-xr-x. 
8 zuul zuul unconfined_u:object_r:user_home_t:s0 4096 Oct 6 21:12 openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 29 Oct 6 21:12 openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_7dae59545f22b3fb679a7fbf878a6379 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 37 Oct 6 21:12 openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 23 Oct 6 21:12 openshift-kube-controller-manager_installer-10-crc_79050916-d488-4806-b556-1b0078b31e53 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 23 Oct 6 21:12 openshift-kube-controller-manager_installer-10-retry-1-crc_dc02677d-deed-4cc9-bb8c-0dd300f83655 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 23 Oct 6 21:12 openshift-kube-controller-manager_installer-11-crc_a45bfab9-f78b-4d72-b5b7-903e60401124 drwxr-xr-x. 6 zuul zuul unconfined_u:object_r:user_home_t:s0 164 Oct 6 21:12 openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 46 Oct 6 21:12 openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 20 Oct 6 21:12 openshift-kube-controller-manager_revision-pruner-10-crc_2f155735-a9be-4621-a5f2-5ab4b6957acd drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 20 Oct 6 21:12 openshift-kube-controller-manager_revision-pruner-11-crc_1784282a-268d-4e44-a766-43281414e2dc drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 20 Oct 6 21:12 openshift-kube-controller-manager_revision-pruner-8-crc_72854c1e-5ae2-4ed6-9e50-ff3bccde2635 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 20 Oct 6 21:12 openshift-kube-controller-manager_revision-pruner-9-crc_a0453d24-e872-43af-9e7a-86227c26d200 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 23 Oct 6 21:12 openshift-kube-scheduler_installer-7-crc_b57cce81-8ea0-4c4d-aae1-ee024d201c15 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 23 Oct 6 21:12 openshift-kube-scheduler_installer-8-crc_aca1f9ff-a685-4a78-b461-3931b757f754 drwxr-xr-x. 6 zuul zuul unconfined_u:object_r:user_home_t:s0 130 Oct 6 21:12 openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 47 Oct 6 21:12 openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 22 Oct 6 21:12 openshift-kube-storage-version-migrator_migrator-f7c6d88df-q2fnv_cf1a8966-f594-490a-9fbb-eec5bafd13d3 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 52 Oct 6 21:12 openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 48 Oct 6 21:12 openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 57 Oct 6 21:12 openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e drwxr-xr-x. 
4 zuul zuul unconfined_u:object_r:user_home_t:s0 47 Oct 6 21:12 openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 62 Oct 6 21:12 openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 58 Oct 6 21:12 openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 60 Oct 6 21:12 openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 35 Oct 6 21:12 openshift-machine-config-operator_machine-config-server-v65wr_bf1a8b70-3856-486f-9912-a2de1d57c3fb drwxr-xr-x. 5 zuul zuul unconfined_u:object_r:user_home_t:s0 77 Oct 6 21:12 openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469 drwxr-xr-x. 5 zuul zuul unconfined_u:object_r:user_home_t:s0 77 Oct 6 21:12 openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 34 Oct 6 21:12 openshift-marketplace_marketplace-operator-8b455464d-bf2z9_58097cde-9416-4150-be4a-25b53f7fb3fc drwxr-xr-x. 5 zuul zuul unconfined_u:object_r:user_home_t:s0 77 Oct 6 21:12 openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0 drwxr-xr-x. 5 zuul zuul unconfined_u:object_r:user_home_t:s0 77 Oct 6 21:12 openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5 drwxr-xr-x. 9 zuul zuul unconfined_u:object_r:user_home_t:s0 4096 Oct 6 21:12 openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 64 Oct 6 21:12 openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 25 Oct 6 21:12 openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 59 Oct 6 21:12 openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 29 Oct 6 21:12 openshift-network-diagnostics_network-check-source-5c5478f8c-vqvt7_d0f40333-c860-4c04-8058-a0bf572dcf12 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 44 Oct 6 21:12 openshift-network-diagnostics_network-check-target-v54bt_34a48baf-1bee-4921-8bb2-9b7320e76f79 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 37 Oct 6 21:12 openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 30 Oct 6 21:12 openshift-network-operator_iptables-alerter-wwpnd_2b6d14a5-ca00-40c7-af7a-051a98a24eed drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 30 Oct 6 21:12 openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2 drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 58 Oct 6 21:12 openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a drwxr-xr-x. 
3 zuul zuul unconfined_u:object_r:user_home_t:s0 30 Oct 6 21:12 openshift-operator-lifecycle-manager_catalog-operator-857456c46-7f5wf_8a5ae51d-d173-4531-8975-f164c975ce1f drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 30 Oct 6 21:12 openshift-operator-lifecycle-manager_collect-profiles-29251935-d7x6j_51936587-a4af-470d-ad92-8ab9062cbc72 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 30 Oct 6 21:12 openshift-operator-lifecycle-manager_collect-profiles-29251950-x8jjd_ad171c4b-8408-4370-8e86-502999788ddb drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 30 Oct 6 21:12 openshift-operator-lifecycle-manager_collect-profiles-29329740-2jhbz_f2d0e16a-3cb6-4824-91ad-bb8d11be9bed drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 26 Oct 6 21:12 openshift-operator-lifecycle-manager_olm-operator-6d8474f75f-x54mh_c085412c-b875-46c9-ae3e-e6b0d8067091 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 27 Oct 6 21:12 openshift-operator-lifecycle-manager_packageserver-8464bcc55b-sjnqz_bd556935-a077-45df-ba3f-d42c39326ccd drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 59 Oct 6 21:12 openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 60 Oct 6 21:12 openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3 drwxr-xr-x. 11 zuul zuul unconfined_u:object_r:user_home_t:s0 4096 Oct 6 21:12 openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 38 Oct 6 21:12 openshift-route-controller-manager_route-controller-manager-776b8b7477-sfpvs_21d29937-debd-4407-b2b1-d1053cb0f342 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 33 Oct 6 21:12 openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 35 Oct 6 21:12 openshift-service-ca_service-ca-666f99b6f-kk8kg_e4a7de23-6134-4044-902a-0900dc04a501 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-67c98b89c8-4rplv_c282850f-1ba4-47b7-ae36-8e423c6cc9c2: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 cert-manager-controller /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-67c98b89c8-4rplv_c282850f-1ba4-47b7-ae36-8e423c6cc9c2/cert-manager-controller: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-cainjector-5c5695d979-h44lv_a24f2023-bbe9-44a0-b17c-b662dacc6f34: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 cert-manager-cainjector /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-cainjector-5c5695d979-h44lv_a24f2023-bbe9-44a0-b17c-b662dacc6f34/cert-manager-cainjector: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-webhook-7f9f8648b9-lnppn_a088e670-59a9-490f-b5df-321b48308e10: total 0 drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 cert-manager-webhook /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/cert-manager_cert-manager-webhook-7f9f8648b9-lnppn_a088e670-59a9-490f-b5df-321b48308e10/cert-manager-webhook: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 csi-provisioner drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 hostpath-provisioner drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 liveness-probe drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 node-driver-registrar /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/csi-provisioner: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/hostpath-provisioner: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/liveness-probe: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/hostpath-provisioner_csi-hostpathplugin-hvm8g_12e733dd-0939-4f1b-9cbb-13897e093787/node-driver-registrar: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 fix-audit-permissions drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 openshift-apiserver drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 openshift-apiserver-check-endpoints /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/fix-audit-permissions: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver_apiserver-7fc54b8dd7-d2bhp_41e8708a-e40d-4d28-846b-c52eda4d1755/openshift-apiserver-check-endpoints: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 openshift-apiserver-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-apiserver-operator_openshift-apiserver-operator-7c88c4c865-kn67m_43ae1c37-047b-4ee2-9fee-41e337dd4ac8/openshift-apiserver-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-74fc7c67cc-xqf8b_01feb2e0-a0f4-4573-8335-34e364e0ef40: total 0 drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 oauth-openshift /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication_oauth-openshift-74fc7c67cc-xqf8b_01feb2e0-a0f4-4573-8335-34e364e0ef40/oauth-openshift: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 authentication-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-authentication-operator_authentication-operator-7cc7ff75d5-g9qv8_ebf09b15-4bb1-44bf-9d54-e76fad5cf76e/authentication-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-rbac-proxy drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 machine-approver-controller /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/kube-rbac-proxy: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-machine-approver_machine-approver-7874c8775-kh4j9_ec1bae8b-3200-4ad9-b33b-cf8701f3027c/machine-approver-controller: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 cluster-samples-operator drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 cluster-samples-operator-watch /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-samples-operator_cluster-samples-operator-bc474d5d6-wshwg_f728c15e-d8de-4a9a-a3ea-fdcead95cb91/cluster-samples-operator-watch: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-version_cluster-version-operator-6d5d9649f6-x6d46_9fb762d1-812f-43f1-9eac-68034c1ecec7: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 cluster-version-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-cluster-version_cluster-version-operator-6d5d9649f6-x6d46_9fb762d1-812f-43f1-9eac-68034c1ecec7/cluster-version-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 openshift-api drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 openshift-config-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-api: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-config-operator_openshift-config-operator-77658b5b66-dq5sc_530553aa-0a1d-423e-8a22-f5eb4bdbb883/openshift-config-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 console /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_console-644bb77b49-5x5xk_9e649ef6-bbda-4ad9-8a09-ac3803dd0cc1/console: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 download-server /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console_downloads-65476884b9-9wcvx_6268b7fe-8910-4505-b404-6f1df638105c/download-server: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console-operator_console-conversion-webhook-595f9969b-l6z49_59748b9b-c309-4712-aa85-bb38d71c4915: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 conversion-webhook-server /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console-operator_console-conversion-webhook-595f9969b-l6z49_59748b9b-c309-4712-aa85-bb38d71c4915/conversion-webhook-server: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 console-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-console-operator_console-operator-5dbbc74dc9-cp5cd_e9127708-ccfd-4891-8a3a-f0cacb77e0f4/console-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-778975cc4f-x5vcf_1a3e81c3-c292-4130-9436-f94062c91efd: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 controller-manager /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager_controller-manager-778975cc4f-x5vcf_1a3e81c3-c292-4130-9436-f94062c91efd/controller-manager: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 openshift-controller-manager-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-controller-manager-operator_openshift-controller-manager-operator-7978d7d7f6-2nt8z_0f394926-bdb9-425c-b36e-264d7fd34550/openshift-controller-manager-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 dns drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-rbac-proxy /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/dns: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_dns-default-gbw49_13045510-8717-4a71-ade4-be95a76440a7/kube-rbac-proxy: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_node-resolver-dn27q_6a23c0ee-5648-448c-b772-83dced2891ce: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 dns-node-resolver /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns_node-resolver-dn27q_6a23c0ee-5648-448c-b772-83dced2891ce/dns-node-resolver: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 dns-operator drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-rbac-proxy /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/dns-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-dns-operator_dns-operator-75f687757b-nz2xb_10603adc-d495-423c-9459-4caa405960bb/kube-rbac-proxy: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 etcd drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 etcdctl drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 etcd-ensure-env-vars drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 etcd-metrics drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 etcd-readyz drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 etcd-resources-copy drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 setup /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcdctl: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-ensure-env-vars: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-metrics: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-readyz: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/etcd-resources-copy: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd_etcd-crc_b2a6a3b2ca08062d24afa4c01aaf9e4f/setup: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d: total 0 drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 etcd-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-etcd-operator_etcd-operator-768d5b5d86-722mg_0b5c38ff-1fa8-4219-994d-15776acd4a4d/etcd-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 cluster-image-registry-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_cluster-image-registry-operator-7769bd8d7d-q5cvv_b54e8941-2fc4-432a-9e51-39684df9089e/cluster-image-registry-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_image-registry-75b7bb6564-f79cx_58adb7b7-cf6f-4b5a-a15c-00368dc5d229: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 registry /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_image-registry-75b7bb6564-f79cx_58adb7b7-cf6f-4b5a-a15c-00368dc5d229/registry: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_node-ca-l92hr_f8175ef1-0983-4bfe-a64e-fc6f5c5f7d2e: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 node-ca /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-image-registry_node-ca-l92hr_f8175ef1-0983-4bfe-a64e-fc6f5c5f7d2e/node-ca: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-canary_ingress-canary-2vhcn_0b5d722a-1123-4935-9740-52a08d018bc9: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 serve-healthcheck-canary /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-canary_ingress-canary-2vhcn_0b5d722a-1123-4935-9740-52a08d018bc9/serve-healthcheck-canary: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 ingress-operator drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-rbac-proxy /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/ingress-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress-operator_ingress-operator-7d46d5bb6d-rrg6t_7d51f445-054a-4e4f-a67b-a828f5a32511/kube-rbac-proxy: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 router /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ingress_router-default-5c9bf7bc58-6jctv_aa90b3c2-febd-4588-a063-7fbbe82f00c1/router: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-12-crc_3557248c-8f70-4165-aa66-8df983e7e01a: total 0 drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 installer /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-12-crc_3557248c-8f70-4165-aa66-8df983e7e01a/installer: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-13-crc_e4ccb32c-914e-4f5c-9d1d-50cee1da7ce8: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 installer /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-13-crc_e4ccb32c-914e-4f5c-9d1d-50cee1da7ce8/installer: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-9-crc_2ad657a4-8b02-4373-8d0d-b0e25345dc90: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 installer /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_installer-9-crc_2ad657a4-8b02-4373-8d0d-b0e25345dc90/installer: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-apiserver drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-apiserver-cert-regeneration-controller drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-apiserver-cert-syncer drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-apiserver-check-endpoints drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-apiserver-insecure-readyz drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 setup /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-cert-regeneration-controller: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-cert-syncer: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-check-endpoints: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/kube-apiserver-insecure-readyz: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-crc_7f3419c3ca30b18b78e8dd2488b00489/setup: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_7dae59545f22b3fb679a7fbf878a6379: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 startup-monitor /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_7dae59545f22b3fb679a7fbf878a6379/startup-monitor: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19: total 0 drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-apiserver-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-apiserver-operator_kube-apiserver-operator-78d54458c4-sc8h7_ed024e5d-8fc2-4c22-803d-73f3c9795f19/kube-apiserver-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-10-crc_79050916-d488-4806-b556-1b0078b31e53: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 installer /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-10-crc_79050916-d488-4806-b556-1b0078b31e53/installer: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-10-retry-1-crc_dc02677d-deed-4cc9-bb8c-0dd300f83655: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 installer /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-10-retry-1-crc_dc02677d-deed-4cc9-bb8c-0dd300f83655/installer: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-11-crc_a45bfab9-f78b-4d72-b5b7-903e60401124: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 installer /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_installer-11-crc_a45bfab9-f78b-4d72-b5b7-903e60401124/installer: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 cluster-policy-controller drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-controller-manager drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-controller-manager-cert-syncer drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-controller-manager-recovery-controller /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/cluster-policy-controller: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-cert-syncer: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_kube-controller-manager-crc_bd6a3a59e513625ca0ae3724df2686bc/kube-controller-manager-recovery-controller: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf: total 0 drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-controller-manager-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager-operator_kube-controller-manager-operator-6f6cb54958-rbddb_c1620f19-8aa3-45cf-931b-7ae0e5cd14cf/kube-controller-manager-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-10-crc_2f155735-a9be-4621-a5f2-5ab4b6957acd: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 pruner /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-10-crc_2f155735-a9be-4621-a5f2-5ab4b6957acd/pruner: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-11-crc_1784282a-268d-4e44-a766-43281414e2dc: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 pruner /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-11-crc_1784282a-268d-4e44-a766-43281414e2dc/pruner: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-8-crc_72854c1e-5ae2-4ed6-9e50-ff3bccde2635: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 pruner /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-8-crc_72854c1e-5ae2-4ed6-9e50-ff3bccde2635/pruner: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-9-crc_a0453d24-e872-43af-9e7a-86227c26d200: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 pruner /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-controller-manager_revision-pruner-9-crc_a0453d24-e872-43af-9e7a-86227c26d200/pruner: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_installer-7-crc_b57cce81-8ea0-4c4d-aae1-ee024d201c15: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 installer /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_installer-7-crc_b57cce81-8ea0-4c4d-aae1-ee024d201c15/installer: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_installer-8-crc_aca1f9ff-a685-4a78-b461-3931b757f754: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 installer /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_installer-8-crc_aca1f9ff-a685-4a78-b461-3931b757f754/installer: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-scheduler drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-scheduler-cert-syncer drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-scheduler-recovery-controller drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 wait-for-host-port /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-cert-syncer: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/kube-scheduler-recovery-controller: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler_openshift-kube-scheduler-crc_6a57a7fb1944b43a6bd11a349520d301/wait-for-host-port: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-scheduler-operator-container /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-scheduler-operator_openshift-kube-scheduler-operator-5d9b995f6b-fcgd7_71af81a9-7d43-49b2-9287-c375900aa905/kube-scheduler-operator-container: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-f7c6d88df-q2fnv_cf1a8966-f594-490a-9fbb-eec5bafd13d3: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 migrator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator_migrator-f7c6d88df-q2fnv_cf1a8966-f594-490a-9fbb-eec5bafd13d3/migrator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-storage-version-migrator-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-kube-storage-version-migrator-operator_kube-storage-version-migrator-operator-686c6c748c-qbnnr_9ae0dfbb-a0a9-45bb-85b5-cd9f94f64fe7/kube-storage-version-migrator-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 control-plane-machine-set-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_control-plane-machine-set-operator-649bd778b4-tt5tw_45a8038e-e7f2-4d93-a6f5-7753aa54e63f/control-plane-machine-set-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-rbac-proxy drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 machine-api-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/kube-rbac-proxy: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-api_machine-api-operator-788b7c6b6c-ctdmb_4f8aa612-9da0-4a2b-911e-6a1764a4e74e/machine-api-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-rbac-proxy-crio drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 setup /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/kube-rbac-proxy-crio: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_kube-rbac-proxy-crio-crc_d3ae206906481b4831fd849b559269c8/setup: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-rbac-proxy drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 machine-config-controller /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/kube-rbac-proxy: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-controller-6df6df6b6b-58shh_297ab9b6-2186-4d5b-a952-2bfd59af63c4/machine-config-controller: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-rbac-proxy drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 machine-config-daemon /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/kube-rbac-proxy: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-daemon-zpnhg_9d0dcce3-d96e-48cb-9b9f-362105911589/machine-config-daemon: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-rbac-proxy drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 machine-config-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/kube-rbac-proxy: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-operator-76788bff89-wkjgm_120b38dc-8236-4fa6-a452-642b8ad738ee/machine-config-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-v65wr_bf1a8b70-3856-486f-9912-a2de1d57c3fb: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 machine-config-server /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-machine-config-operator_machine-config-server-v65wr_bf1a8b70-3856-486f-9912-a2de1d57c3fb/machine-config-server: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 extract-content drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 extract-utilities drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 registry-server /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/extract-content: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/extract-utilities: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_certified-operators-bv2mg_583472c4-5e8a-467a-8610-59e7b7c00469/registry-server: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 extract-content drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 extract-utilities drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 registry-server /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/extract-content: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/extract-utilities: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_community-operators-8wqnf_3d93e8cb-c3fa-4b4a-af3c-0fc0e9100938/registry-server: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_marketplace-operator-8b455464d-bf2z9_58097cde-9416-4150-be4a-25b53f7fb3fc: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 marketplace-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_marketplace-operator-8b455464d-bf2z9_58097cde-9416-4150-be4a-25b53f7fb3fc/marketplace-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0: total 0 drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 extract-content drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 extract-utilities drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 registry-server /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/extract-content: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/extract-utilities: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-marketplace-8mp2f_d511265d-6a8a-436f-8de0-6861d491dfb0/registry-server: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 extract-content drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 extract-utilities drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 registry-server /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/extract-content: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/extract-utilities: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-marketplace_redhat-operators-bm97r_906d3afd-9d41-4eba-b20f-f4bbfda602d5/registry-server: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 bond-cni-plugin drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 cni-plugins drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 egress-router-binary-copy drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-multus-additional-cni-plugins drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 routeoverride-cni drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 whereabouts-cni drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 whereabouts-cni-bincopy /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/bond-cni-plugin: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/cni-plugins: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/egress-router-binary-copy: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/kube-multus-additional-cni-plugins: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/routeoverride-cni: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-additional-cni-plugins-bzj2p_7dbadf0a-ba02-47d6-96a9-0995c1e8e4a8/whereabouts-cni-bincopy: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-rbac-proxy drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 multus-admission-controller /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/kube-rbac-proxy: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-admission-controller-6c7c885997-4hbbc_d5025cb4-ddb0-4107-88c1-bcbcdb779ac0/multus-admission-controller: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-multus /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_multus-q88th_475321a1-8b7e-4033-8f72-b05a8b377347/kube-multus: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-rbac-proxy drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 network-metrics-daemon /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/kube-rbac-proxy: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-multus_network-metrics-daemon-qdfr4_a702c6d2-4dde-4077-ab8c-0f8df804bf7a/network-metrics-daemon: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-source-5c5478f8c-vqvt7_d0f40333-c860-4c04-8058-a0bf572dcf12: total 0 drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 check-endpoints /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-source-5c5478f8c-vqvt7_d0f40333-c860-4c04-8058-a0bf572dcf12/check-endpoints: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-v54bt_34a48baf-1bee-4921-8bb2-9b7320e76f79: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 network-check-target-container /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-diagnostics_network-check-target-v54bt_34a48baf-1bee-4921-8bb2-9b7320e76f79/network-check-target-container: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 approver drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 webhook /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/approver: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-node-identity_network-node-identity-7xghp_51a02bbf-2d40-4f84-868a-d399ea18a846/webhook: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-wwpnd_2b6d14a5-ca00-40c7-af7a-051a98a24eed: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 iptables-alerter /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_iptables-alerter-wwpnd_2b6d14a5-ca00-40c7-af7a-051a98a24eed/iptables-alerter: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 network-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-network-operator_network-operator-767c585db5-zd56b_cc291782-27d2-4a74-af79-c7dcb31535d2/network-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 fix-audit-permissions drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 oauth-apiserver /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/fix-audit-permissions: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-oauth-apiserver_apiserver-69c565c9b6-vbdpd_5bacb25d-97b6-4491-8fb4-99feae1d802a/oauth-apiserver: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_catalog-operator-857456c46-7f5wf_8a5ae51d-d173-4531-8975-f164c975ce1f: total 0 drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 catalog-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_catalog-operator-857456c46-7f5wf_8a5ae51d-d173-4531-8975-f164c975ce1f/catalog-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29251935-d7x6j_51936587-a4af-470d-ad92-8ab9062cbc72: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 collect-profiles /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29251935-d7x6j_51936587-a4af-470d-ad92-8ab9062cbc72/collect-profiles: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29251950-x8jjd_ad171c4b-8408-4370-8e86-502999788ddb: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 collect-profiles /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29251950-x8jjd_ad171c4b-8408-4370-8e86-502999788ddb/collect-profiles: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29329740-2jhbz_f2d0e16a-3cb6-4824-91ad-bb8d11be9bed: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 collect-profiles /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_collect-profiles-29329740-2jhbz_f2d0e16a-3cb6-4824-91ad-bb8d11be9bed/collect-profiles: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-6d8474f75f-x54mh_c085412c-b875-46c9-ae3e-e6b0d8067091: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 olm-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_olm-operator-6d8474f75f-x54mh_c085412c-b875-46c9-ae3e-e6b0d8067091/olm-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_packageserver-8464bcc55b-sjnqz_bd556935-a077-45df-ba3f-d42c39326ccd: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 packageserver /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_packageserver-8464bcc55b-sjnqz_bd556935-a077-45df-ba3f-d42c39326ccd/packageserver: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-rbac-proxy drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 package-server-manager /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/kube-rbac-proxy: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-operator-lifecycle-manager_package-server-manager-84d578d794-jw7r2_63eb7413-02c3-4d6e-bb48-e5ffe5ce15be/package-server-manager: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3: total 0 drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-rbac-proxy drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 ovnkube-cluster-manager /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/kube-rbac-proxy: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-control-plane-77c846df58-6l97b_410cf605-1970-4691-9c95-53fdc123b1f3/ovnkube-cluster-manager: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kubecfg-setup drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-rbac-proxy-node drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 kube-rbac-proxy-ovn-metrics drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 nbdb drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 northd drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 ovn-acl-logging drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 ovn-controller drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 ovnkube-controller drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 sbdb /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kubecfg-setup: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kube-rbac-proxy-node: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/kube-rbac-proxy-ovn-metrics: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/nbdb: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/northd: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovn-acl-logging: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovn-controller: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/ovnkube-controller: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-ovn-kubernetes_ovnkube-node-pzrk2_febd3500-e225-469f-9904-26156995b94a/sbdb: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-route-controller-manager_route-controller-manager-776b8b7477-sfpvs_21d29937-debd-4407-b2b1-d1053cb0f342: total 0 drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 route-controller-manager /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-route-controller-manager_route-controller-manager-776b8b7477-sfpvs_21d29937-debd-4407-b2b1-d1053cb0f342/route-controller-manager: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 service-ca-operator /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-service-ca-operator_service-ca-operator-546b4f8984-pwccz_6d67253e-2acd-4bc1-8185-793587da4f17/service-ca-operator: total 0 /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-service-ca_service-ca-666f99b6f-kk8kg_e4a7de23-6134-4044-902a-0900dc04a501: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Oct 6 21:12 service-ca-controller /home/zuul/ci-framework-data/logs/crc/crc-logs-artifacts/pods/openshift-service-ca_service-ca-666f99b6f-kk8kg_e4a7de23-6134-4044-902a-0900dc04a501/service-ca-controller: total 0 /home/zuul/ci-framework-data/logs/openstack-k8s-operators-openstack-must-gather: total 16 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 3336 Oct 6 21:12 event-filter.html -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 4719 Oct 6 21:12 must-gather.logs -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 110 Oct 6 21:12 timestamp /home/zuul/ci-framework-data/tmp: total 0 /home/zuul/ci-framework-data/volumes: total 0 home/zuul/zuul-output/logs/README.html0000644000175000017500000000306615071030427016715 0ustar zuulzuul README for CIFMW Logs

Logs of interest
Generated content of interest
home/zuul/zuul-output/logs/installed-pkgs.log0000644000175000017500000005014315071030430020506 0ustar zuulzuulaardvark-dns-1.16.0-1.el9.x86_64 abattis-cantarell-fonts-0.301-4.el9.noarch acl-2.3.1-4.el9.x86_64 adobe-source-code-pro-fonts-2.030.1.050-12.el9.1.noarch alternatives-1.24-2.el9.x86_64 annobin-12.98-1.el9.x86_64 ansible-core-2.14.18-1.el9.x86_64 apr-1.7.0-12.el9.x86_64 apr-util-1.6.1-23.el9.x86_64 apr-util-bdb-1.6.1-23.el9.x86_64 apr-util-openssl-1.6.1-23.el9.x86_64 attr-2.5.1-3.el9.x86_64 audit-3.1.5-7.el9.x86_64 audit-libs-3.1.5-7.el9.x86_64 authselect-1.2.6-3.el9.x86_64 authselect-compat-1.2.6-3.el9.x86_64 authselect-libs-1.2.6-3.el9.x86_64 avahi-libs-0.8-23.el9.x86_64 basesystem-11-13.el9.noarch bash-5.1.8-9.el9.x86_64 bash-completion-2.11-5.el9.noarch binutils-2.35.2-67.el9.x86_64 binutils-gold-2.35.2-67.el9.x86_64 buildah-1.41.3-1.el9.x86_64 bzip2-1.0.8-10.el9.x86_64 bzip2-libs-1.0.8-10.el9.x86_64 ca-certificates-2024.2.69_v8.0.303-91.4.el9.noarch c-ares-1.19.1-2.el9.x86_64 centos-gpg-keys-9.0-30.el9.noarch centos-logos-90.8-3.el9.x86_64 centos-stream-release-9.0-30.el9.noarch centos-stream-repos-9.0-30.el9.noarch checkpolicy-3.6-1.el9.x86_64 chrony-4.6.1-2.el9.x86_64 cloud-init-24.4-7.el9.noarch cloud-utils-growpart-0.33-1.el9.x86_64 cmake-filesystem-3.26.5-2.el9.x86_64 cockpit-bridge-347-1.el9.noarch cockpit-system-347-1.el9.noarch cockpit-ws-347-1.el9.x86_64 cockpit-ws-selinux-347-1.el9.x86_64 conmon-2.1.13-1.el9.x86_64 containers-common-1-134.el9.x86_64 containers-common-extra-1-134.el9.x86_64 container-selinux-2.242.0-1.el9.noarch coreutils-8.32-39.el9.x86_64 coreutils-common-8.32-39.el9.x86_64 cpio-2.13-16.el9.x86_64 cpp-11.5.0-11.el9.x86_64 cracklib-2.9.6-27.el9.x86_64 cracklib-dicts-2.9.6-27.el9.x86_64 createrepo_c-0.20.1-4.el9.x86_64 createrepo_c-libs-0.20.1-4.el9.x86_64 criu-3.19-3.el9.x86_64 criu-libs-3.19-3.el9.x86_64 cronie-1.5.7-14.el9.x86_64 cronie-anacron-1.5.7-14.el9.x86_64 crontabs-1.11-26.20190603git.el9.noarch crun-1.24-1.el9.x86_64 crypto-policies-20250905-1.git377cc42.el9.noarch crypto-policies-scripts-20250905-1.git377cc42.el9.noarch cryptsetup-libs-2.8.1-2.el9.x86_64 curl-7.76.1-34.el9.x86_64 cyrus-sasl-2.1.27-21.el9.x86_64 cyrus-sasl-devel-2.1.27-21.el9.x86_64 cyrus-sasl-gssapi-2.1.27-21.el9.x86_64 cyrus-sasl-lib-2.1.27-21.el9.x86_64 dbus-1.12.20-8.el9.x86_64 dbus-broker-28-7.el9.x86_64 dbus-common-1.12.20-8.el9.noarch dbus-libs-1.12.20-8.el9.x86_64 dbus-tools-1.12.20-8.el9.x86_64 debugedit-5.0-11.el9.x86_64 dejavu-sans-fonts-2.37-18.el9.noarch desktop-file-utils-0.26-6.el9.x86_64 device-mapper-1.02.206-2.el9.x86_64 device-mapper-libs-1.02.206-2.el9.x86_64 dhcp-client-4.4.2-19.b1.el9.x86_64 dhcp-common-4.4.2-19.b1.el9.noarch diffutils-3.7-12.el9.x86_64 dnf-4.14.0-31.el9.noarch dnf-data-4.14.0-31.el9.noarch dnf-plugins-core-4.3.0-23.el9.noarch dracut-057-102.git20250818.el9.x86_64 dracut-config-generic-057-102.git20250818.el9.x86_64 dracut-network-057-102.git20250818.el9.x86_64 dracut-squash-057-102.git20250818.el9.x86_64 dwz-0.16-1.el9.x86_64 e2fsprogs-1.46.5-8.el9.x86_64 e2fsprogs-libs-1.46.5-8.el9.x86_64 ed-1.14.2-12.el9.x86_64 efi-srpm-macros-6-4.el9.noarch elfutils-0.193-1.el9.x86_64 elfutils-debuginfod-client-0.193-1.el9.x86_64 elfutils-default-yama-scope-0.193-1.el9.noarch elfutils-libelf-0.193-1.el9.x86_64 elfutils-libs-0.193-1.el9.x86_64 emacs-filesystem-27.2-18.el9.noarch enchant-1.6.0-30.el9.x86_64 ethtool-6.15-2.el9.x86_64 expat-2.5.0-5.el9.x86_64 expect-5.45.4-16.el9.x86_64 file-5.39-16.el9.x86_64 file-libs-5.39-16.el9.x86_64 
filesystem-3.16-5.el9.x86_64 findutils-4.8.0-7.el9.x86_64 fonts-filesystem-2.0.5-7.el9.1.noarch fonts-srpm-macros-2.0.5-7.el9.1.noarch fuse3-3.10.2-9.el9.x86_64 fuse3-libs-3.10.2-9.el9.x86_64 fuse-common-3.10.2-9.el9.x86_64 fuse-libs-2.9.9-17.el9.x86_64 fuse-overlayfs-1.15-1.el9.x86_64 gawk-5.1.0-6.el9.x86_64 gawk-all-langpacks-5.1.0-6.el9.x86_64 gcc-11.5.0-11.el9.x86_64 gcc-c++-11.5.0-11.el9.x86_64 gcc-plugin-annobin-11.5.0-11.el9.x86_64 gdb-minimal-16.3-2.el9.x86_64 gdbm-libs-1.23-1.el9.x86_64 gdisk-1.0.7-5.el9.x86_64 gdk-pixbuf2-2.42.6-6.el9.x86_64 geolite2-city-20191217-6.el9.noarch geolite2-country-20191217-6.el9.noarch gettext-0.21-8.el9.x86_64 gettext-libs-0.21-8.el9.x86_64 ghc-srpm-macros-1.5.0-6.el9.noarch git-2.47.3-1.el9.x86_64 git-core-2.47.3-1.el9.x86_64 git-core-doc-2.47.3-1.el9.noarch glib2-2.68.4-16.el9.x86_64 glibc-2.34-232.el9.x86_64 glibc-common-2.34-232.el9.x86_64 glibc-devel-2.34-232.el9.x86_64 glibc-gconv-extra-2.34-232.el9.x86_64 glibc-headers-2.34-232.el9.x86_64 glibc-langpack-en-2.34-232.el9.x86_64 glib-networking-2.68.3-3.el9.x86_64 gmp-6.2.0-13.el9.x86_64 gnupg2-2.3.3-4.el9.x86_64 gnutls-3.8.3-9.el9.x86_64 gobject-introspection-1.68.0-11.el9.x86_64 go-srpm-macros-3.6.0-11.el9.noarch gpgme-1.15.1-6.el9.x86_64 gpg-pubkey-8483c65d-5ccc5b19 grep-3.6-5.el9.x86_64 groff-base-1.22.4-10.el9.x86_64 grub2-common-2.06-115.el9.noarch grub2-pc-2.06-115.el9.x86_64 grub2-pc-modules-2.06-115.el9.noarch grub2-tools-2.06-115.el9.x86_64 grub2-tools-minimal-2.06-115.el9.x86_64 grubby-8.40-69.el9.x86_64 gsettings-desktop-schemas-40.0-7.el9.x86_64 gssproxy-0.8.4-7.el9.x86_64 gzip-1.12-1.el9.x86_64 hostname-3.23-6.el9.x86_64 httpd-tools-2.4.62-7.el9.x86_64 hunspell-1.7.0-11.el9.x86_64 hunspell-en-GB-0.20140811.1-20.el9.noarch hunspell-en-US-0.20140811.1-20.el9.noarch hunspell-filesystem-1.7.0-11.el9.x86_64 hwdata-0.348-9.20.el9.noarch ima-evm-utils-1.6.2-2.el9.x86_64 info-6.7-15.el9.x86_64 inih-49-6.el9.x86_64 initscripts-rename-device-10.11.8-4.el9.x86_64 initscripts-service-10.11.8-4.el9.noarch ipcalc-1.0.0-5.el9.x86_64 iproute-6.14.0-2.el9.x86_64 iproute-tc-6.14.0-2.el9.x86_64 iptables-libs-1.8.10-11.el9.x86_64 iptables-nft-1.8.10-11.el9.x86_64 iptables-nft-services-1.8.10-11.el9.noarch iputils-20210202-15.el9.x86_64 irqbalance-1.9.4-4.el9.x86_64 jansson-2.14-1.el9.x86_64 jq-1.6-19.el9.x86_64 json-c-0.14-11.el9.x86_64 json-glib-1.6.6-1.el9.x86_64 kbd-2.4.0-11.el9.x86_64 kbd-legacy-2.4.0-11.el9.noarch kbd-misc-2.4.0-11.el9.noarch kernel-5.14.0-620.el9.x86_64 kernel-core-5.14.0-620.el9.x86_64 kernel-headers-5.14.0-620.el9.x86_64 kernel-modules-5.14.0-620.el9.x86_64 kernel-modules-core-5.14.0-620.el9.x86_64 kernel-srpm-macros-1.0-14.el9.noarch kernel-tools-5.14.0-620.el9.x86_64 kernel-tools-libs-5.14.0-620.el9.x86_64 kexec-tools-2.0.29-10.el9.x86_64 keyutils-1.6.3-1.el9.x86_64 keyutils-libs-1.6.3-1.el9.x86_64 kmod-28-11.el9.x86_64 kmod-libs-28-11.el9.x86_64 kpartx-0.8.7-39.el9.x86_64 krb5-libs-1.21.1-8.el9.x86_64 langpacks-core-en_GB-3.0-16.el9.noarch langpacks-core-font-en-3.0-16.el9.noarch langpacks-en_GB-3.0-16.el9.noarch less-590-6.el9.x86_64 libacl-2.3.1-4.el9.x86_64 libappstream-glib-0.7.18-5.el9.x86_64 libarchive-3.5.3-6.el9.x86_64 libassuan-2.5.5-3.el9.x86_64 libatomic-11.5.0-11.el9.x86_64 libattr-2.5.1-3.el9.x86_64 libbasicobjects-0.1.1-53.el9.x86_64 libblkid-2.37.4-21.el9.x86_64 libbpf-1.5.0-2.el9.x86_64 libbrotli-1.0.9-7.el9.x86_64 libburn-1.5.4-5.el9.x86_64 libcap-2.48-10.el9.x86_64 libcap-ng-0.8.2-7.el9.x86_64 libcbor-0.7.0-5.el9.x86_64 libcollection-0.7.0-53.el9.x86_64 
libcom_err-1.46.5-8.el9.x86_64 libcomps-0.1.18-1.el9.x86_64 libcurl-7.76.1-34.el9.x86_64 libdaemon-0.14-23.el9.x86_64 libdb-5.3.28-57.el9.x86_64 libdhash-0.5.0-53.el9.x86_64 libdnf-0.69.0-16.el9.x86_64 libeconf-0.4.1-4.el9.x86_64 libedit-3.1-38.20210216cvs.el9.x86_64 libestr-0.1.11-4.el9.x86_64 libev-4.33-6.el9.x86_64 libevent-2.1.12-8.el9.x86_64 libfastjson-0.99.9-5.el9.x86_64 libfdisk-2.37.4-21.el9.x86_64 libffi-3.4.2-8.el9.x86_64 libffi-devel-3.4.2-8.el9.x86_64 libfido2-1.13.0-2.el9.x86_64 libgcc-11.5.0-11.el9.x86_64 libgcrypt-1.10.0-11.el9.x86_64 libgomp-11.5.0-11.el9.x86_64 libgpg-error-1.42-5.el9.x86_64 libgpg-error-devel-1.42-5.el9.x86_64 libibverbs-57.0-2.el9.x86_64 libicu-67.1-10.el9.x86_64 libidn2-2.3.0-7.el9.x86_64 libini_config-1.3.1-53.el9.x86_64 libisoburn-1.5.4-5.el9.x86_64 libisofs-1.5.4-4.el9.x86_64 libjpeg-turbo-2.0.90-7.el9.x86_64 libkcapi-1.4.0-2.el9.x86_64 libkcapi-hmaccalc-1.4.0-2.el9.x86_64 libksba-1.5.1-7.el9.x86_64 libldb-4.22.4-6.el9.x86_64 libmaxminddb-1.5.2-4.el9.x86_64 libmnl-1.0.4-16.el9.x86_64 libmodulemd-2.13.0-2.el9.x86_64 libmount-2.37.4-21.el9.x86_64 libmpc-1.2.1-4.el9.x86_64 libndp-1.9-1.el9.x86_64 libnet-1.2-7.el9.x86_64 libnetfilter_conntrack-1.0.9-1.el9.x86_64 libnfnetlink-1.0.1-23.el9.x86_64 libnfsidmap-2.5.4-39.el9.x86_64 libnftnl-1.2.6-4.el9.x86_64 libnghttp2-1.43.0-6.el9.x86_64 libnl3-3.11.0-1.el9.x86_64 libnl3-cli-3.11.0-1.el9.x86_64 libosinfo-1.10.0-1.el9.x86_64 libpath_utils-0.2.1-53.el9.x86_64 libpcap-1.10.0-4.el9.x86_64 libpipeline-1.5.3-4.el9.x86_64 libpkgconf-1.7.3-10.el9.x86_64 libpng-1.6.37-12.el9.x86_64 libproxy-0.4.15-35.el9.x86_64 libproxy-webkitgtk4-0.4.15-35.el9.x86_64 libpsl-0.21.1-5.el9.x86_64 libpwquality-1.4.4-8.el9.x86_64 libref_array-0.1.5-53.el9.x86_64 librepo-1.14.5-3.el9.x86_64 libreport-filesystem-2.15.2-6.el9.noarch libseccomp-2.5.2-2.el9.x86_64 libselinux-3.6-3.el9.x86_64 libselinux-utils-3.6-3.el9.x86_64 libsemanage-3.6-5.el9.x86_64 libsepol-3.6-3.el9.x86_64 libsigsegv-2.13-4.el9.x86_64 libslirp-4.4.0-8.el9.x86_64 libsmartcols-2.37.4-21.el9.x86_64 libsolv-0.7.24-3.el9.x86_64 libsoup-2.72.0-10.el9.x86_64 libss-1.46.5-8.el9.x86_64 libssh-0.10.4-13.el9.x86_64 libssh-config-0.10.4-13.el9.noarch libsss_certmap-2.9.7-4.el9.x86_64 libsss_idmap-2.9.7-4.el9.x86_64 libsss_nss_idmap-2.9.7-4.el9.x86_64 libsss_sudo-2.9.7-4.el9.x86_64 libstdc++-11.5.0-11.el9.x86_64 libstdc++-devel-11.5.0-11.el9.x86_64 libstemmer-0-18.585svn.el9.x86_64 libsysfs-2.1.1-11.el9.x86_64 libtalloc-2.4.3-1.el9.x86_64 libtasn1-4.16.0-9.el9.x86_64 libtdb-1.4.13-1.el9.x86_64 libteam-1.31-16.el9.x86_64 libtevent-0.16.2-1.el9.x86_64 libtirpc-1.3.3-9.el9.x86_64 libtool-ltdl-2.4.6-46.el9.x86_64 libunistring-0.9.10-15.el9.x86_64 liburing-2.5-1.el9.x86_64 libuser-0.63-17.el9.x86_64 libutempter-1.2.1-6.el9.x86_64 libuuid-2.37.4-21.el9.x86_64 libverto-0.3.2-3.el9.x86_64 libverto-libev-0.3.2-3.el9.x86_64 libvirt-client-10.10.0-15.el9.x86_64 libvirt-libs-10.10.0-15.el9.x86_64 libwbclient-4.22.4-6.el9.x86_64 libxcrypt-4.4.18-3.el9.x86_64 libxcrypt-compat-4.4.18-3.el9.x86_64 libxcrypt-devel-4.4.18-3.el9.x86_64 libxml2-2.9.13-12.el9.x86_64 libxml2-devel-2.9.13-12.el9.x86_64 libxslt-1.1.34-12.el9.x86_64 libxslt-devel-1.1.34-12.el9.x86_64 libyaml-0.2.5-7.el9.x86_64 libzstd-1.5.5-1.el9.x86_64 llvm-filesystem-20.1.8-3.el9.x86_64 llvm-libs-20.1.8-3.el9.x86_64 lmdb-libs-0.9.29-3.el9.x86_64 logrotate-3.18.0-12.el9.x86_64 lshw-B.02.20-2.el9.x86_64 lsscsi-0.32-6.el9.x86_64 lua-libs-5.4.4-4.el9.x86_64 lua-srpm-macros-1-6.el9.noarch lz4-libs-1.9.3-5.el9.x86_64 lzo-2.10-7.el9.x86_64 
make-4.3-8.el9.x86_64 man-db-2.9.3-9.el9.x86_64 microcode_ctl-20250812-1.el9.noarch mpfr-4.1.0-7.el9.x86_64 ncurses-6.2-12.20210508.el9.x86_64 ncurses-base-6.2-12.20210508.el9.noarch ncurses-c++-libs-6.2-12.20210508.el9.x86_64 ncurses-devel-6.2-12.20210508.el9.x86_64 ncurses-libs-6.2-12.20210508.el9.x86_64 netavark-1.16.0-1.el9.x86_64 nettle-3.10.1-1.el9.x86_64 NetworkManager-1.54.1-1.el9.x86_64 NetworkManager-libnm-1.54.1-1.el9.x86_64 NetworkManager-team-1.54.1-1.el9.x86_64 NetworkManager-tui-1.54.1-1.el9.x86_64 newt-0.52.21-11.el9.x86_64 nfs-utils-2.5.4-39.el9.x86_64 nftables-1.0.9-4.el9.x86_64 npth-1.6-8.el9.x86_64 numactl-libs-2.0.19-3.el9.x86_64 ocaml-srpm-macros-6-6.el9.noarch oddjob-0.34.7-7.el9.x86_64 oddjob-mkhomedir-0.34.7-7.el9.x86_64 oniguruma-6.9.6-1.el9.6.x86_64 openblas-srpm-macros-2-11.el9.noarch openldap-2.6.8-4.el9.x86_64 openldap-devel-2.6.8-4.el9.x86_64 openssh-9.9p1-1.el9.x86_64 openssh-clients-9.9p1-1.el9.x86_64 openssh-server-9.9p1-1.el9.x86_64 openssl-3.5.1-5.el9.x86_64 openssl-devel-3.5.1-5.el9.x86_64 openssl-fips-provider-3.5.1-5.el9.x86_64 openssl-libs-3.5.1-5.el9.x86_64 osinfo-db-20250606-1.el9.noarch osinfo-db-tools-1.10.0-1.el9.x86_64 os-prober-1.77-12.el9.x86_64 p11-kit-0.25.10-1.el9.x86_64 p11-kit-trust-0.25.10-1.el9.x86_64 PackageKit-1.2.6-1.el9.x86_64 PackageKit-glib-1.2.6-1.el9.x86_64 pam-1.5.1-26.el9.x86_64 parted-3.5-3.el9.x86_64 passt-0^20250512.g8ec1341-2.el9.x86_64 passt-selinux-0^20250512.g8ec1341-2.el9.noarch passwd-0.80-12.el9.x86_64 patch-2.7.6-16.el9.x86_64 pciutils-libs-3.7.0-7.el9.x86_64 pcre2-10.40-6.el9.x86_64 pcre2-syntax-10.40-6.el9.noarch pcre-8.44-4.el9.x86_64 perl-AutoLoader-5.74-483.el9.noarch perl-B-1.80-483.el9.x86_64 perl-base-2.27-483.el9.noarch perl-Carp-1.50-460.el9.noarch perl-Class-Struct-0.66-483.el9.noarch perl-constant-1.33-461.el9.noarch perl-Data-Dumper-2.174-462.el9.x86_64 perl-Digest-1.19-4.el9.noarch perl-Digest-MD5-2.58-4.el9.x86_64 perl-DynaLoader-1.47-483.el9.x86_64 perl-Encode-3.08-462.el9.x86_64 perl-Errno-1.30-483.el9.x86_64 perl-Error-0.17029-7.el9.noarch perl-Exporter-5.74-461.el9.noarch perl-Fcntl-1.13-483.el9.x86_64 perl-File-Basename-2.85-483.el9.noarch perl-File-Find-1.37-483.el9.noarch perl-FileHandle-2.03-483.el9.noarch perl-File-Path-2.18-4.el9.noarch perl-File-stat-1.09-483.el9.noarch perl-File-Temp-0.231.100-4.el9.noarch perl-Getopt-Long-2.52-4.el9.noarch perl-Getopt-Std-1.12-483.el9.noarch perl-Git-2.47.3-1.el9.noarch perl-HTTP-Tiny-0.076-462.el9.noarch perl-if-0.60.800-483.el9.noarch perl-interpreter-5.32.1-483.el9.x86_64 perl-IO-1.43-483.el9.x86_64 perl-IO-Socket-IP-0.41-5.el9.noarch perl-IO-Socket-SSL-2.073-2.el9.noarch perl-IPC-Open3-1.21-483.el9.noarch perl-lib-0.65-483.el9.x86_64 perl-libnet-3.13-4.el9.noarch perl-libs-5.32.1-483.el9.x86_64 perl-MIME-Base64-3.16-4.el9.x86_64 perl-Mozilla-CA-20200520-6.el9.noarch perl-mro-1.23-483.el9.x86_64 perl-NDBM_File-1.15-483.el9.x86_64 perl-Net-SSLeay-1.94-3.el9.x86_64 perl-overload-1.31-483.el9.noarch perl-overloading-0.02-483.el9.noarch perl-parent-0.238-460.el9.noarch perl-PathTools-3.78-461.el9.x86_64 perl-Pod-Escapes-1.07-460.el9.noarch perl-podlators-4.14-460.el9.noarch perl-Pod-Perldoc-3.28.01-461.el9.noarch perl-Pod-Simple-3.42-4.el9.noarch perl-Pod-Usage-2.01-4.el9.noarch perl-POSIX-1.94-483.el9.x86_64 perl-Scalar-List-Utils-1.56-462.el9.x86_64 perl-SelectSaver-1.02-483.el9.noarch perl-Socket-2.031-4.el9.x86_64 perl-srpm-macros-1-41.el9.noarch perl-Storable-3.21-460.el9.x86_64 perl-subs-1.03-483.el9.noarch perl-Symbol-1.08-483.el9.noarch 
perl-Term-ANSIColor-5.01-461.el9.noarch perl-Term-Cap-1.17-460.el9.noarch perl-TermReadKey-2.38-11.el9.x86_64 perl-Text-ParseWords-3.30-460.el9.noarch perl-Text-Tabs+Wrap-2013.0523-460.el9.noarch perl-Time-Local-1.300-7.el9.noarch perl-URI-5.09-3.el9.noarch perl-vars-1.05-483.el9.noarch pigz-2.5-4.el9.x86_64 pkgconf-1.7.3-10.el9.x86_64 pkgconf-m4-1.7.3-10.el9.noarch pkgconf-pkg-config-1.7.3-10.el9.x86_64 podman-5.6.0-2.el9.x86_64 policycoreutils-3.6-3.el9.x86_64 policycoreutils-python-utils-3.6-3.el9.noarch polkit-0.117-14.el9.x86_64 polkit-libs-0.117-14.el9.x86_64 polkit-pkla-compat-0.1-21.el9.x86_64 popt-1.18-8.el9.x86_64 prefixdevname-0.1.0-8.el9.x86_64 procps-ng-3.3.17-14.el9.x86_64 protobuf-c-1.3.3-13.el9.x86_64 psmisc-23.4-3.el9.x86_64 publicsuffix-list-dafsa-20210518-3.el9.noarch pyproject-srpm-macros-1.16.2-1.el9.noarch python3-3.9.23-2.el9.x86_64 python3-argcomplete-1.12.0-5.el9.noarch python3-attrs-20.3.0-7.el9.noarch python3-audit-3.1.5-7.el9.x86_64 python3-babel-2.9.1-2.el9.noarch python3-cffi-1.14.5-5.el9.x86_64 python3-chardet-4.0.0-5.el9.noarch python3-configobj-5.0.6-25.el9.noarch python3-cryptography-36.0.1-5.el9.x86_64 python3-dasbus-1.7-1.el9.noarch python3-dateutil-2.8.1-7.el9.noarch python3-dbus-1.2.18-2.el9.x86_64 python3-devel-3.9.23-2.el9.x86_64 python3-distro-1.5.0-7.el9.noarch python3-dnf-4.14.0-31.el9.noarch python3-dnf-plugins-core-4.3.0-23.el9.noarch python3-enchant-3.2.0-5.el9.noarch python3-file-magic-5.39-16.el9.noarch python3-gobject-base-3.40.1-6.el9.x86_64 python3-gobject-base-noarch-3.40.1-6.el9.noarch python3-gpg-1.15.1-6.el9.x86_64 python3-hawkey-0.69.0-16.el9.x86_64 python3-idna-2.10-7.el9.1.noarch python3-jinja2-2.11.3-8.el9.noarch python3-jmespath-0.9.4-11.el9.noarch python3-jsonpatch-1.21-16.el9.noarch python3-jsonpointer-2.0-4.el9.noarch python3-jsonschema-3.2.0-13.el9.noarch python3-libcomps-0.1.18-1.el9.x86_64 python3-libdnf-0.69.0-16.el9.x86_64 python3-libs-3.9.23-2.el9.x86_64 python3-libselinux-3.6-3.el9.x86_64 python3-libsemanage-3.6-5.el9.x86_64 python3-libvirt-10.10.0-1.el9.x86_64 python3-libxml2-2.9.13-12.el9.x86_64 python3-lxml-4.6.5-3.el9.x86_64 python3-markupsafe-1.1.1-12.el9.x86_64 python3-netaddr-0.10.1-3.el9.noarch python3-netifaces-0.10.6-15.el9.x86_64 python3-oauthlib-3.1.1-5.el9.noarch python3-packaging-20.9-5.el9.noarch python3-pexpect-4.8.0-7.el9.noarch python3-pip-21.3.1-1.el9.noarch python3-pip-wheel-21.3.1-1.el9.noarch python3-ply-3.11-14.el9.noarch python3-policycoreutils-3.6-3.el9.noarch python3-prettytable-0.7.2-27.el9.noarch python3-ptyprocess-0.6.0-12.el9.noarch python3-pycparser-2.20-6.el9.noarch python3-pyparsing-2.4.7-9.el9.noarch python3-pyrsistent-0.17.3-8.el9.x86_64 python3-pyserial-3.4-12.el9.noarch python3-pysocks-1.7.1-12.el9.noarch python3-pytz-2021.1-5.el9.noarch python3-pyyaml-5.4.1-6.el9.x86_64 python3-requests-2.25.1-10.el9.noarch python3-resolvelib-0.5.4-5.el9.noarch python3-rpm-4.16.1.3-39.el9.x86_64 python3-rpm-generators-12-9.el9.noarch python3-rpm-macros-3.9-54.el9.noarch python3-setools-4.4.4-1.el9.x86_64 python3-setuptools-53.0.0-15.el9.noarch python3-setuptools-wheel-53.0.0-15.el9.noarch python3-six-1.15.0-9.el9.noarch python3-systemd-234-19.el9.x86_64 python3-urllib3-1.26.5-6.el9.noarch python-rpm-macros-3.9-54.el9.noarch python-srpm-macros-3.9-54.el9.noarch python-unversioned-command-3.9.23-2.el9.noarch qemu-guest-agent-9.1.0-29.el9.x86_64 qt5-srpm-macros-5.15.9-1.el9.noarch quota-4.09-4.el9.x86_64 quota-nls-4.09-4.el9.noarch readline-8.1-4.el9.x86_64 readline-devel-8.1-4.el9.x86_64 
redhat-rpm-config-210-1.el9.noarch rootfiles-8.1-35.el9.noarch rpcbind-1.2.6-7.el9.x86_64 rpm-4.16.1.3-39.el9.x86_64 rpm-build-4.16.1.3-39.el9.x86_64 rpm-build-libs-4.16.1.3-39.el9.x86_64 rpm-libs-4.16.1.3-39.el9.x86_64 rpmlint-1.11-19.el9.noarch rpm-plugin-audit-4.16.1.3-39.el9.x86_64 rpm-plugin-selinux-4.16.1.3-39.el9.x86_64 rpm-plugin-systemd-inhibit-4.16.1.3-39.el9.x86_64 rpm-sign-4.16.1.3-39.el9.x86_64 rpm-sign-libs-4.16.1.3-39.el9.x86_64 rsync-3.2.5-3.el9.x86_64 rsyslog-8.2506.0-2.el9.x86_64 rsyslog-logrotate-8.2506.0-2.el9.x86_64 ruby-3.0.7-165.el9.x86_64 ruby-default-gems-3.0.7-165.el9.noarch ruby-devel-3.0.7-165.el9.x86_64 rubygem-bigdecimal-3.0.0-165.el9.x86_64 rubygem-bundler-2.2.33-165.el9.noarch rubygem-io-console-0.5.7-165.el9.x86_64 rubygem-json-2.5.1-165.el9.x86_64 rubygem-psych-3.3.2-165.el9.x86_64 rubygem-rdoc-6.3.4.1-165.el9.noarch rubygems-3.2.33-165.el9.noarch ruby-libs-3.0.7-165.el9.x86_64 rust-srpm-macros-17-4.el9.noarch samba-client-libs-4.22.4-6.el9.x86_64 samba-common-4.22.4-6.el9.noarch samba-common-libs-4.22.4-6.el9.x86_64 sed-4.8-9.el9.x86_64 selinux-policy-38.1.65-1.el9.noarch selinux-policy-targeted-38.1.65-1.el9.noarch setroubleshoot-plugins-3.3.14-4.el9.noarch setroubleshoot-server-3.3.35-2.el9.x86_64 setup-2.13.7-10.el9.noarch sg3_utils-1.47-10.el9.x86_64 sg3_utils-libs-1.47-10.el9.x86_64 shadow-utils-4.9-15.el9.x86_64 shadow-utils-subid-4.9-15.el9.x86_64 shared-mime-info-2.1-5.el9.x86_64 skopeo-1.20.0-1.el9.x86_64 slang-2.3.2-11.el9.x86_64 slirp4netns-1.3.3-1.el9.x86_64 snappy-1.1.8-8.el9.x86_64 sos-4.10.0-2.el9.noarch sqlite-3.34.1-8.el9.x86_64 sqlite-libs-3.34.1-8.el9.x86_64 squashfs-tools-4.4-10.git1.el9.x86_64 sscg-3.0.0-10.el9.x86_64 sshpass-1.09-4.el9.x86_64 sssd-client-2.9.7-4.el9.x86_64 sssd-common-2.9.7-4.el9.x86_64 sssd-kcm-2.9.7-4.el9.x86_64 sssd-nfs-idmap-2.9.7-4.el9.x86_64 sudo-1.9.5p2-13.el9.x86_64 systemd-252-55.el9.x86_64 systemd-devel-252-55.el9.x86_64 systemd-libs-252-55.el9.x86_64 systemd-pam-252-55.el9.x86_64 systemd-rpm-macros-252-55.el9.noarch systemd-udev-252-55.el9.x86_64 tar-1.34-7.el9.x86_64 tcl-8.6.10-7.el9.x86_64 tcpdump-4.99.0-9.el9.x86_64 teamd-1.31-16.el9.x86_64 time-1.9-18.el9.x86_64 tmux-3.2a-5.el9.x86_64 tpm2-tss-3.2.3-1.el9.x86_64 traceroute-2.1.1-1.el9.x86_64 tzdata-2025b-2.el9.noarch unzip-6.0-59.el9.x86_64 userspace-rcu-0.12.1-6.el9.x86_64 util-linux-2.37.4-21.el9.x86_64 util-linux-core-2.37.4-21.el9.x86_64 vim-minimal-8.2.2637-22.el9.x86_64 virt-install-5.0.0-1.el9.noarch virt-manager-common-5.0.0-1.el9.noarch webkit2gtk3-jsc-2.48.5-1.el9.x86_64 wget-1.21.1-8.el9.x86_64 which-2.21-30.el9.x86_64 xfsprogs-6.4.0-7.el9.x86_64 xmlstarlet-1.6.1-20.el9.x86_64 xorriso-1.5.4-5.el9.x86_64 xz-5.2.5-8.el9.x86_64 xz-devel-5.2.5-8.el9.x86_64 xz-libs-5.2.5-8.el9.x86_64 yajl-2.1.0-25.el9.x86_64 yum-4.14.0-31.el9.noarch yum-utils-4.3.0-23.el9.noarch zip-3.0-35.el9.x86_64 zlib-1.2.11-41.el9.x86_64 zlib-devel-1.2.11-41.el9.x86_64 zstd-1.5.5-1.el9.x86_64 home/zuul/zuul-output/logs/python.log0000644000175000017500000000522015071030431017103 0ustar zuulzuulPython 3.9.23 pip 21.3.1 from /usr/lib/python3.9/site-packages/pip (python 3.9) ansible [core 2.15.13] config file = /etc/ansible/ansible.cfg configured module search path = ['/home/zuul/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules'] ansible python module location = /home/zuul/.local/lib/python3.9/site-packages/ansible ansible collection location = /home/zuul/.ansible/collections:/usr/share/ansible/collections executable location = /home/zuul/.local/bin/ansible python 
version = 3.9.23 (main, Aug 19 2025, 00:00:00) [GCC 11.5.0 20240719 (Red Hat 11.5.0-11)] (/usr/bin/python3) jinja version = 3.1.6 libyaml = True ansible-core==2.15.13 argcomplete==1.12.0 attrs==25.4.0 autopage==0.5.2 Babel==2.9.1 cachetools==6.2.0 certifi==2025.10.5 cffi==2.0.0 chardet==4.0.0 charset-normalizer==3.4.3 cliff==4.9.1 cloud-init==24.4 cmd2==2.7.0 cockpit @ file:///builddir/build/BUILD/cockpit-347/tmp/wheel/cockpit-347-py3-none-any.whl configobj==5.0.6 cryptography==43.0.3 dasbus==1.7 dbus-python==1.2.18 debtcollector==3.0.0 decorator==5.2.1 distro==1.5.0 dogpile.cache==1.4.1 durationpy==0.10 file-magic==0.4.0 google-auth==2.41.1 gpg==1.15.1 idna==2.10 importlib-resources==5.0.7 importlib_metadata==8.7.0 iso8601==2.1.0 Jinja2==3.1.6 jmespath==1.0.1 jsonpatch==1.21 jsonpointer==2.0 jsonschema==4.23.0 jsonschema-specifications==2025.9.1 keystoneauth1==5.11.1 kubernetes==31.0.0 kubernetes-validate==1.31.0 libcomps==0.1.18 libvirt-python==10.10.0 lxml==4.6.5 markdown-it-py==3.0.0 MarkupSafe==3.0.3 mdurl==0.1.2 msgpack==1.1.1 netaddr==1.3.0 netifaces==0.10.6 oauthlib==3.2.2 openstacksdk==4.1.0 os-service-types==1.7.0 osc-lib==4.0.2 oslo.config==10.0.0 oslo.i18n==6.6.0 oslo.serialization==5.8.0 oslo.utils==9.1.0 packaging==20.9 pbr==7.0.1 pexpect==4.8.0 platformdirs==4.4.0 ply==3.11 prettytable==0.7.2 psutil==7.1.0 ptyprocess==0.6.0 pyasn1==0.6.1 pyasn1_modules==0.4.2 pycparser==2.23 pyenchant==3.2.0 Pygments==2.19.2 PyGObject==3.40.1 pyOpenSSL==24.2.1 pyparsing==2.4.7 pyperclip==1.11.0 pyrsistent==0.17.3 pyserial==3.4 PySocks==1.7.1 python-cinderclient==9.7.0 python-dateutil==2.8.1 python-keystoneclient==5.6.0 python-openstackclient==8.0.0 pytz==2021.1 PyYAML==5.4.1 referencing==0.36.2 requests==2.32.5 requests-oauthlib==2.0.0 requestsexceptions==1.4.0 resolvelib==0.5.4 rfc3986==2.0.0 rich==14.1.0 rich-argparse==1.7.1 rpds-py==0.27.1 rpm==4.16.1.3 rsa==4.9.1 selinux==3.6 sepolicy==3.6 setools==4.4.4 setroubleshoot @ file:///builddir/build/BUILD/setroubleshoot-3.3.35/src six==1.15.0 sos==4.10.0 stevedore==5.5.0 systemd-python==234 typing_extensions==4.15.0 tzdata==2025.2 urllib3==1.26.5 wcwidth==0.2.14 websocket-client==1.8.0 wrapt==1.17.3 zipp==3.23.0 home/zuul/zuul-output/logs/dmesg.log0000644000175000017500000015173415071030431016675 0ustar zuulzuul[Mon Oct 6 20:56:37 2025] Linux version 5.14.0-620.el9.x86_64 (mockbuild@x86-05.stream.rdu2.redhat.com) (gcc (GCC) 11.5.0 20240719 (Red Hat 11.5.0-11), GNU ld version 2.35.2-67.el9) #1 SMP PREEMPT_DYNAMIC Fri Sep 26 01:13:23 UTC 2025 [Mon Oct 6 20:56:37 2025] The list of certified hardware and cloud instances for Red Hat Enterprise Linux 9 can be viewed at the Red Hat Ecosystem Catalog, https://catalog.redhat.com. 
[Mon Oct 6 20:56:37 2025] Command line: BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 root=UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 ro console=ttyS0,115200n8 no_timer_check net.ifnames=0 crashkernel=1G-2G:192M,2G-64G:256M,64G-:512M [Mon Oct 6 20:56:37 2025] BIOS-provided physical RAM map: [Mon Oct 6 20:56:37 2025] BIOS-e820: [mem 0x0000000000000000-0x000000000009fbff] usable [Mon Oct 6 20:56:37 2025] BIOS-e820: [mem 0x000000000009fc00-0x000000000009ffff] reserved [Mon Oct 6 20:56:37 2025] BIOS-e820: [mem 0x00000000000f0000-0x00000000000fffff] reserved [Mon Oct 6 20:56:37 2025] BIOS-e820: [mem 0x0000000000100000-0x00000000bffdafff] usable [Mon Oct 6 20:56:37 2025] BIOS-e820: [mem 0x00000000bffdb000-0x00000000bfffffff] reserved [Mon Oct 6 20:56:37 2025] BIOS-e820: [mem 0x00000000feffc000-0x00000000feffffff] reserved [Mon Oct 6 20:56:37 2025] BIOS-e820: [mem 0x00000000fffc0000-0x00000000ffffffff] reserved [Mon Oct 6 20:56:37 2025] BIOS-e820: [mem 0x0000000100000000-0x000000013fffffff] usable [Mon Oct 6 20:56:37 2025] NX (Execute Disable) protection: active [Mon Oct 6 20:56:37 2025] APIC: Static calls initialized [Mon Oct 6 20:56:37 2025] SMBIOS 2.8 present. [Mon Oct 6 20:56:37 2025] DMI: OpenStack Foundation OpenStack Nova, BIOS 1.15.0-1 04/01/2014 [Mon Oct 6 20:56:37 2025] Hypervisor detected: KVM [Mon Oct 6 20:56:37 2025] kvm-clock: Using msrs 4b564d01 and 4b564d00 [Mon Oct 6 20:56:37 2025] kvm-clock: using sched offset of 3833010969 cycles [Mon Oct 6 20:56:37 2025] clocksource: kvm-clock: mask: 0xffffffffffffffff max_cycles: 0x1cd42e4dffb, max_idle_ns: 881590591483 ns [Mon Oct 6 20:56:37 2025] tsc: Detected 2800.000 MHz processor [Mon Oct 6 20:56:37 2025] e820: update [mem 0x00000000-0x00000fff] usable ==> reserved [Mon Oct 6 20:56:37 2025] e820: remove [mem 0x000a0000-0x000fffff] usable [Mon Oct 6 20:56:37 2025] last_pfn = 0x140000 max_arch_pfn = 0x400000000 [Mon Oct 6 20:56:37 2025] MTRR map: 4 entries (3 fixed + 1 variable; max 19), built from 8 variable MTRRs [Mon Oct 6 20:56:37 2025] x86/PAT: Configuration [0-7]: WB WC UC- UC WB WP UC- WT [Mon Oct 6 20:56:37 2025] last_pfn = 0xbffdb max_arch_pfn = 0x400000000 [Mon Oct 6 20:56:37 2025] found SMP MP-table at [mem 0x000f5b60-0x000f5b6f] [Mon Oct 6 20:56:37 2025] Using GB pages for direct mapping [Mon Oct 6 20:56:37 2025] RAMDISK: [mem 0x2d7c4000-0x32bd9fff] [Mon Oct 6 20:56:37 2025] ACPI: Early table checksum verification disabled [Mon Oct 6 20:56:37 2025] ACPI: RSDP 0x00000000000F5910 000014 (v00 BOCHS ) [Mon Oct 6 20:56:37 2025] ACPI: RSDT 0x00000000BFFE1848 000030 (v01 BOCHS BXPC 00000001 BXPC 00000001) [Mon Oct 6 20:56:37 2025] ACPI: FACP 0x00000000BFFE172C 000074 (v01 BOCHS BXPC 00000001 BXPC 00000001) [Mon Oct 6 20:56:37 2025] ACPI: DSDT 0x00000000BFFE0040 0016EC (v01 BOCHS BXPC 00000001 BXPC 00000001) [Mon Oct 6 20:56:37 2025] ACPI: FACS 0x00000000BFFE0000 000040 [Mon Oct 6 20:56:37 2025] ACPI: APIC 0x00000000BFFE17A0 000080 (v01 BOCHS BXPC 00000001 BXPC 00000001) [Mon Oct 6 20:56:37 2025] ACPI: WAET 0x00000000BFFE1820 000028 (v01 BOCHS BXPC 00000001 BXPC 00000001) [Mon Oct 6 20:56:37 2025] ACPI: Reserving FACP table memory at [mem 0xbffe172c-0xbffe179f] [Mon Oct 6 20:56:37 2025] ACPI: Reserving DSDT table memory at [mem 0xbffe0040-0xbffe172b] [Mon Oct 6 20:56:37 2025] ACPI: Reserving FACS table memory at [mem 0xbffe0000-0xbffe003f] [Mon Oct 6 20:56:37 2025] ACPI: Reserving APIC table memory at [mem 0xbffe17a0-0xbffe181f] [Mon Oct 6 20:56:37 2025] ACPI: Reserving WAET table memory at [mem 0xbffe1820-0xbffe1847] [Mon 
Oct 6 20:56:37 2025] No NUMA configuration found [Mon Oct 6 20:56:37 2025] Faking a node at [mem 0x0000000000000000-0x000000013fffffff] [Mon Oct 6 20:56:37 2025] NODE_DATA(0) allocated [mem 0x13ffd5000-0x13fffffff] [Mon Oct 6 20:56:37 2025] crashkernel reserved: 0x00000000af000000 - 0x00000000bf000000 (256 MB) [Mon Oct 6 20:56:37 2025] Zone ranges: [Mon Oct 6 20:56:37 2025] DMA [mem 0x0000000000001000-0x0000000000ffffff] [Mon Oct 6 20:56:37 2025] DMA32 [mem 0x0000000001000000-0x00000000ffffffff] [Mon Oct 6 20:56:37 2025] Normal [mem 0x0000000100000000-0x000000013fffffff] [Mon Oct 6 20:56:37 2025] Device empty [Mon Oct 6 20:56:37 2025] Movable zone start for each node [Mon Oct 6 20:56:37 2025] Early memory node ranges [Mon Oct 6 20:56:37 2025] node 0: [mem 0x0000000000001000-0x000000000009efff] [Mon Oct 6 20:56:37 2025] node 0: [mem 0x0000000000100000-0x00000000bffdafff] [Mon Oct 6 20:56:37 2025] node 0: [mem 0x0000000100000000-0x000000013fffffff] [Mon Oct 6 20:56:37 2025] Initmem setup node 0 [mem 0x0000000000001000-0x000000013fffffff] [Mon Oct 6 20:56:37 2025] On node 0, zone DMA: 1 pages in unavailable ranges [Mon Oct 6 20:56:37 2025] On node 0, zone DMA: 97 pages in unavailable ranges [Mon Oct 6 20:56:37 2025] On node 0, zone Normal: 37 pages in unavailable ranges [Mon Oct 6 20:56:37 2025] ACPI: PM-Timer IO Port: 0x608 [Mon Oct 6 20:56:37 2025] ACPI: LAPIC_NMI (acpi_id[0xff] dfl dfl lint[0x1]) [Mon Oct 6 20:56:37 2025] IOAPIC[0]: apic_id 0, version 17, address 0xfec00000, GSI 0-23 [Mon Oct 6 20:56:37 2025] ACPI: INT_SRC_OVR (bus 0 bus_irq 0 global_irq 2 dfl dfl) [Mon Oct 6 20:56:37 2025] ACPI: INT_SRC_OVR (bus 0 bus_irq 5 global_irq 5 high level) [Mon Oct 6 20:56:37 2025] ACPI: INT_SRC_OVR (bus 0 bus_irq 9 global_irq 9 high level) [Mon Oct 6 20:56:37 2025] ACPI: INT_SRC_OVR (bus 0 bus_irq 10 global_irq 10 high level) [Mon Oct 6 20:56:37 2025] ACPI: INT_SRC_OVR (bus 0 bus_irq 11 global_irq 11 high level) [Mon Oct 6 20:56:37 2025] ACPI: Using ACPI (MADT) for SMP configuration information [Mon Oct 6 20:56:37 2025] TSC deadline timer available [Mon Oct 6 20:56:37 2025] CPU topo: Max. logical packages: 2 [Mon Oct 6 20:56:37 2025] CPU topo: Max. logical dies: 2 [Mon Oct 6 20:56:37 2025] CPU topo: Max. dies per package: 1 [Mon Oct 6 20:56:37 2025] CPU topo: Max. threads per core: 1 [Mon Oct 6 20:56:37 2025] CPU topo: Num. cores per package: 1 [Mon Oct 6 20:56:37 2025] CPU topo: Num. 
threads per package: 1 [Mon Oct 6 20:56:37 2025] CPU topo: Allowing 2 present CPUs plus 0 hotplug CPUs [Mon Oct 6 20:56:37 2025] kvm-guest: APIC: eoi() replaced with kvm_guest_apic_eoi_write() [Mon Oct 6 20:56:37 2025] PM: hibernation: Registered nosave memory: [mem 0x00000000-0x00000fff] [Mon Oct 6 20:56:37 2025] PM: hibernation: Registered nosave memory: [mem 0x0009f000-0x0009ffff] [Mon Oct 6 20:56:37 2025] PM: hibernation: Registered nosave memory: [mem 0x000a0000-0x000effff] [Mon Oct 6 20:56:37 2025] PM: hibernation: Registered nosave memory: [mem 0x000f0000-0x000fffff] [Mon Oct 6 20:56:37 2025] PM: hibernation: Registered nosave memory: [mem 0xbffdb000-0xbfffffff] [Mon Oct 6 20:56:37 2025] PM: hibernation: Registered nosave memory: [mem 0xc0000000-0xfeffbfff] [Mon Oct 6 20:56:37 2025] PM: hibernation: Registered nosave memory: [mem 0xfeffc000-0xfeffffff] [Mon Oct 6 20:56:37 2025] PM: hibernation: Registered nosave memory: [mem 0xff000000-0xfffbffff] [Mon Oct 6 20:56:37 2025] PM: hibernation: Registered nosave memory: [mem 0xfffc0000-0xffffffff] [Mon Oct 6 20:56:37 2025] [mem 0xc0000000-0xfeffbfff] available for PCI devices [Mon Oct 6 20:56:37 2025] Booting paravirtualized kernel on KVM [Mon Oct 6 20:56:37 2025] clocksource: refined-jiffies: mask: 0xffffffff max_cycles: 0xffffffff, max_idle_ns: 1910969940391419 ns [Mon Oct 6 20:56:37 2025] setup_percpu: NR_CPUS:8192 nr_cpumask_bits:2 nr_cpu_ids:2 nr_node_ids:1 [Mon Oct 6 20:56:37 2025] percpu: Embedded 64 pages/cpu s225280 r8192 d28672 u1048576 [Mon Oct 6 20:56:37 2025] pcpu-alloc: s225280 r8192 d28672 u1048576 alloc=1*2097152 [Mon Oct 6 20:56:37 2025] pcpu-alloc: [0] 0 1 [Mon Oct 6 20:56:37 2025] kvm-guest: PV spinlocks disabled, no host support [Mon Oct 6 20:56:37 2025] Kernel command line: BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 root=UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 ro console=ttyS0,115200n8 no_timer_check net.ifnames=0 crashkernel=1G-2G:192M,2G-64G:256M,64G-:512M [Mon Oct 6 20:56:37 2025] Unknown kernel command line parameters "BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64", will be passed to user space. [Mon Oct 6 20:56:37 2025] random: crng init done [Mon Oct 6 20:56:37 2025] Dentry cache hash table entries: 524288 (order: 10, 4194304 bytes, linear) [Mon Oct 6 20:56:37 2025] Inode-cache hash table entries: 262144 (order: 9, 2097152 bytes, linear) [Mon Oct 6 20:56:37 2025] Fallback order for Node 0: 0 [Mon Oct 6 20:56:37 2025] Built 1 zonelists, mobility grouping on. Total pages: 1031899 [Mon Oct 6 20:56:37 2025] Policy zone: Normal [Mon Oct 6 20:56:37 2025] mem auto-init: stack:off, heap alloc:off, heap free:off [Mon Oct 6 20:56:37 2025] software IO TLB: area num 2. [Mon Oct 6 20:56:37 2025] SLUB: HWalign=64, Order=0-3, MinObjects=0, CPUs=2, Nodes=1 [Mon Oct 6 20:56:37 2025] ftrace: allocating 49370 entries in 193 pages [Mon Oct 6 20:56:37 2025] ftrace: allocated 193 pages with 3 groups [Mon Oct 6 20:56:37 2025] Dynamic Preempt: voluntary [Mon Oct 6 20:56:37 2025] rcu: Preemptible hierarchical RCU implementation. [Mon Oct 6 20:56:37 2025] rcu: RCU event tracing is enabled. [Mon Oct 6 20:56:37 2025] rcu: RCU restricting CPUs from NR_CPUS=8192 to nr_cpu_ids=2. [Mon Oct 6 20:56:37 2025] Trampoline variant of Tasks RCU enabled. [Mon Oct 6 20:56:37 2025] Rude variant of Tasks RCU enabled. [Mon Oct 6 20:56:37 2025] Tracing variant of Tasks RCU enabled. [Mon Oct 6 20:56:37 2025] rcu: RCU calculated value of scheduler-enlistment delay is 100 jiffies. 
[Mon Oct 6 20:56:37 2025] rcu: Adjusting geometry for rcu_fanout_leaf=16, nr_cpu_ids=2 [Mon Oct 6 20:56:37 2025] RCU Tasks: Setting shift to 1 and lim to 1 rcu_task_cb_adjust=1 rcu_task_cpu_ids=2. [Mon Oct 6 20:56:37 2025] RCU Tasks Rude: Setting shift to 1 and lim to 1 rcu_task_cb_adjust=1 rcu_task_cpu_ids=2. [Mon Oct 6 20:56:37 2025] RCU Tasks Trace: Setting shift to 1 and lim to 1 rcu_task_cb_adjust=1 rcu_task_cpu_ids=2. [Mon Oct 6 20:56:37 2025] NR_IRQS: 524544, nr_irqs: 440, preallocated irqs: 16 [Mon Oct 6 20:56:37 2025] rcu: srcu_init: Setting srcu_struct sizes based on contention. [Mon Oct 6 20:56:37 2025] kfence: initialized - using 2097152 bytes for 255 objects at 0x(____ptrval____)-0x(____ptrval____) [Mon Oct 6 20:56:37 2025] Console: colour VGA+ 80x25 [Mon Oct 6 20:56:37 2025] printk: console [ttyS0] enabled [Mon Oct 6 20:56:37 2025] ACPI: Core revision 20230331 [Mon Oct 6 20:56:37 2025] APIC: Switch to symmetric I/O mode setup [Mon Oct 6 20:56:37 2025] x2apic enabled [Mon Oct 6 20:56:37 2025] APIC: Switched APIC routing to: physical x2apic [Mon Oct 6 20:56:37 2025] tsc: Marking TSC unstable due to TSCs unsynchronized [Mon Oct 6 20:56:37 2025] Calibrating delay loop (skipped) preset value.. 5600.00 BogoMIPS (lpj=2800000) [Mon Oct 6 20:56:37 2025] x86/cpu: User Mode Instruction Prevention (UMIP) activated [Mon Oct 6 20:56:37 2025] Last level iTLB entries: 4KB 512, 2MB 255, 4MB 127 [Mon Oct 6 20:56:37 2025] Last level dTLB entries: 4KB 512, 2MB 255, 4MB 127, 1GB 0 [Mon Oct 6 20:56:37 2025] Spectre V1 : Mitigation: usercopy/swapgs barriers and __user pointer sanitization [Mon Oct 6 20:56:37 2025] Spectre V2 : Mitigation: Retpolines [Mon Oct 6 20:56:37 2025] Spectre V2 : Spectre v2 / SpectreRSB: Filling RSB on context switch and VMEXIT [Mon Oct 6 20:56:37 2025] Spectre V2 : Enabling Speculation Barrier for firmware calls [Mon Oct 6 20:56:37 2025] RETBleed: Mitigation: untrained return thunk [Mon Oct 6 20:56:37 2025] Spectre V2 : mitigation: Enabling conditional Indirect Branch Prediction Barrier [Mon Oct 6 20:56:37 2025] Speculative Store Bypass: Mitigation: Speculative Store Bypass disabled via prctl [Mon Oct 6 20:56:37 2025] Speculative Return Stack Overflow: IBPB-extending microcode not applied! [Mon Oct 6 20:56:37 2025] Speculative Return Stack Overflow: WARNING: See https://kernel.org/doc/html/latest/admin-guide/hw-vuln/srso.html for mitigation options. [Mon Oct 6 20:56:37 2025] x86/bugs: return thunk changed [Mon Oct 6 20:56:37 2025] Speculative Return Stack Overflow: Vulnerable: Safe RET, no microcode [Mon Oct 6 20:56:37 2025] x86/fpu: Supporting XSAVE feature 0x001: 'x87 floating point registers' [Mon Oct 6 20:56:37 2025] x86/fpu: Supporting XSAVE feature 0x002: 'SSE registers' [Mon Oct 6 20:56:37 2025] x86/fpu: Supporting XSAVE feature 0x004: 'AVX registers' [Mon Oct 6 20:56:37 2025] x86/fpu: xstate_offset[2]: 576, xstate_sizes[2]: 256 [Mon Oct 6 20:56:37 2025] x86/fpu: Enabled xstate features 0x7, context size is 832 bytes, using 'compacted' format. [Mon Oct 6 20:56:37 2025] Freeing SMP alternatives memory: 40K [Mon Oct 6 20:56:37 2025] pid_max: default: 32768 minimum: 301 [Mon Oct 6 20:56:37 2025] LSM: initializing lsm=lockdown,capability,landlock,yama,integrity,selinux,bpf [Mon Oct 6 20:56:37 2025] landlock: Up and running. [Mon Oct 6 20:56:37 2025] Yama: becoming mindful. [Mon Oct 6 20:56:37 2025] SELinux: Initializing. 
[Mon Oct 6 20:56:37 2025] LSM support for eBPF active [Mon Oct 6 20:56:37 2025] Mount-cache hash table entries: 8192 (order: 4, 65536 bytes, linear) [Mon Oct 6 20:56:37 2025] Mountpoint-cache hash table entries: 8192 (order: 4, 65536 bytes, linear) [Mon Oct 6 20:56:37 2025] smpboot: CPU0: AMD EPYC-Rome Processor (family: 0x17, model: 0x31, stepping: 0x0) [Mon Oct 6 20:56:37 2025] Performance Events: Fam17h+ core perfctr, AMD PMU driver. [Mon Oct 6 20:56:37 2025] ... version: 0 [Mon Oct 6 20:56:37 2025] ... bit width: 48 [Mon Oct 6 20:56:37 2025] ... generic registers: 6 [Mon Oct 6 20:56:37 2025] ... value mask: 0000ffffffffffff [Mon Oct 6 20:56:37 2025] ... max period: 00007fffffffffff [Mon Oct 6 20:56:37 2025] ... fixed-purpose events: 0 [Mon Oct 6 20:56:37 2025] ... event mask: 000000000000003f [Mon Oct 6 20:56:37 2025] signal: max sigframe size: 1776 [Mon Oct 6 20:56:37 2025] rcu: Hierarchical SRCU implementation. [Mon Oct 6 20:56:37 2025] rcu: Max phase no-delay instances is 400. [Mon Oct 6 20:56:37 2025] smp: Bringing up secondary CPUs ... [Mon Oct 6 20:56:37 2025] smpboot: x86: Booting SMP configuration: [Mon Oct 6 20:56:37 2025] .... node #0, CPUs: #1 [Mon Oct 6 20:56:37 2025] smp: Brought up 1 node, 2 CPUs [Mon Oct 6 20:56:37 2025] smpboot: Total of 2 processors activated (11200.00 BogoMIPS) [Mon Oct 6 20:56:37 2025] node 0 deferred pages initialised in 10ms [Mon Oct 6 20:56:37 2025] Memory: 3646860K/4193764K available (16384K kernel code, 5784K rwdata, 13996K rodata, 4068K init, 7304K bss, 543240K reserved, 0K cma-reserved) [Mon Oct 6 20:56:37 2025] devtmpfs: initialized [Mon Oct 6 20:56:37 2025] x86/mm: Memory block size: 128MB [Mon Oct 6 20:56:37 2025] clocksource: jiffies: mask: 0xffffffff max_cycles: 0xffffffff, max_idle_ns: 1911260446275000 ns [Mon Oct 6 20:56:37 2025] futex hash table entries: 512 (order: 3, 32768 bytes, linear) [Mon Oct 6 20:56:37 2025] pinctrl core: initialized pinctrl subsystem [Mon Oct 6 20:56:37 2025] NET: Registered PF_NETLINK/PF_ROUTE protocol family [Mon Oct 6 20:56:37 2025] DMA: preallocated 512 KiB GFP_KERNEL pool for atomic allocations [Mon Oct 6 20:56:37 2025] DMA: preallocated 512 KiB GFP_KERNEL|GFP_DMA pool for atomic allocations [Mon Oct 6 20:56:37 2025] DMA: preallocated 512 KiB GFP_KERNEL|GFP_DMA32 pool for atomic allocations [Mon Oct 6 20:56:37 2025] audit: initializing netlink subsys (disabled) [Mon Oct 6 20:56:37 2025] thermal_sys: Registered thermal governor 'fair_share' [Mon Oct 6 20:56:37 2025] thermal_sys: Registered thermal governor 'step_wise' [Mon Oct 6 20:56:37 2025] thermal_sys: Registered thermal governor 'user_space' [Mon Oct 6 20:56:37 2025] audit: type=2000 audit(1759784197.403:1): state=initialized audit_enabled=0 res=1 [Mon Oct 6 20:56:37 2025] cpuidle: using governor menu [Mon Oct 6 20:56:37 2025] acpiphp: ACPI Hot Plug PCI Controller Driver version: 0.5 [Mon Oct 6 20:56:37 2025] PCI: Using configuration type 1 for base access [Mon Oct 6 20:56:37 2025] PCI: Using configuration type 1 for extended access [Mon Oct 6 20:56:37 2025] kprobes: kprobe jump-optimization is enabled. All kprobes are optimized if possible. 
[Mon Oct 6 20:56:37 2025] HugeTLB: registered 1.00 GiB page size, pre-allocated 0 pages [Mon Oct 6 20:56:37 2025] HugeTLB: 16380 KiB vmemmap can be freed for a 1.00 GiB page [Mon Oct 6 20:56:37 2025] HugeTLB: registered 2.00 MiB page size, pre-allocated 0 pages [Mon Oct 6 20:56:37 2025] HugeTLB: 28 KiB vmemmap can be freed for a 2.00 MiB page [Mon Oct 6 20:56:37 2025] Demotion targets for Node 0: null [Mon Oct 6 20:56:37 2025] cryptd: max_cpu_qlen set to 1000 [Mon Oct 6 20:56:37 2025] ACPI: Added _OSI(Module Device) [Mon Oct 6 20:56:37 2025] ACPI: Added _OSI(Processor Device) [Mon Oct 6 20:56:37 2025] ACPI: Added _OSI(3.0 _SCP Extensions) [Mon Oct 6 20:56:37 2025] ACPI: Added _OSI(Processor Aggregator Device) [Mon Oct 6 20:56:37 2025] ACPI: 1 ACPI AML tables successfully acquired and loaded [Mon Oct 6 20:56:37 2025] ACPI: _OSC evaluation for CPUs failed, trying _PDC [Mon Oct 6 20:56:37 2025] ACPI: Interpreter enabled [Mon Oct 6 20:56:37 2025] ACPI: PM: (supports S0 S3 S4 S5) [Mon Oct 6 20:56:37 2025] ACPI: Using IOAPIC for interrupt routing [Mon Oct 6 20:56:37 2025] PCI: Using host bridge windows from ACPI; if necessary, use "pci=nocrs" and report a bug [Mon Oct 6 20:56:37 2025] PCI: Using E820 reservations for host bridge windows [Mon Oct 6 20:56:37 2025] ACPI: Enabled 2 GPEs in block 00 to 0F [Mon Oct 6 20:56:37 2025] ACPI: PCI Root Bridge [PCI0] (domain 0000 [bus 00-ff]) [Mon Oct 6 20:56:37 2025] acpi PNP0A03:00: _OSC: OS supports [ExtendedConfig ASPM ClockPM Segments MSI EDR HPX-Type3] [Mon Oct 6 20:56:37 2025] acpiphp: Slot [3] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [4] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [5] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [6] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [7] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [8] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [9] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [10] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [11] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [12] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [13] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [14] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [15] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [16] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [17] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [18] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [19] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [20] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [21] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [22] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [23] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [24] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [25] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [26] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [27] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [28] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [29] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [30] registered [Mon Oct 6 20:56:37 2025] acpiphp: Slot [31] registered [Mon Oct 6 20:56:37 2025] PCI host bridge to bus 0000:00 [Mon Oct 6 20:56:37 2025] pci_bus 0000:00: root bus resource [io 0x0000-0x0cf7 window] [Mon Oct 6 20:56:37 2025] pci_bus 0000:00: root bus resource [io 0x0d00-0xffff window] [Mon Oct 6 20:56:37 2025] pci_bus 0000:00: root bus resource [mem 0x000a0000-0x000bffff window] [Mon Oct 6 20:56:37 2025] pci_bus 0000:00: root bus resource [mem 0xc0000000-0xfebfffff window] 
[Mon Oct 6 20:56:37 2025] pci_bus 0000:00: root bus resource [mem 0x140000000-0x1bfffffff window] [Mon Oct 6 20:56:37 2025] pci_bus 0000:00: root bus resource [bus 00-ff] [Mon Oct 6 20:56:37 2025] pci 0000:00:00.0: [8086:1237] type 00 class 0x060000 conventional PCI endpoint [Mon Oct 6 20:56:37 2025] pci 0000:00:01.0: [8086:7000] type 00 class 0x060100 conventional PCI endpoint [Mon Oct 6 20:56:37 2025] pci 0000:00:01.1: [8086:7010] type 00 class 0x010180 conventional PCI endpoint [Mon Oct 6 20:56:37 2025] pci 0000:00:01.1: BAR 4 [io 0xc140-0xc14f] [Mon Oct 6 20:56:37 2025] pci 0000:00:01.1: BAR 0 [io 0x01f0-0x01f7]: legacy IDE quirk [Mon Oct 6 20:56:37 2025] pci 0000:00:01.1: BAR 1 [io 0x03f6]: legacy IDE quirk [Mon Oct 6 20:56:37 2025] pci 0000:00:01.1: BAR 2 [io 0x0170-0x0177]: legacy IDE quirk [Mon Oct 6 20:56:37 2025] pci 0000:00:01.1: BAR 3 [io 0x0376]: legacy IDE quirk [Mon Oct 6 20:56:37 2025] pci 0000:00:01.2: [8086:7020] type 00 class 0x0c0300 conventional PCI endpoint [Mon Oct 6 20:56:37 2025] pci 0000:00:01.2: BAR 4 [io 0xc100-0xc11f] [Mon Oct 6 20:56:37 2025] pci 0000:00:01.3: [8086:7113] type 00 class 0x068000 conventional PCI endpoint [Mon Oct 6 20:56:37 2025] pci 0000:00:01.3: quirk: [io 0x0600-0x063f] claimed by PIIX4 ACPI [Mon Oct 6 20:56:37 2025] pci 0000:00:01.3: quirk: [io 0x0700-0x070f] claimed by PIIX4 SMB [Mon Oct 6 20:56:37 2025] pci 0000:00:02.0: [1af4:1050] type 00 class 0x030000 conventional PCI endpoint [Mon Oct 6 20:56:37 2025] pci 0000:00:02.0: BAR 0 [mem 0xfe000000-0xfe7fffff pref] [Mon Oct 6 20:56:37 2025] pci 0000:00:02.0: BAR 2 [mem 0xfe800000-0xfe803fff 64bit pref] [Mon Oct 6 20:56:37 2025] pci 0000:00:02.0: BAR 4 [mem 0xfeb90000-0xfeb90fff] [Mon Oct 6 20:56:37 2025] pci 0000:00:02.0: ROM [mem 0xfeb80000-0xfeb8ffff pref] [Mon Oct 6 20:56:37 2025] pci 0000:00:02.0: Video device with shadowed ROM at [mem 0x000c0000-0x000dffff] [Mon Oct 6 20:56:37 2025] pci 0000:00:03.0: [1af4:1000] type 00 class 0x020000 conventional PCI endpoint [Mon Oct 6 20:56:37 2025] pci 0000:00:03.0: BAR 0 [io 0xc080-0xc0bf] [Mon Oct 6 20:56:37 2025] pci 0000:00:03.0: BAR 1 [mem 0xfeb91000-0xfeb91fff] [Mon Oct 6 20:56:37 2025] pci 0000:00:03.0: BAR 4 [mem 0xfe804000-0xfe807fff 64bit pref] [Mon Oct 6 20:56:37 2025] pci 0000:00:03.0: ROM [mem 0xfeb00000-0xfeb7ffff pref] [Mon Oct 6 20:56:37 2025] pci 0000:00:04.0: [1af4:1001] type 00 class 0x010000 conventional PCI endpoint [Mon Oct 6 20:56:37 2025] pci 0000:00:04.0: BAR 0 [io 0xc000-0xc07f] [Mon Oct 6 20:56:37 2025] pci 0000:00:04.0: BAR 1 [mem 0xfeb92000-0xfeb92fff] [Mon Oct 6 20:56:37 2025] pci 0000:00:04.0: BAR 4 [mem 0xfe808000-0xfe80bfff 64bit pref] [Mon Oct 6 20:56:37 2025] pci 0000:00:05.0: [1af4:1002] type 00 class 0x00ff00 conventional PCI endpoint [Mon Oct 6 20:56:37 2025] pci 0000:00:05.0: BAR 0 [io 0xc0c0-0xc0ff] [Mon Oct 6 20:56:37 2025] pci 0000:00:05.0: BAR 4 [mem 0xfe80c000-0xfe80ffff 64bit pref] [Mon Oct 6 20:56:37 2025] pci 0000:00:06.0: [1af4:1005] type 00 class 0x00ff00 conventional PCI endpoint [Mon Oct 6 20:56:37 2025] pci 0000:00:06.0: BAR 0 [io 0xc120-0xc13f] [Mon Oct 6 20:56:37 2025] pci 0000:00:06.0: BAR 4 [mem 0xfe810000-0xfe813fff 64bit pref] [Mon Oct 6 20:56:37 2025] ACPI: PCI: Interrupt link LNKA configured for IRQ 10 [Mon Oct 6 20:56:37 2025] ACPI: PCI: Interrupt link LNKB configured for IRQ 10 [Mon Oct 6 20:56:37 2025] ACPI: PCI: Interrupt link LNKC configured for IRQ 11 [Mon Oct 6 20:56:37 2025] ACPI: PCI: Interrupt link LNKD configured for IRQ 11 [Mon Oct 6 20:56:37 2025] ACPI: PCI: Interrupt link LNKS 
configured for IRQ 9 [Mon Oct 6 20:56:37 2025] iommu: Default domain type: Translated [Mon Oct 6 20:56:37 2025] iommu: DMA domain TLB invalidation policy: lazy mode [Mon Oct 6 20:56:37 2025] SCSI subsystem initialized [Mon Oct 6 20:56:37 2025] ACPI: bus type USB registered [Mon Oct 6 20:56:37 2025] usbcore: registered new interface driver usbfs [Mon Oct 6 20:56:37 2025] usbcore: registered new interface driver hub [Mon Oct 6 20:56:37 2025] usbcore: registered new device driver usb [Mon Oct 6 20:56:37 2025] pps_core: LinuxPPS API ver. 1 registered [Mon Oct 6 20:56:37 2025] pps_core: Software ver. 5.3.6 - Copyright 2005-2007 Rodolfo Giometti [Mon Oct 6 20:56:37 2025] PTP clock support registered [Mon Oct 6 20:56:37 2025] EDAC MC: Ver: 3.0.0 [Mon Oct 6 20:56:37 2025] NetLabel: Initializing [Mon Oct 6 20:56:37 2025] NetLabel: domain hash size = 128 [Mon Oct 6 20:56:37 2025] NetLabel: protocols = UNLABELED CIPSOv4 CALIPSO [Mon Oct 6 20:56:37 2025] NetLabel: unlabeled traffic allowed by default [Mon Oct 6 20:56:37 2025] PCI: Using ACPI for IRQ routing [Mon Oct 6 20:56:37 2025] PCI: pci_cache_line_size set to 64 bytes [Mon Oct 6 20:56:37 2025] e820: reserve RAM buffer [mem 0x0009fc00-0x0009ffff] [Mon Oct 6 20:56:37 2025] e820: reserve RAM buffer [mem 0xbffdb000-0xbfffffff] [Mon Oct 6 20:56:37 2025] pci 0000:00:02.0: vgaarb: setting as boot VGA device [Mon Oct 6 20:56:37 2025] pci 0000:00:02.0: vgaarb: bridge control possible [Mon Oct 6 20:56:37 2025] pci 0000:00:02.0: vgaarb: VGA device added: decodes=io+mem,owns=io+mem,locks=none [Mon Oct 6 20:56:37 2025] vgaarb: loaded [Mon Oct 6 20:56:37 2025] clocksource: Switched to clocksource kvm-clock [Mon Oct 6 20:56:37 2025] VFS: Disk quotas dquot_6.6.0 [Mon Oct 6 20:56:37 2025] VFS: Dquot-cache hash table entries: 512 (order 0, 4096 bytes) [Mon Oct 6 20:56:37 2025] pnp: PnP ACPI init [Mon Oct 6 20:56:37 2025] pnp 00:03: [dma 2] [Mon Oct 6 20:56:37 2025] pnp: PnP ACPI: found 5 devices [Mon Oct 6 20:56:37 2025] clocksource: acpi_pm: mask: 0xffffff max_cycles: 0xffffff, max_idle_ns: 2085701024 ns [Mon Oct 6 20:56:37 2025] NET: Registered PF_INET protocol family [Mon Oct 6 20:56:37 2025] IP idents hash table entries: 65536 (order: 7, 524288 bytes, linear) [Mon Oct 6 20:56:37 2025] tcp_listen_portaddr_hash hash table entries: 2048 (order: 3, 32768 bytes, linear) [Mon Oct 6 20:56:37 2025] Table-perturb hash table entries: 65536 (order: 6, 262144 bytes, linear) [Mon Oct 6 20:56:37 2025] TCP established hash table entries: 32768 (order: 6, 262144 bytes, linear) [Mon Oct 6 20:56:37 2025] TCP bind hash table entries: 32768 (order: 7, 524288 bytes, linear) [Mon Oct 6 20:56:37 2025] TCP: Hash tables configured (established 32768 bind 32768) [Mon Oct 6 20:56:37 2025] MPTCP token hash table entries: 4096 (order: 4, 98304 bytes, linear) [Mon Oct 6 20:56:37 2025] UDP hash table entries: 2048 (order: 4, 65536 bytes, linear) [Mon Oct 6 20:56:37 2025] UDP-Lite hash table entries: 2048 (order: 4, 65536 bytes, linear) [Mon Oct 6 20:56:37 2025] NET: Registered PF_UNIX/PF_LOCAL protocol family [Mon Oct 6 20:56:37 2025] NET: Registered PF_XDP protocol family [Mon Oct 6 20:56:37 2025] pci_bus 0000:00: resource 4 [io 0x0000-0x0cf7 window] [Mon Oct 6 20:56:37 2025] pci_bus 0000:00: resource 5 [io 0x0d00-0xffff window] [Mon Oct 6 20:56:37 2025] pci_bus 0000:00: resource 6 [mem 0x000a0000-0x000bffff window] [Mon Oct 6 20:56:37 2025] pci_bus 0000:00: resource 7 [mem 0xc0000000-0xfebfffff window] [Mon Oct 6 20:56:37 2025] pci_bus 0000:00: resource 8 [mem 0x140000000-0x1bfffffff 
window] [Mon Oct 6 20:56:37 2025] pci 0000:00:01.0: PIIX3: Enabling Passive Release [Mon Oct 6 20:56:37 2025] pci 0000:00:00.0: Limiting direct PCI/PCI transfers [Mon Oct 6 20:56:37 2025] ACPI: \_SB_.LNKD: Enabled at IRQ 11 [Mon Oct 6 20:56:37 2025] pci 0000:00:01.2: quirk_usb_early_handoff+0x0/0x140 took 80725 usecs [Mon Oct 6 20:56:37 2025] PCI: CLS 0 bytes, default 64 [Mon Oct 6 20:56:37 2025] PCI-DMA: Using software bounce buffering for IO (SWIOTLB) [Mon Oct 6 20:56:37 2025] software IO TLB: mapped [mem 0x00000000ab000000-0x00000000af000000] (64MB) [Mon Oct 6 20:56:37 2025] ACPI: bus type thunderbolt registered [Mon Oct 6 20:56:37 2025] Trying to unpack rootfs image as initramfs... [Mon Oct 6 20:56:37 2025] Initialise system trusted keyrings [Mon Oct 6 20:56:37 2025] Key type blacklist registered [Mon Oct 6 20:56:37 2025] workingset: timestamp_bits=36 max_order=20 bucket_order=0 [Mon Oct 6 20:56:37 2025] zbud: loaded [Mon Oct 6 20:56:37 2025] integrity: Platform Keyring initialized [Mon Oct 6 20:56:37 2025] integrity: Machine keyring initialized [Mon Oct 6 20:56:37 2025] Freeing initrd memory: 86104K [Mon Oct 6 20:56:37 2025] NET: Registered PF_ALG protocol family [Mon Oct 6 20:56:37 2025] xor: automatically using best checksumming function avx [Mon Oct 6 20:56:37 2025] Key type asymmetric registered [Mon Oct 6 20:56:37 2025] Asymmetric key parser 'x509' registered [Mon Oct 6 20:56:37 2025] Block layer SCSI generic (bsg) driver version 0.4 loaded (major 246) [Mon Oct 6 20:56:37 2025] io scheduler mq-deadline registered [Mon Oct 6 20:56:37 2025] io scheduler kyber registered [Mon Oct 6 20:56:37 2025] io scheduler bfq registered [Mon Oct 6 20:56:37 2025] atomic64_test: passed for x86-64 platform with CX8 and with SSE [Mon Oct 6 20:56:37 2025] shpchp: Standard Hot Plug PCI Controller Driver version: 0.4 [Mon Oct 6 20:56:37 2025] input: Power Button as /devices/LNXSYSTM:00/LNXPWRBN:00/input/input0 [Mon Oct 6 20:56:37 2025] ACPI: button: Power Button [PWRF] [Mon Oct 6 20:56:38 2025] ACPI: \_SB_.LNKB: Enabled at IRQ 10 [Mon Oct 6 20:56:38 2025] ACPI: \_SB_.LNKC: Enabled at IRQ 11 [Mon Oct 6 20:56:38 2025] ACPI: \_SB_.LNKA: Enabled at IRQ 10 [Mon Oct 6 20:56:38 2025] Serial: 8250/16550 driver, 4 ports, IRQ sharing enabled [Mon Oct 6 20:56:38 2025] 00:00: ttyS0 at I/O 0x3f8 (irq = 4, base_baud = 115200) is a 16550A [Mon Oct 6 20:56:38 2025] Non-volatile memory driver v1.3 [Mon Oct 6 20:56:38 2025] rdac: device handler registered [Mon Oct 6 20:56:38 2025] hp_sw: device handler registered [Mon Oct 6 20:56:38 2025] emc: device handler registered [Mon Oct 6 20:56:38 2025] alua: device handler registered [Mon Oct 6 20:56:38 2025] uhci_hcd 0000:00:01.2: UHCI Host Controller [Mon Oct 6 20:56:38 2025] uhci_hcd 0000:00:01.2: new USB bus registered, assigned bus number 1 [Mon Oct 6 20:56:38 2025] uhci_hcd 0000:00:01.2: detected 2 ports [Mon Oct 6 20:56:38 2025] uhci_hcd 0000:00:01.2: irq 11, io port 0x0000c100 [Mon Oct 6 20:56:38 2025] usb usb1: New USB device found, idVendor=1d6b, idProduct=0001, bcdDevice= 5.14 [Mon Oct 6 20:56:38 2025] usb usb1: New USB device strings: Mfr=3, Product=2, SerialNumber=1 [Mon Oct 6 20:56:38 2025] usb usb1: Product: UHCI Host Controller [Mon Oct 6 20:56:38 2025] usb usb1: Manufacturer: Linux 5.14.0-620.el9.x86_64 uhci_hcd [Mon Oct 6 20:56:38 2025] usb usb1: SerialNumber: 0000:00:01.2 [Mon Oct 6 20:56:38 2025] hub 1-0:1.0: USB hub found [Mon Oct 6 20:56:38 2025] hub 1-0:1.0: 2 ports detected [Mon Oct 6 20:56:38 2025] usbcore: registered new interface driver 
usbserial_generic [Mon Oct 6 20:56:38 2025] usbserial: USB Serial support registered for generic [Mon Oct 6 20:56:38 2025] i8042: PNP: PS/2 Controller [PNP0303:KBD,PNP0f13:MOU] at 0x60,0x64 irq 1,12 [Mon Oct 6 20:56:38 2025] serio: i8042 KBD port at 0x60,0x64 irq 1 [Mon Oct 6 20:56:38 2025] serio: i8042 AUX port at 0x60,0x64 irq 12 [Mon Oct 6 20:56:38 2025] mousedev: PS/2 mouse device common for all mice [Mon Oct 6 20:56:38 2025] rtc_cmos 00:04: RTC can wake from S4 [Mon Oct 6 20:56:38 2025] rtc_cmos 00:04: registered as rtc0 [Mon Oct 6 20:56:38 2025] rtc_cmos 00:04: setting system clock to 2025-10-06T20:56:38 UTC (1759784198) [Mon Oct 6 20:56:38 2025] rtc_cmos 00:04: alarms up to one day, y3k, 242 bytes nvram [Mon Oct 6 20:56:38 2025] amd_pstate: the _CPC object is not present in SBIOS or ACPI disabled [Mon Oct 6 20:56:38 2025] hid: raw HID events driver (C) Jiri Kosina [Mon Oct 6 20:56:38 2025] usbcore: registered new interface driver usbhid [Mon Oct 6 20:56:38 2025] usbhid: USB HID core driver [Mon Oct 6 20:56:38 2025] drop_monitor: Initializing network drop monitor service [Mon Oct 6 20:56:38 2025] input: AT Translated Set 2 keyboard as /devices/platform/i8042/serio0/input/input1 [Mon Oct 6 20:56:38 2025] input: VirtualPS/2 VMware VMMouse as /devices/platform/i8042/serio1/input/input4 [Mon Oct 6 20:56:38 2025] input: VirtualPS/2 VMware VMMouse as /devices/platform/i8042/serio1/input/input3 [Mon Oct 6 20:56:38 2025] Initializing XFRM netlink socket [Mon Oct 6 20:56:38 2025] NET: Registered PF_INET6 protocol family [Mon Oct 6 20:56:38 2025] Segment Routing with IPv6 [Mon Oct 6 20:56:38 2025] NET: Registered PF_PACKET protocol family [Mon Oct 6 20:56:38 2025] mpls_gso: MPLS GSO support [Mon Oct 6 20:56:38 2025] IPI shorthand broadcast: enabled [Mon Oct 6 20:56:38 2025] AVX2 version of gcm_enc/dec engaged. [Mon Oct 6 20:56:38 2025] AES CTR mode by8 optimization enabled [Mon Oct 6 20:56:38 2025] sched_clock: Marking stable (1153014350, 146653360)->(1410889370, -111221660) [Mon Oct 6 20:56:38 2025] registered taskstats version 1 [Mon Oct 6 20:56:38 2025] Loading compiled-in X.509 certificates [Mon Oct 6 20:56:38 2025] Loaded X.509 cert 'The CentOS Project: CentOS Stream kernel signing key: 4ff821c4997fbb659836adb05f5bc400c914e148' [Mon Oct 6 20:56:38 2025] Loaded X.509 cert 'Red Hat Enterprise Linux Driver Update Program (key 3): bf57f3e87362bc7229d9f465321773dfd1f77a80' [Mon Oct 6 20:56:38 2025] Loaded X.509 cert 'Red Hat Enterprise Linux kpatch signing key: 4d38fd864ebe18c5f0b72e3852e2014c3a676fc8' [Mon Oct 6 20:56:38 2025] Loaded X.509 cert 'RH-IMA-CA: Red Hat IMA CA: fb31825dd0e073685b264e3038963673f753959a' [Mon Oct 6 20:56:38 2025] Loaded X.509 cert 'Nvidia GPU OOT signing 001: 55e1cef88193e60419f0b0ec379c49f77545acf0' [Mon Oct 6 20:56:38 2025] Demotion targets for Node 0: null [Mon Oct 6 20:56:38 2025] page_owner is disabled [Mon Oct 6 20:56:38 2025] Key type .fscrypt registered [Mon Oct 6 20:56:38 2025] Key type fscrypt-provisioning registered [Mon Oct 6 20:56:38 2025] Key type big_key registered [Mon Oct 6 20:56:38 2025] Key type encrypted registered [Mon Oct 6 20:56:38 2025] ima: No TPM chip found, activating TPM-bypass! 
[Mon Oct 6 20:56:38 2025] Loading compiled-in module X.509 certificates [Mon Oct 6 20:56:38 2025] Loaded X.509 cert 'The CentOS Project: CentOS Stream kernel signing key: 4ff821c4997fbb659836adb05f5bc400c914e148' [Mon Oct 6 20:56:38 2025] ima: Allocated hash algorithm: sha256 [Mon Oct 6 20:56:38 2025] ima: No architecture policies found [Mon Oct 6 20:56:38 2025] evm: Initialising EVM extended attributes: [Mon Oct 6 20:56:38 2025] evm: security.selinux [Mon Oct 6 20:56:38 2025] evm: security.SMACK64 (disabled) [Mon Oct 6 20:56:38 2025] evm: security.SMACK64EXEC (disabled) [Mon Oct 6 20:56:38 2025] evm: security.SMACK64TRANSMUTE (disabled) [Mon Oct 6 20:56:38 2025] evm: security.SMACK64MMAP (disabled) [Mon Oct 6 20:56:38 2025] evm: security.apparmor (disabled) [Mon Oct 6 20:56:38 2025] evm: security.ima [Mon Oct 6 20:56:38 2025] evm: security.capability [Mon Oct 6 20:56:38 2025] evm: HMAC attrs: 0x1 [Mon Oct 6 20:56:38 2025] usb 1-1: new full-speed USB device number 2 using uhci_hcd [Mon Oct 6 20:56:38 2025] Running certificate verification RSA selftest [Mon Oct 6 20:56:38 2025] Loaded X.509 cert 'Certificate verification self-testing key: f58703bb33ce1b73ee02eccdee5b8817518fe3db' [Mon Oct 6 20:56:38 2025] Running certificate verification ECDSA selftest [Mon Oct 6 20:56:38 2025] Loaded X.509 cert 'Certificate verification ECDSA self-testing key: 2900bcea1deb7bc8479a84a23d758efdfdd2b2d3' [Mon Oct 6 20:56:38 2025] clk: Disabling unused clocks [Mon Oct 6 20:56:38 2025] Freeing unused decrypted memory: 2028K [Mon Oct 6 20:56:38 2025] Freeing unused kernel image (initmem) memory: 4068K [Mon Oct 6 20:56:38 2025] Write protecting the kernel read-only data: 30720k [Mon Oct 6 20:56:38 2025] Freeing unused kernel image (rodata/data gap) memory: 340K [Mon Oct 6 20:56:38 2025] x86/mm: Checked W+X mappings: passed, no W+X pages found. [Mon Oct 6 20:56:38 2025] Run /init as init process [Mon Oct 6 20:56:38 2025] with arguments: [Mon Oct 6 20:56:38 2025] /init [Mon Oct 6 20:56:38 2025] with environment: [Mon Oct 6 20:56:38 2025] HOME=/ [Mon Oct 6 20:56:38 2025] TERM=linux [Mon Oct 6 20:56:38 2025] BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 [Mon Oct 6 20:56:38 2025] systemd[1]: systemd 252-55.el9 running in system mode (+PAM +AUDIT +SELINUX -APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN -IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -BPF_FRAMEWORK +XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified) [Mon Oct 6 20:56:38 2025] systemd[1]: Detected virtualization kvm. [Mon Oct 6 20:56:38 2025] systemd[1]: Detected architecture x86-64. [Mon Oct 6 20:56:38 2025] systemd[1]: Running in initrd. [Mon Oct 6 20:56:38 2025] systemd[1]: No hostname configured, using default hostname. [Mon Oct 6 20:56:38 2025] systemd[1]: Hostname set to . [Mon Oct 6 20:56:38 2025] systemd[1]: Initializing machine ID from VM UUID. 
[Mon Oct 6 20:56:38 2025] usb 1-1: New USB device found, idVendor=0627, idProduct=0001, bcdDevice= 0.00 [Mon Oct 6 20:56:38 2025] usb 1-1: New USB device strings: Mfr=1, Product=3, SerialNumber=10 [Mon Oct 6 20:56:38 2025] usb 1-1: Product: QEMU USB Tablet [Mon Oct 6 20:56:38 2025] usb 1-1: Manufacturer: QEMU [Mon Oct 6 20:56:38 2025] usb 1-1: SerialNumber: 28754-0000:00:01.2-1 [Mon Oct 6 20:56:38 2025] input: QEMU QEMU USB Tablet as /devices/pci0000:00/0000:00:01.2/usb1/1-1/1-1:1.0/0003:0627:0001.0001/input/input5 [Mon Oct 6 20:56:38 2025] hid-generic 0003:0627:0001.0001: input,hidraw0: USB HID v0.01 Mouse [QEMU QEMU USB Tablet] on usb-0000:00:01.2-1/input0 [Mon Oct 6 20:56:38 2025] systemd[1]: Queued start job for default target Initrd Default Target. [Mon Oct 6 20:56:38 2025] systemd[1]: Started Dispatch Password Requests to Console Directory Watch. [Mon Oct 6 20:56:38 2025] systemd[1]: Reached target Local Encrypted Volumes. [Mon Oct 6 20:56:38 2025] systemd[1]: Reached target Initrd /usr File System. [Mon Oct 6 20:56:38 2025] systemd[1]: Reached target Local File Systems. [Mon Oct 6 20:56:38 2025] systemd[1]: Reached target Path Units. [Mon Oct 6 20:56:38 2025] systemd[1]: Reached target Slice Units. [Mon Oct 6 20:56:38 2025] systemd[1]: Reached target Swaps. [Mon Oct 6 20:56:38 2025] systemd[1]: Reached target Timer Units. [Mon Oct 6 20:56:38 2025] systemd[1]: Listening on D-Bus System Message Bus Socket. [Mon Oct 6 20:56:38 2025] systemd[1]: Listening on Journal Socket (/dev/log). [Mon Oct 6 20:56:38 2025] systemd[1]: Listening on Journal Socket. [Mon Oct 6 20:56:38 2025] systemd[1]: Listening on udev Control Socket. [Mon Oct 6 20:56:38 2025] systemd[1]: Listening on udev Kernel Socket. [Mon Oct 6 20:56:38 2025] systemd[1]: Reached target Socket Units. [Mon Oct 6 20:56:38 2025] systemd[1]: Starting Create List of Static Device Nodes... [Mon Oct 6 20:56:38 2025] systemd[1]: Starting Journal Service... [Mon Oct 6 20:56:38 2025] systemd[1]: Load Kernel Modules was skipped because no trigger condition checks were met. [Mon Oct 6 20:56:38 2025] systemd[1]: Starting Apply Kernel Variables... [Mon Oct 6 20:56:38 2025] systemd[1]: Starting Create System Users... [Mon Oct 6 20:56:38 2025] systemd[1]: Starting Setup Virtual Console... [Mon Oct 6 20:56:38 2025] systemd[1]: Finished Create List of Static Device Nodes. [Mon Oct 6 20:56:38 2025] systemd[1]: Finished Apply Kernel Variables. [Mon Oct 6 20:56:38 2025] systemd[1]: Started Journal Service. [Mon Oct 6 20:56:39 2025] device-mapper: core: CONFIG_IMA_DISABLE_HTABLE is disabled. Duplicate IMA measurements will not be recorded in the IMA log. [Mon Oct 6 20:56:39 2025] device-mapper: uevent: version 1.0.3 [Mon Oct 6 20:56:39 2025] device-mapper: ioctl: 4.50.0-ioctl (2025-04-28) initialised: dm-devel@lists.linux.dev [Mon Oct 6 20:56:39 2025] RPC: Registered named UNIX socket transport module. [Mon Oct 6 20:56:39 2025] RPC: Registered udp transport module. [Mon Oct 6 20:56:39 2025] RPC: Registered tcp transport module. [Mon Oct 6 20:56:39 2025] RPC: Registered tcp-with-tls transport module. [Mon Oct 6 20:56:39 2025] RPC: Registered tcp NFSv4.1 backchannel transport module. [Mon Oct 6 20:56:39 2025] virtio_blk virtio2: 2/0/0 default/read/poll queues [Mon Oct 6 20:56:39 2025] virtio_blk virtio2: [vda] 83886080 512-byte logical blocks (42.9 GB/40.0 GiB) [Mon Oct 6 20:56:39 2025] vda: vda1 [Mon Oct 6 20:56:39 2025] libata version 3.00 loaded. 
[Mon Oct 6 20:56:39 2025] ata_piix 0000:00:01.1: version 2.13 [Mon Oct 6 20:56:39 2025] scsi host0: ata_piix [Mon Oct 6 20:56:39 2025] scsi host1: ata_piix [Mon Oct 6 20:56:39 2025] ata1: PATA max MWDMA2 cmd 0x1f0 ctl 0x3f6 bmdma 0xc140 irq 14 lpm-pol 0 [Mon Oct 6 20:56:39 2025] ata2: PATA max MWDMA2 cmd 0x170 ctl 0x376 bmdma 0xc148 irq 15 lpm-pol 0 [Mon Oct 6 20:56:39 2025] ata1: found unknown device (class 0) [Mon Oct 6 20:56:39 2025] ata1.00: ATAPI: QEMU DVD-ROM, 2.5+, max UDMA/100 [Mon Oct 6 20:56:39 2025] scsi 0:0:0:0: CD-ROM QEMU QEMU DVD-ROM 2.5+ PQ: 0 ANSI: 5 [Mon Oct 6 20:56:40 2025] scsi 0:0:0:0: Attached scsi generic sg0 type 5 [Mon Oct 6 20:56:40 2025] sr 0:0:0:0: [sr0] scsi3-mmc drive: 4x/4x cd/rw xa/form2 tray [Mon Oct 6 20:56:40 2025] cdrom: Uniform CD-ROM driver Revision: 3.20 [Mon Oct 6 20:56:40 2025] sr 0:0:0:0: Attached scsi CD-ROM sr0 [Mon Oct 6 20:56:40 2025] SGI XFS with ACLs, security attributes, scrub, quota, no debug enabled [Mon Oct 6 20:56:40 2025] XFS (vda1): Mounting V5 Filesystem 1631a6ad-43b8-436d-ae76-16fa14b94458 [Mon Oct 6 20:56:40 2025] XFS (vda1): Ending clean mount [Mon Oct 6 20:56:40 2025] systemd-journald[243]: Received SIGTERM from PID 1 (systemd). [Mon Oct 6 20:56:41 2025] audit: type=1404 audit(1759784201.348:2): enforcing=1 old_enforcing=0 auid=4294967295 ses=4294967295 enabled=1 old-enabled=1 lsm=selinux res=1 [Mon Oct 6 20:56:41 2025] SELinux: policy capability network_peer_controls=1 [Mon Oct 6 20:56:41 2025] SELinux: policy capability open_perms=1 [Mon Oct 6 20:56:41 2025] SELinux: policy capability extended_socket_class=1 [Mon Oct 6 20:56:41 2025] SELinux: policy capability always_check_network=0 [Mon Oct 6 20:56:41 2025] SELinux: policy capability cgroup_seclabel=1 [Mon Oct 6 20:56:41 2025] SELinux: policy capability nnp_nosuid_transition=1 [Mon Oct 6 20:56:41 2025] SELinux: policy capability genfs_seclabel_symlinks=1 [Mon Oct 6 20:56:41 2025] audit: type=1403 audit(1759784201.500:3): auid=4294967295 ses=4294967295 lsm=selinux res=1 [Mon Oct 6 20:56:41 2025] systemd[1]: Successfully loaded SELinux policy in 156.203ms. [Mon Oct 6 20:56:41 2025] systemd[1]: Relabelled /dev, /dev/shm, /run, /sys/fs/cgroup in 27.777ms. [Mon Oct 6 20:56:41 2025] systemd[1]: systemd 252-55.el9 running in system mode (+PAM +AUDIT +SELINUX -APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN -IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -BPF_FRAMEWORK +XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified) [Mon Oct 6 20:56:41 2025] systemd[1]: Detected virtualization kvm. [Mon Oct 6 20:56:41 2025] systemd[1]: Detected architecture x86-64. [Mon Oct 6 20:56:41 2025] systemd-rc-local-generator[547]: /etc/rc.d/rc.local is not marked executable, skipping. [Mon Oct 6 20:56:41 2025] systemd[1]: initrd-switch-root.service: Deactivated successfully. [Mon Oct 6 20:56:41 2025] systemd[1]: Stopped Switch Root. [Mon Oct 6 20:56:41 2025] systemd[1]: systemd-journald.service: Scheduled restart job, restart counter is at 1. [Mon Oct 6 20:56:41 2025] systemd[1]: Created slice Slice /system/getty. [Mon Oct 6 20:56:41 2025] systemd[1]: Created slice Slice /system/serial-getty. [Mon Oct 6 20:56:41 2025] systemd[1]: Created slice Slice /system/sshd-keygen. [Mon Oct 6 20:56:41 2025] systemd[1]: Created slice User and Session Slice. [Mon Oct 6 20:56:41 2025] systemd[1]: Started Dispatch Password Requests to Console Directory Watch. 
[Mon Oct 6 20:56:41 2025] systemd[1]: Started Forward Password Requests to Wall Directory Watch. [Mon Oct 6 20:56:41 2025] systemd[1]: Set up automount Arbitrary Executable File Formats File System Automount Point. [Mon Oct 6 20:56:41 2025] systemd[1]: Reached target Local Encrypted Volumes. [Mon Oct 6 20:56:41 2025] systemd[1]: Stopped target Switch Root. [Mon Oct 6 20:56:41 2025] systemd[1]: Stopped target Initrd File Systems. [Mon Oct 6 20:56:41 2025] systemd[1]: Stopped target Initrd Root File System. [Mon Oct 6 20:56:41 2025] systemd[1]: Reached target Local Integrity Protected Volumes. [Mon Oct 6 20:56:41 2025] systemd[1]: Reached target Path Units. [Mon Oct 6 20:56:41 2025] systemd[1]: Reached target rpc_pipefs.target. [Mon Oct 6 20:56:41 2025] systemd[1]: Reached target Slice Units. [Mon Oct 6 20:56:41 2025] systemd[1]: Reached target Swaps. [Mon Oct 6 20:56:41 2025] systemd[1]: Reached target Local Verity Protected Volumes. [Mon Oct 6 20:56:41 2025] systemd[1]: Listening on RPCbind Server Activation Socket. [Mon Oct 6 20:56:41 2025] systemd[1]: Reached target RPC Port Mapper. [Mon Oct 6 20:56:41 2025] systemd[1]: Listening on Process Core Dump Socket. [Mon Oct 6 20:56:41 2025] systemd[1]: Listening on initctl Compatibility Named Pipe. [Mon Oct 6 20:56:41 2025] systemd[1]: Listening on udev Control Socket. [Mon Oct 6 20:56:41 2025] systemd[1]: Listening on udev Kernel Socket. [Mon Oct 6 20:56:41 2025] systemd[1]: Mounting Huge Pages File System... [Mon Oct 6 20:56:41 2025] systemd[1]: Mounting POSIX Message Queue File System... [Mon Oct 6 20:56:41 2025] systemd[1]: Mounting Kernel Debug File System... [Mon Oct 6 20:56:41 2025] systemd[1]: Mounting Kernel Trace File System... [Mon Oct 6 20:56:41 2025] systemd[1]: Kernel Module supporting RPCSEC_GSS was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab). [Mon Oct 6 20:56:41 2025] systemd[1]: Starting Create List of Static Device Nodes... [Mon Oct 6 20:56:41 2025] systemd[1]: Starting Load Kernel Module configfs... [Mon Oct 6 20:56:41 2025] systemd[1]: Starting Load Kernel Module drm... [Mon Oct 6 20:56:41 2025] systemd[1]: Starting Load Kernel Module efi_pstore... [Mon Oct 6 20:56:41 2025] systemd[1]: Starting Load Kernel Module fuse... [Mon Oct 6 20:56:41 2025] systemd[1]: Starting Read and set NIS domainname from /etc/sysconfig/network... [Mon Oct 6 20:56:41 2025] systemd[1]: systemd-fsck-root.service: Deactivated successfully. [Mon Oct 6 20:56:41 2025] systemd[1]: Stopped File System Check on Root Device. [Mon Oct 6 20:56:41 2025] systemd[1]: Stopped Journal Service. [Mon Oct 6 20:56:41 2025] systemd[1]: Starting Journal Service... [Mon Oct 6 20:56:41 2025] systemd[1]: Load Kernel Modules was skipped because no trigger condition checks were met. [Mon Oct 6 20:56:41 2025] fuse: init (API version 7.37) [Mon Oct 6 20:56:41 2025] systemd[1]: Starting Generate network units from Kernel command line... [Mon Oct 6 20:56:41 2025] systemd[1]: TPM2 PCR Machine ID Measurement was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f). [Mon Oct 6 20:56:41 2025] systemd[1]: Starting Remount Root and Kernel File Systems... [Mon Oct 6 20:56:41 2025] systemd[1]: Repartition Root Disk was skipped because no trigger condition checks were met. [Mon Oct 6 20:56:41 2025] systemd[1]: Starting Apply Kernel Variables... [Mon Oct 6 20:56:41 2025] systemd[1]: Starting Coldplug All udev Devices... 
[Mon Oct 6 20:56:41 2025] xfs filesystem being remounted at / supports timestamps until 2038 (0x7fffffff) [Mon Oct 6 20:56:41 2025] systemd[1]: Mounted Huge Pages File System. [Mon Oct 6 20:56:41 2025] systemd[1]: Mounted POSIX Message Queue File System. [Mon Oct 6 20:56:41 2025] systemd[1]: Mounted Kernel Debug File System. [Mon Oct 6 20:56:41 2025] systemd[1]: Mounted Kernel Trace File System. [Mon Oct 6 20:56:41 2025] systemd[1]: Started Journal Service. [Mon Oct 6 20:56:41 2025] ACPI: bus type drm_connector registered [Mon Oct 6 20:56:41 2025] systemd-journald[588]: Received client request to flush runtime journal. [Mon Oct 6 20:56:42 2025] Warning: Deprecated Driver is detected: nft_compat will not be maintained in a future major release and may be disabled [Mon Oct 6 20:56:42 2025] Warning: Deprecated Driver is detected: nft_compat_module_init will not be maintained in a future major release and may be disabled [Mon Oct 6 20:56:43 2025] input: PC Speaker as /devices/platform/pcspkr/input/input6 [Mon Oct 6 20:56:43 2025] piix4_smbus 0000:00:01.3: SMBus Host Controller at 0x700, revision 0 [Mon Oct 6 20:56:43 2025] i2c i2c-0: 1/1 memory slots populated (from DMI) [Mon Oct 6 20:56:43 2025] i2c i2c-0: Memory type 0x07 not supported yet, not instantiating SPD [Mon Oct 6 20:56:43 2025] [drm] pci: virtio-vga detected at 0000:00:02.0 [Mon Oct 6 20:56:43 2025] virtio-pci 0000:00:02.0: vgaarb: deactivate vga console [Mon Oct 6 20:56:43 2025] kvm_amd: TSC scaling supported [Mon Oct 6 20:56:43 2025] kvm_amd: Nested Virtualization enabled [Mon Oct 6 20:56:43 2025] kvm_amd: Nested Paging enabled [Mon Oct 6 20:56:43 2025] kvm_amd: LBR virtualization supported [Mon Oct 6 20:56:43 2025] Console: switching to colour dummy device 80x25 [Mon Oct 6 20:56:43 2025] [drm] features: -virgl +edid -resource_blob -host_visible [Mon Oct 6 20:56:43 2025] [drm] features: -context_init [Mon Oct 6 20:56:43 2025] [drm] number of scanouts: 1 [Mon Oct 6 20:56:43 2025] [drm] number of cap sets: 0 [Mon Oct 6 20:56:43 2025] [drm] Initialized virtio_gpu 0.1.0 for 0000:00:02.0 on minor 0 [Mon Oct 6 20:56:43 2025] fbcon: virtio_gpudrmfb (fb0) is primary device [Mon Oct 6 20:56:43 2025] Console: switching to colour frame buffer device 128x48 [Mon Oct 6 20:56:43 2025] virtio-pci 0000:00:02.0: [drm] fb0: virtio_gpudrmfb frame buffer device [Mon Oct 6 20:56:43 2025] ISO 9660 Extensions: Microsoft Joliet Level 3 [Mon Oct 6 20:56:43 2025] ISO 9660 Extensions: RRIP_1991A [Mon Oct 6 21:00:52 2025] pci 0000:00:07.0: [1af4:1000] type 00 class 0x020000 conventional PCI endpoint [Mon Oct 6 21:00:52 2025] pci 0000:00:07.0: BAR 0 [io 0x0000-0x003f] [Mon Oct 6 21:00:52 2025] pci 0000:00:07.0: BAR 1 [mem 0x00000000-0x00000fff] [Mon Oct 6 21:00:52 2025] pci 0000:00:07.0: BAR 4 [mem 0x00000000-0x00003fff 64bit pref] [Mon Oct 6 21:00:52 2025] pci 0000:00:07.0: ROM [mem 0x00000000-0x0007ffff pref] [Mon Oct 6 21:00:52 2025] pci 0000:00:07.0: ROM [mem 0xc0000000-0xc007ffff pref]: assigned [Mon Oct 6 21:00:52 2025] pci 0000:00:07.0: BAR 4 [mem 0x140000000-0x140003fff 64bit pref]: assigned [Mon Oct 6 21:00:52 2025] pci 0000:00:07.0: BAR 1 [mem 0xc0080000-0xc0080fff]: assigned [Mon Oct 6 21:00:52 2025] pci 0000:00:07.0: BAR 0 [io 0x1000-0x103f]: assigned [Mon Oct 6 21:00:52 2025] virtio-pci 0000:00:07.0: enabling device (0000 -> 0003) [Mon Oct 6 21:06:32 2025] systemd-rc-local-generator[5068]: /etc/rc.d/rc.local is not marked executable, skipping. [Mon Oct 6 21:06:56 2025] SELinux: Converting 366 SID table entries... 
[Mon Oct 6 21:06:56 2025] SELinux: policy capability network_peer_controls=1 [Mon Oct 6 21:06:56 2025] SELinux: policy capability open_perms=1 [Mon Oct 6 21:06:56 2025] SELinux: policy capability extended_socket_class=1 [Mon Oct 6 21:06:56 2025] SELinux: policy capability always_check_network=0 [Mon Oct 6 21:06:56 2025] SELinux: policy capability cgroup_seclabel=1 [Mon Oct 6 21:06:56 2025] SELinux: policy capability nnp_nosuid_transition=1 [Mon Oct 6 21:06:56 2025] SELinux: policy capability genfs_seclabel_symlinks=1 [Mon Oct 6 21:07:04 2025] SELinux: Converting 366 SID table entries... [Mon Oct 6 21:07:04 2025] SELinux: policy capability network_peer_controls=1 [Mon Oct 6 21:07:04 2025] SELinux: policy capability open_perms=1 [Mon Oct 6 21:07:04 2025] SELinux: policy capability extended_socket_class=1 [Mon Oct 6 21:07:04 2025] SELinux: policy capability always_check_network=0 [Mon Oct 6 21:07:04 2025] SELinux: policy capability cgroup_seclabel=1 [Mon Oct 6 21:07:04 2025] SELinux: policy capability nnp_nosuid_transition=1 [Mon Oct 6 21:07:04 2025] SELinux: policy capability genfs_seclabel_symlinks=1 [Mon Oct 6 21:07:13 2025] SELinux: Converting 366 SID table entries... [Mon Oct 6 21:07:13 2025] SELinux: policy capability network_peer_controls=1 [Mon Oct 6 21:07:13 2025] SELinux: policy capability open_perms=1 [Mon Oct 6 21:07:13 2025] SELinux: policy capability extended_socket_class=1 [Mon Oct 6 21:07:13 2025] SELinux: policy capability always_check_network=0 [Mon Oct 6 21:07:13 2025] SELinux: policy capability cgroup_seclabel=1 [Mon Oct 6 21:07:13 2025] SELinux: policy capability nnp_nosuid_transition=1 [Mon Oct 6 21:07:13 2025] SELinux: policy capability genfs_seclabel_symlinks=1 [Mon Oct 6 21:07:25 2025] SELinux: Converting 369 SID table entries... [Mon Oct 6 21:07:25 2025] SELinux: policy capability network_peer_controls=1 [Mon Oct 6 21:07:25 2025] SELinux: policy capability open_perms=1 [Mon Oct 6 21:07:25 2025] SELinux: policy capability extended_socket_class=1 [Mon Oct 6 21:07:25 2025] SELinux: policy capability always_check_network=0 [Mon Oct 6 21:07:25 2025] SELinux: policy capability cgroup_seclabel=1 [Mon Oct 6 21:07:25 2025] SELinux: policy capability nnp_nosuid_transition=1 [Mon Oct 6 21:07:25 2025] SELinux: policy capability genfs_seclabel_symlinks=1 [Mon Oct 6 21:07:46 2025] systemd-rc-local-generator[6118]: /etc/rc.d/rc.local is not marked executable, skipping. [Mon Oct 6 21:07:49 2025] evm: overlay not supported home/zuul/zuul-output/logs/selinux-denials.log0000644000000000000000000000000015071030444020577 0ustar rootroothome/zuul/zuul-output/logs/system-config/0000755000175000017500000000000015071030457017657 5ustar zuulzuulhome/zuul/zuul-output/logs/system-config/libvirt/0000755000175000017500000000000015071030460021324 5ustar zuulzuulhome/zuul/zuul-output/logs/system-config/libvirt/libvirt-admin.conf0000644000175000000000000000070215071030460024702 0ustar zuulroot# # This can be used to setup URI aliases for frequently # used connection URIs. Aliases may contain only the # characters a-Z, 0-9, _, -. 
# # Following the '=' may be any valid libvirt admin connection # URI, including arbitrary parameters #uri_aliases = [ # "admin=libvirtd:///system", #] # This specifies the default location the client tries to connect to if no other # URI is provided by the application #uri_default = "libvirtd:///system" home/zuul/zuul-output/logs/system-config/libvirt/libvirt.conf0000644000175000000000000000104315071030460023613 0ustar zuulroot# # This can be used to setup URI aliases for frequently # used connection URIs. Aliases may contain only the # characters a-Z, 0-9, _, -. # # Following the '=' may be any valid libvirt connection # URI, including arbitrary parameters #uri_aliases = [ # "hail=qemu+ssh://root@hail.cloud.example.com/system", # "sleet=qemu+ssh://root@sleet.cloud.example.com/system", #] # # These can be used in cases when no URI is supplied by the application # (@uri_default also prevents probing of the hypervisor driver). # #uri_default = "qemu:///system" home/zuul/zuul-output/logs/registries.conf0000644000000000000000000000763515071030460020042 0ustar rootroot# For more information on this configuration file, see containers-registries.conf(5). # # NOTE: RISK OF USING UNQUALIFIED IMAGE NAMES # We recommend always using fully qualified image names including the registry # server (full dns name), namespace, image name, and tag # (e.g., registry.redhat.io/ubi8/ubi:latest). Pulling by digest (i.e., # quay.io/repository/name@digest) further eliminates the ambiguity of tags. # When using short names, there is always an inherent risk that the image being # pulled could be spoofed. For example, a user wants to pull an image named # `foobar` from a registry and expects it to come from myregistry.com. If # myregistry.com is not first in the search list, an attacker could place a # different `foobar` image at a registry earlier in the search list. The user # would accidentally pull and run the attacker's image and code rather than the # intended content. We recommend only adding registries which are completely # trusted (i.e., registries which don't allow unknown or anonymous users to # create accounts with arbitrary names). This will prevent an image from being # spoofed, squatted or otherwise made insecure. If it is necessary to use one # of these registries, it should be added at the end of the list. # # # An array of host[:port] registries to try when pulling an unqualified image, in order. unqualified-search-registries = ["registry.access.redhat.com", "registry.redhat.io", "docker.io"] # [[registry]] # # The "prefix" field is used to choose the relevant [[registry]] TOML table; # # (only) the TOML table with the longest match for the input image name # # (taking into account namespace/repo/tag/digest separators) is used. # # # # The prefix can also be of the form: *.example.com for wildcard subdomain # # matching. # # # # If the prefix field is missing, it defaults to be the same as the "location" field. # prefix = "example.com/foo" # # # If true, unencrypted HTTP as well as TLS connections with untrusted # # certificates are allowed. # insecure = false # # # If true, pulling images with matching names is forbidden. # blocked = false # # # The physical location of the "prefix"-rooted namespace. # # # # By default, this is equal to "prefix" (in which case "prefix" can be omitted # # and the [[registry]] TOML table can only specify "location"). 
# # # # Example: Given # # prefix = "example.com/foo" # # location = "internal-registry-for-example.net/bar" # # requests for the image example.com/foo/myimage:latest will actually work with the # # internal-registry-for-example.net/bar/myimage:latest image. # # # The location can be empty iff prefix is in a # # wildcarded format: "*.example.com". In this case, the input reference will # # be used as-is without any rewrite. # location = internal-registry-for-example.com/bar" # # # (Possibly-partial) mirrors for the "prefix"-rooted namespace. # # # # The mirrors are attempted in the specified order; the first one that can be # # contacted and contains the image will be used (and if none of the mirrors contains the image, # # the primary location specified by the "registry.location" field, or using the unmodified # # user-specified reference, is tried last). # # # # Each TOML table in the "mirror" array can contain the following fields, with the same semantics # # as if specified in the [[registry]] TOML table directly: # # - location # # - insecure # [[registry.mirror]] # location = "example-mirror-0.local/mirror-for-foo" # [[registry.mirror]] # location = "example-mirror-1.local/mirrors/foo" # insecure = true # # Given the above, a pull of example.com/foo/image:latest will try: # # 1. example-mirror-0.local/mirror-for-foo/image:latest # # 2. example-mirror-1.local/mirrors/foo/image:latest # # 3. internal-registry-for-example.net/bar/image:latest # # in order, and use the first one that exists. short-name-mode = "enforcing" # BEGIN ANSIBLE MANAGED BLOCK [[registry]] location = "38.102.83.53:5001" insecure = true # END ANSIBLE MANAGED BLOCK home/zuul/zuul-output/logs/registries.conf.d/0000755000175000000000000000000015071030460020357 5ustar zuulroothome/zuul/zuul-output/logs/registries.conf.d/000-shortnames.conf0000644000175000000000000001735515071030460023721 0ustar zuulroot[aliases] # almalinux "almalinux" = "docker.io/library/almalinux" "almalinux-minimal" = "docker.io/library/almalinux-minimal" # Amazon Linux "amazonlinux" = "public.ecr.aws/amazonlinux/amazonlinux" # Arch Linux "archlinux" = "docker.io/library/archlinux" # centos "centos" = "quay.io/centos/centos" # containers "skopeo" = "quay.io/skopeo/stable" "buildah" = "quay.io/buildah/stable" "podman" = "quay.io/podman/stable" "hello" = "quay.io/podman/hello" "hello-world" = "quay.io/podman/hello" # docker "alpine" = "docker.io/library/alpine" "docker" = "docker.io/library/docker" "registry" = "docker.io/library/registry" "swarm" = "docker.io/library/swarm" # Fedora "fedora-bootc" = "registry.fedoraproject.org/fedora-bootc" "fedora-minimal" = "registry.fedoraproject.org/fedora-minimal" "fedora" = "registry.fedoraproject.org/fedora" # Gentoo "gentoo" = "docker.io/gentoo/stage3" # openSUSE "opensuse/tumbleweed" = "registry.opensuse.org/opensuse/tumbleweed" "opensuse/tumbleweed-dnf" = "registry.opensuse.org/opensuse/tumbleweed-dnf" "opensuse/tumbleweed-microdnf" = "registry.opensuse.org/opensuse/tumbleweed-microdnf" "opensuse/leap" = "registry.opensuse.org/opensuse/leap" "opensuse/busybox" = "registry.opensuse.org/opensuse/busybox" "tumbleweed" = "registry.opensuse.org/opensuse/tumbleweed" "tumbleweed-dnf" = "registry.opensuse.org/opensuse/tumbleweed-dnf" "tumbleweed-microdnf" = "registry.opensuse.org/opensuse/tumbleweed-microdnf" "leap" = "registry.opensuse.org/opensuse/leap" "leap-dnf" = "registry.opensuse.org/opensuse/leap-dnf" "leap-microdnf" = "registry.opensuse.org/opensuse/leap-microdnf" "tw-busybox" = 
"registry.opensuse.org/opensuse/busybox" # OTel (Open Telemetry) - opentelemetry.io "otel/autoinstrumentation-go" = "docker.io/otel/autoinstrumentation-go" "otel/autoinstrumentation-nodejs" = "docker.io/otel/autoinstrumentation-nodejs" "otel/autoinstrumentation-python" = "docker.io/otel/autoinstrumentation-python" "otel/autoinstrumentation-java" = "docker.io/otel/autoinstrumentation-java" "otel/autoinstrumentation-dotnet" = "docker.io/otel/autoinstrumentation-dotnet" "otel/opentelemetry-collector" = "docker.io/otel/opentelemetry-collector" "otel/opentelemetry-collector-contrib" = "docker.io/otel/opentelemetry-collector-contrib" "otel/opentelemetry-collector-contrib-dev" = "docker.io/otel/opentelemetry-collector-contrib-dev" "otel/opentelemetry-collector-k8s" = "docker.io/otel/opentelemetry-collector-k8s" "otel/opentelemetry-operator" = "docker.io/otel/opentelemetry-operator" "otel/opentelemetry-operator-bundle" = "docker.io/otel/opentelemetry-operator-bundle" "otel/operator-opamp-bridge" = "docker.io/otel/operator-opamp-bridge" "otel/semconvgen" = "docker.io/otel/semconvgen" "otel/weaver" = "docker.io/otel/weaver" # SUSE "suse/sle15" = "registry.suse.com/suse/sle15" "suse/sles12sp5" = "registry.suse.com/suse/sles12sp5" "suse/sles12sp4" = "registry.suse.com/suse/sles12sp4" "suse/sles12sp3" = "registry.suse.com/suse/sles12sp3" "sle15" = "registry.suse.com/suse/sle15" "sles12sp5" = "registry.suse.com/suse/sles12sp5" "sles12sp4" = "registry.suse.com/suse/sles12sp4" "sles12sp3" = "registry.suse.com/suse/sles12sp3" "bci-base" = "registry.suse.com/bci/bci-base" "bci/bci-base" = "registry.suse.com/bci/bci-base" "bci-micro" = "registry.suse.com/bci/bci-micro" "bci/bci-micro" = "registry.suse.com/bci/bci-micro" "bci-minimal" = "registry.suse.com/bci/bci-minimal" "bci/bci-minimal" = "registry.suse.com/bci/bci-minimal" "bci-busybox" = "registry.suse.com/bci/bci-busybox" "bci/bci-busybox" = "registry.suse.com/bci/bci-busybox" # Red Hat Enterprise Linux "rhel" = "registry.access.redhat.com/rhel" "rhel6" = "registry.access.redhat.com/rhel6" "rhel7" = "registry.access.redhat.com/rhel7" "rhel7.9" = "registry.access.redhat.com/rhel7.9" "rhel-atomic" = "registry.access.redhat.com/rhel-atomic" "rhel9-bootc" = "registry.redhat.io/rhel9/rhel-bootc" "rhel-minimal" = "registry.access.redhat.com/rhel-minimal" "rhel-init" = "registry.access.redhat.com/rhel-init" "rhel7-atomic" = "registry.access.redhat.com/rhel7-atomic" "rhel7-minimal" = "registry.access.redhat.com/rhel7-minimal" "rhel7-init" = "registry.access.redhat.com/rhel7-init" "rhel7/rhel" = "registry.access.redhat.com/rhel7/rhel" "rhel7/rhel-atomic" = "registry.access.redhat.com/rhel7/rhel7/rhel-atomic" "ubi7/ubi" = "registry.access.redhat.com/ubi7/ubi" "ubi7/ubi-minimal" = "registry.access.redhat.com/ubi7-minimal" "ubi7/ubi-init" = "registry.access.redhat.com/ubi7-init" "ubi7" = "registry.access.redhat.com/ubi7" "ubi7-init" = "registry.access.redhat.com/ubi7-init" "ubi7-minimal" = "registry.access.redhat.com/ubi7-minimal" "rhel8" = "registry.access.redhat.com/ubi8" "rhel8-init" = "registry.access.redhat.com/ubi8-init" "rhel8-minimal" = "registry.access.redhat.com/ubi8-minimal" "rhel8-micro" = "registry.access.redhat.com/ubi8-micro" "ubi8" = "registry.access.redhat.com/ubi8" "ubi8-minimal" = "registry.access.redhat.com/ubi8-minimal" "ubi8-init" = "registry.access.redhat.com/ubi8-init" "ubi8-micro" = "registry.access.redhat.com/ubi8-micro" "ubi8/ubi" = "registry.access.redhat.com/ubi8/ubi" "ubi8/ubi-minimal" = "registry.access.redhat.com/ubi8-minimal" 
"ubi8/ubi-init" = "registry.access.redhat.com/ubi8-init" "ubi8/ubi-micro" = "registry.access.redhat.com/ubi8-micro" "ubi8/podman" = "registry.access.redhat.com/ubi8/podman" "ubi8/buildah" = "registry.access.redhat.com/ubi8/buildah" "ubi8/skopeo" = "registry.access.redhat.com/ubi8/skopeo" "rhel9" = "registry.access.redhat.com/ubi9" "rhel9-init" = "registry.access.redhat.com/ubi9-init" "rhel9-minimal" = "registry.access.redhat.com/ubi9-minimal" "rhel9-micro" = "registry.access.redhat.com/ubi9-micro" "ubi9" = "registry.access.redhat.com/ubi9" "ubi9-minimal" = "registry.access.redhat.com/ubi9-minimal" "ubi9-init" = "registry.access.redhat.com/ubi9-init" "ubi9-micro" = "registry.access.redhat.com/ubi9-micro" "ubi9/ubi" = "registry.access.redhat.com/ubi9/ubi" "ubi9/ubi-minimal" = "registry.access.redhat.com/ubi9-minimal" "ubi9/ubi-init" = "registry.access.redhat.com/ubi9-init" "ubi9/ubi-micro" = "registry.access.redhat.com/ubi9-micro" "ubi9/podman" = "registry.access.redhat.com/ubi9/podman" "ubi9/buildah" = "registry.access.redhat.com/ubi9/buildah" "ubi9/skopeo" = "registry.access.redhat.com/ubi9/skopeo" # Rocky Linux "rockylinux" = "quay.io/rockylinux/rockylinux" # Debian "debian" = "docker.io/library/debian" # Kali Linux "kali-bleeding-edge" = "docker.io/kalilinux/kali-bleeding-edge" "kali-dev" = "docker.io/kalilinux/kali-dev" "kali-experimental" = "docker.io/kalilinux/kali-experimental" "kali-last-release" = "docker.io/kalilinux/kali-last-release" "kali-rolling" = "docker.io/kalilinux/kali-rolling" # Ubuntu "ubuntu" = "docker.io/library/ubuntu" # Oracle Linux "oraclelinux" = "container-registry.oracle.com/os/oraclelinux" # busybox "busybox" = "docker.io/library/busybox" # golang "golang" = "docker.io/library/golang" # php "php" = "docker.io/library/php" # python "python" = "docker.io/library/python" # rust "rust" = "docker.io/library/rust" # node "node" = "docker.io/library/node" # Grafana Labs "grafana/agent" = "docker.io/grafana/agent" "grafana/grafana" = "docker.io/grafana/grafana" "grafana/k6" = "docker.io/grafana/k6" "grafana/loki" = "docker.io/grafana/loki" "grafana/mimir" = "docker.io/grafana/mimir" "grafana/oncall" = "docker.io/grafana/oncall" "grafana/pyroscope" = "docker.io/grafana/pyroscope" "grafana/tempo" = "docker.io/grafana/tempo" # curl "curl" = "quay.io/curl/curl" # nginx "nginx" = "docker.io/library/nginx" # QUBIP "qubip/pq-container" = "quay.io/qubip/pq-container" home/zuul/zuul-output/artifacts/0000755000175000017500000000000015071026621016102 5ustar zuulzuulhome/zuul/zuul-output/docs/0000755000175000017500000000000015071026621015052 5ustar zuulzuul