home/zuul/zuul-output/0000755000175000017500000000000015134407453014130 5ustar zuulzuulhome/zuul/zuul-output/logs/0000755000175000017500000000000015134437413015073 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/0000755000175000017500000000000015134437364020375 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/logs/0000755000175000017500000000000015134437341021334 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/logs/2026-01-22_14-28/0000775000175000017500000000000015134437342023122 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/logs/2026-01-22_14-28/ansible.log0000666000175000017500000123064715134421263025254 0ustar zuulzuul2026-01-22 12:04:03,687 p=31865 u=zuul n=ansible | Starting galaxy collection install process 2026-01-22 12:04:03,688 p=31865 u=zuul n=ansible | Process install dependency map 2026-01-22 12:04:25,462 p=31865 u=zuul n=ansible | Starting collection install process 2026-01-22 12:04:25,462 p=31865 u=zuul n=ansible | Installing 'cifmw.general:1.0.0+daa79182' to '/home/zuul/.ansible/collections/ansible_collections/cifmw/general' 2026-01-22 12:04:25,996 p=31865 u=zuul n=ansible | Created collection for cifmw.general:1.0.0+daa79182 at /home/zuul/.ansible/collections/ansible_collections/cifmw/general 2026-01-22 12:04:25,997 p=31865 u=zuul n=ansible | cifmw.general:1.0.0+daa79182 was installed successfully 2026-01-22 12:04:25,997 p=31865 u=zuul n=ansible | Installing 'containers.podman:1.16.2' to '/home/zuul/.ansible/collections/ansible_collections/containers/podman' 2026-01-22 12:04:26,060 p=31865 u=zuul n=ansible | Created collection for containers.podman:1.16.2 at /home/zuul/.ansible/collections/ansible_collections/containers/podman 2026-01-22 12:04:26,060 p=31865 u=zuul n=ansible | containers.podman:1.16.2 was installed successfully 2026-01-22 12:04:26,060 p=31865 u=zuul n=ansible | Installing 'community.general:10.0.1' to '/home/zuul/.ansible/collections/ansible_collections/community/general' 2026-01-22 12:04:26,969 p=31865 u=zuul n=ansible | Created collection for community.general:10.0.1 at /home/zuul/.ansible/collections/ansible_collections/community/general 2026-01-22 12:04:26,970 p=31865 u=zuul n=ansible | community.general:10.0.1 was installed successfully 2026-01-22 12:04:26,970 p=31865 u=zuul n=ansible | Installing 'ansible.posix:1.6.2' to '/home/zuul/.ansible/collections/ansible_collections/ansible/posix' 2026-01-22 12:04:27,032 p=31865 u=zuul n=ansible | Created collection for ansible.posix:1.6.2 at /home/zuul/.ansible/collections/ansible_collections/ansible/posix 2026-01-22 12:04:27,032 p=31865 u=zuul n=ansible | ansible.posix:1.6.2 was installed successfully 2026-01-22 12:04:27,033 p=31865 u=zuul n=ansible | Installing 'ansible.utils:5.1.2' to '/home/zuul/.ansible/collections/ansible_collections/ansible/utils' 2026-01-22 12:04:27,153 p=31865 u=zuul n=ansible | Created collection for ansible.utils:5.1.2 at /home/zuul/.ansible/collections/ansible_collections/ansible/utils 2026-01-22 12:04:27,154 p=31865 u=zuul n=ansible | ansible.utils:5.1.2 was installed successfully 2026-01-22 12:04:27,154 p=31865 u=zuul n=ansible | Installing 'community.libvirt:1.3.0' to '/home/zuul/.ansible/collections/ansible_collections/community/libvirt' 2026-01-22 12:04:27,188 p=31865 u=zuul n=ansible | Created collection for community.libvirt:1.3.0 at /home/zuul/.ansible/collections/ansible_collections/community/libvirt 2026-01-22 12:04:27,188 p=31865 u=zuul n=ansible | community.libvirt:1.3.0 was installed successfully 2026-01-22 12:04:27,188 
p=31865 u=zuul n=ansible | Installing 'community.crypto:2.22.3' to '/home/zuul/.ansible/collections/ansible_collections/community/crypto' 2026-01-22 12:04:27,344 p=31865 u=zuul n=ansible | Created collection for community.crypto:2.22.3 at /home/zuul/.ansible/collections/ansible_collections/community/crypto 2026-01-22 12:04:27,344 p=31865 u=zuul n=ansible | community.crypto:2.22.3 was installed successfully 2026-01-22 12:04:27,344 p=31865 u=zuul n=ansible | Installing 'kubernetes.core:5.0.0' to '/home/zuul/.ansible/collections/ansible_collections/kubernetes/core' 2026-01-22 12:04:27,470 p=31865 u=zuul n=ansible | Created collection for kubernetes.core:5.0.0 at /home/zuul/.ansible/collections/ansible_collections/kubernetes/core 2026-01-22 12:04:27,470 p=31865 u=zuul n=ansible | kubernetes.core:5.0.0 was installed successfully 2026-01-22 12:04:27,470 p=31865 u=zuul n=ansible | Installing 'ansible.netcommon:7.1.0' to '/home/zuul/.ansible/collections/ansible_collections/ansible/netcommon' 2026-01-22 12:04:27,539 p=31865 u=zuul n=ansible | Created collection for ansible.netcommon:7.1.0 at /home/zuul/.ansible/collections/ansible_collections/ansible/netcommon 2026-01-22 12:04:27,540 p=31865 u=zuul n=ansible | ansible.netcommon:7.1.0 was installed successfully 2026-01-22 12:04:27,540 p=31865 u=zuul n=ansible | Installing 'openstack.config_template:2.1.1' to '/home/zuul/.ansible/collections/ansible_collections/openstack/config_template' 2026-01-22 12:04:27,557 p=31865 u=zuul n=ansible | Created collection for openstack.config_template:2.1.1 at /home/zuul/.ansible/collections/ansible_collections/openstack/config_template 2026-01-22 12:04:27,557 p=31865 u=zuul n=ansible | openstack.config_template:2.1.1 was installed successfully 2026-01-22 12:04:27,557 p=31865 u=zuul n=ansible | Installing 'junipernetworks.junos:9.1.0' to '/home/zuul/.ansible/collections/ansible_collections/junipernetworks/junos' 2026-01-22 12:04:27,787 p=31865 u=zuul n=ansible | Created collection for junipernetworks.junos:9.1.0 at /home/zuul/.ansible/collections/ansible_collections/junipernetworks/junos 2026-01-22 12:04:27,787 p=31865 u=zuul n=ansible | junipernetworks.junos:9.1.0 was installed successfully 2026-01-22 12:04:27,787 p=31865 u=zuul n=ansible | Installing 'cisco.ios:9.0.3' to '/home/zuul/.ansible/collections/ansible_collections/cisco/ios' 2026-01-22 12:04:28,061 p=31865 u=zuul n=ansible | Created collection for cisco.ios:9.0.3 at /home/zuul/.ansible/collections/ansible_collections/cisco/ios 2026-01-22 12:04:28,061 p=31865 u=zuul n=ansible | cisco.ios:9.0.3 was installed successfully 2026-01-22 12:04:28,061 p=31865 u=zuul n=ansible | Installing 'mellanox.onyx:1.0.0' to '/home/zuul/.ansible/collections/ansible_collections/mellanox/onyx' 2026-01-22 12:04:28,093 p=31865 u=zuul n=ansible | Created collection for mellanox.onyx:1.0.0 at /home/zuul/.ansible/collections/ansible_collections/mellanox/onyx 2026-01-22 12:04:28,093 p=31865 u=zuul n=ansible | mellanox.onyx:1.0.0 was installed successfully 2026-01-22 12:04:28,093 p=31865 u=zuul n=ansible | Installing 'community.okd:4.0.0' to '/home/zuul/.ansible/collections/ansible_collections/community/okd' 2026-01-22 12:04:28,122 p=31865 u=zuul n=ansible | Created collection for community.okd:4.0.0 at /home/zuul/.ansible/collections/ansible_collections/community/okd 2026-01-22 12:04:28,122 p=31865 u=zuul n=ansible | community.okd:4.0.0 was installed successfully 2026-01-22 12:04:28,122 p=31865 u=zuul n=ansible | Installing '@NAMESPACE@.@NAME@:3.1.4' to 
'/home/zuul/.ansible/collections/ansible_collections/@NAMESPACE@/@NAME@' 2026-01-22 12:04:28,213 p=31865 u=zuul n=ansible | Created collection for @NAMESPACE@.@NAME@:3.1.4 at /home/zuul/.ansible/collections/ansible_collections/@NAMESPACE@/@NAME@ 2026-01-22 12:04:28,213 p=31865 u=zuul n=ansible | @NAMESPACE@.@NAME@:3.1.4 was installed successfully 2026-01-22 12:04:46,624 p=33295 u=zuul n=ansible | [WARNING]: Invalid characters were found in group names but not replaced, use -vvvv to see details 2026-01-22 12:04:47,311 p=33295 u=zuul n=ansible | PLAY [localhost] *************************************************************** 2026-01-22 12:04:47,333 p=33295 u=zuul n=ansible | TASK [Gathering Facts ] ******************************************************** 2026-01-22 12:04:47,333 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:47 +0000 (0:00:00.042) 0:00:00.042 ****** 2026-01-22 12:04:47,333 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:47 +0000 (0:00:00.040) 0:00:00.040 ****** 2026-01-22 12:04:48,447 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:04:48,470 p=33295 u=zuul n=ansible | TASK [Create bootstrap_common_vars file src={{ playbook_dir }}/files/common_bootstrap_params.yml.j2, dest={{ bootstrap_common_vars }}] *** 2026-01-22 12:04:48,470 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:48 +0000 (0:00:01.136) 0:00:01.179 ****** 2026-01-22 12:04:48,470 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:48 +0000 (0:00:01.137) 0:00:01.177 ****** 2026-01-22 12:04:49,175 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:04:49,182 p=33295 u=zuul n=ansible | TASK [Set vars as fact - bootstrap common vars name=cifmw_helpers, tasks_from=var_file.yml] *** 2026-01-22 12:04:49,182 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.712) 0:00:01.891 ****** 2026-01-22 12:04:49,182 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.712) 0:00:01.889 ****** 2026-01-22 12:04:49,326 p=33295 u=zuul n=ansible | TASK [cifmw_helpers : Fail if file is not yaml or yml extension msg=File needs to be yaml/yml extension] *** 2026-01-22 12:04:49,327 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.144) 0:00:02.036 ****** 2026-01-22 12:04:49,327 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.144) 0:00:02.034 ****** 2026-01-22 12:04:49,348 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:04:49,355 p=33295 u=zuul n=ansible | TASK [cifmw_helpers : Check if file is available path={{ provided_file | trim }}] *** 2026-01-22 12:04:49,355 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.028) 0:00:02.064 ****** 2026-01-22 12:04:49,355 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.028) 0:00:02.062 ****** 2026-01-22 12:04:49,547 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:04:49,558 p=33295 u=zuul n=ansible | TASK [cifmw_helpers : Read the vars] ******************************************* 2026-01-22 12:04:49,558 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.203) 0:00:02.268 ****** 2026-01-22 12:04:49,558 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.203) 0:00:02.266 ****** 2026-01-22 12:04:49,807 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:04:49,816 p=33295 u=zuul n=ansible | TASK [cifmw_helpers : Set vars as fact] **************************************** 2026-01-22 12:04:49,816 p=33295 u=zuul n=ansible | Thursday 
22 January 2026 12:04:49 +0000 (0:00:00.257) 0:00:02.525 ****** 2026-01-22 12:04:49,816 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.257) 0:00:02.523 ****** 2026-01-22 12:04:49,839 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:04:49,843 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:04:49,854 p=33295 u=zuul n=ansible | TASK [Include vars from cifmw_extras _raw_params={{ item | replace('@','') }}] *** 2026-01-22 12:04:49,854 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.038) 0:00:02.564 ****** 2026-01-22 12:04:49,854 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.038) 0:00:02.562 ****** 2026-01-22 12:04:49,875 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:04:49,882 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Set custom cifmw PATH reusable fact cifmw_path={{ ansible_user_dir }}/.crc/bin:{{ ansible_user_dir }}/.crc/bin/oc:{{ ansible_user_dir }}/bin:{{ ansible_env.PATH }}, cacheable=True] *** 2026-01-22 12:04:49,882 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.027) 0:00:02.591 ****** 2026-01-22 12:04:49,882 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.027) 0:00:02.589 ****** 2026-01-22 12:04:49,907 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:04:49,914 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Get customized parameters ci_framework_params={{ hostvars[inventory_hostname] | dict2items | selectattr("key", "match", "^(cifmw|pre|post)_(?!install_yamls|openshift_token|openshift_login|openshift_kubeconfig).*") | list | items2dict }}] *** 2026-01-22 12:04:49,914 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.032) 0:00:02.624 ****** 2026-01-22 12:04:49,914 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.032) 0:00:02.622 ****** 2026-01-22 12:04:49,965 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:04:49,972 p=33295 u=zuul n=ansible | TASK [install_ca : Ensure target directory exists path={{ cifmw_install_ca_trust_dir }}, state=directory, mode=0755] *** 2026-01-22 12:04:49,973 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.058) 0:00:02.682 ****** 2026-01-22 12:04:49,973 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.058) 0:00:02.680 ****** 2026-01-22 12:04:50,406 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:04:50,416 p=33295 u=zuul n=ansible | TASK [install_ca : Install internal CA from url url={{ cifmw_install_ca_url }}, dest={{ cifmw_install_ca_trust_dir }}, validate_certs={{ cifmw_install_ca_url_validate_certs | default(omit) }}, mode=0644] *** 2026-01-22 12:04:50,416 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:50 +0000 (0:00:00.443) 0:00:03.126 ****** 2026-01-22 12:04:50,416 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:50 +0000 (0:00:00.443) 0:00:03.124 ****** 2026-01-22 12:04:50,454 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:04:50,471 p=33295 u=zuul n=ansible | TASK [install_ca : Install custom CA bundle from inline dest={{ cifmw_install_ca_trust_dir }}/cifmw_inline_ca_bundle.crt, content={{ cifmw_install_ca_bundle_inline }}, mode=0644] *** 2026-01-22 12:04:50,472 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:50 +0000 (0:00:00.055) 0:00:03.181 ****** 2026-01-22 12:04:50,472 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:50 +0000 (0:00:00.055) 0:00:03.179 ****** 2026-01-22 
12:04:50,510 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:04:50,519 p=33295 u=zuul n=ansible | TASK [install_ca : Install custom CA bundle from file dest={{ cifmw_install_ca_trust_dir }}/{{ cifmw_install_ca_bundle_src | basename }}, src={{ cifmw_install_ca_bundle_src }}, mode=0644] *** 2026-01-22 12:04:50,519 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:50 +0000 (0:00:00.047) 0:00:03.229 ****** 2026-01-22 12:04:50,519 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:50 +0000 (0:00:00.047) 0:00:03.227 ****** 2026-01-22 12:04:50,544 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:04:50,551 p=33295 u=zuul n=ansible | TASK [install_ca : Update ca bundle _raw_params=update-ca-trust] *************** 2026-01-22 12:04:50,552 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:50 +0000 (0:00:00.032) 0:00:03.261 ****** 2026-01-22 12:04:50,552 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:50 +0000 (0:00:00.032) 0:00:03.259 ****** 2026-01-22 12:04:52,108 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:04:52,122 p=33295 u=zuul n=ansible | TASK [repo_setup : Ensure directories are present path={{ cifmw_repo_setup_basedir }}/{{ item }}, state=directory, mode=0755] *** 2026-01-22 12:04:52,122 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:52 +0000 (0:00:01.570) 0:00:04.831 ****** 2026-01-22 12:04:52,122 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:52 +0000 (0:00:01.570) 0:00:04.829 ****** 2026-01-22 12:04:52,316 p=33295 u=zuul n=ansible | changed: [localhost] => (item=tmp) 2026-01-22 12:04:52,498 p=33295 u=zuul n=ansible | changed: [localhost] => (item=artifacts/repositories) 2026-01-22 12:04:52,702 p=33295 u=zuul n=ansible | changed: [localhost] => (item=venv/repo_setup) 2026-01-22 12:04:52,713 p=33295 u=zuul n=ansible | TASK [repo_setup : Make sure git-core package is installed name=git-core, state=present] *** 2026-01-22 12:04:52,713 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:52 +0000 (0:00:00.591) 0:00:05.423 ****** 2026-01-22 12:04:52,714 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:52 +0000 (0:00:00.591) 0:00:05.421 ****** 2026-01-22 12:04:53,725 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:04:53,735 p=33295 u=zuul n=ansible | TASK [repo_setup : Get repo-setup repository accept_hostkey=True, dest={{ cifmw_repo_setup_basedir }}/tmp/repo-setup, repo={{ cifmw_repo_setup_src }}] *** 2026-01-22 12:04:53,735 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:53 +0000 (0:00:01.021) 0:00:06.444 ****** 2026-01-22 12:04:53,735 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:53 +0000 (0:00:01.021) 0:00:06.442 ****** 2026-01-22 12:04:54,697 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:04:54,709 p=33295 u=zuul n=ansible | TASK [repo_setup : Initialize python venv and install requirements virtualenv={{ cifmw_repo_setup_venv }}, requirements={{ cifmw_repo_setup_basedir }}/tmp/repo-setup/requirements.txt, virtualenv_command=python3 -m venv --system-site-packages --upgrade-deps] *** 2026-01-22 12:04:54,709 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:54 +0000 (0:00:00.974) 0:00:07.418 ****** 2026-01-22 12:04:54,709 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:54 +0000 (0:00:00.974) 0:00:07.416 ****** 2026-01-22 12:05:04,192 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:04,200 p=33295 u=zuul n=ansible | TASK [repo_setup : Install repo-setup package chdir={{ cifmw_repo_setup_basedir 
}}/tmp/repo-setup, creates={{ cifmw_repo_setup_venv }}/bin/repo-setup, _raw_params={{ cifmw_repo_setup_venv }}/bin/python setup.py install] *** 2026-01-22 12:05:04,200 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:04 +0000 (0:00:09.490) 0:00:16.909 ****** 2026-01-22 12:05:04,200 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:04 +0000 (0:00:09.490) 0:00:16.907 ****** 2026-01-22 12:05:04,984 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:04,993 p=33295 u=zuul n=ansible | TASK [repo_setup : Set cifmw_repo_setup_dlrn_hash_tag from content provider cifmw_repo_setup_dlrn_hash_tag={{ content_provider_dlrn_md5_hash }}] *** 2026-01-22 12:05:04,993 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:04 +0000 (0:00:00.793) 0:00:17.703 ****** 2026-01-22 12:05:04,994 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:04 +0000 (0:00:00.793) 0:00:17.701 ****** 2026-01-22 12:05:05,016 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:05,023 p=33295 u=zuul n=ansible | TASK [repo_setup : Run repo-setup _raw_params={{ cifmw_repo_setup_venv }}/bin/repo-setup {{ cifmw_repo_setup_promotion }} {{ cifmw_repo_setup_additional_repos }} -d {{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }} -b {{ cifmw_repo_setup_branch }} --rdo-mirror {{ cifmw_repo_setup_rdo_mirror }} {% if cifmw_repo_setup_dlrn_hash_tag | length > 0 %} --dlrn-hash-tag {{ cifmw_repo_setup_dlrn_hash_tag }} {% endif %} -o {{ cifmw_repo_setup_output }}] *** 2026-01-22 12:05:05,023 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:05 +0000 (0:00:00.029) 0:00:17.733 ****** 2026-01-22 12:05:05,023 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:05 +0000 (0:00:00.029) 0:00:17.731 ****** 2026-01-22 12:05:05,640 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:05,648 p=33295 u=zuul n=ansible | TASK [repo_setup : Get component repo url={{ cifmw_repo_setup_dlrn_uri }}/{{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }}-{{ cifmw_repo_setup_branch }}/component/{{ cifmw_repo_setup_component_name }}/{{ cifmw_repo_setup_component_promotion_tag }}/delorean.repo, dest={{ cifmw_repo_setup_output }}/{{ cifmw_repo_setup_component_name }}_{{ cifmw_repo_setup_component_promotion_tag }}_delorean.repo, mode=0644] *** 2026-01-22 12:05:05,649 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:05 +0000 (0:00:00.625) 0:00:18.358 ****** 2026-01-22 12:05:05,649 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:05 +0000 (0:00:00.625) 0:00:18.356 ****** 2026-01-22 12:05:05,679 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:05,688 p=33295 u=zuul n=ansible | TASK [repo_setup : Rename component repo path={{ cifmw_repo_setup_output }}/{{ cifmw_repo_setup_component_name }}_{{ cifmw_repo_setup_component_promotion_tag }}_delorean.repo, regexp=delorean-component-{{ cifmw_repo_setup_component_name }}, replace={{ cifmw_repo_setup_component_name }}-{{ cifmw_repo_setup_component_promotion_tag }}] *** 2026-01-22 12:05:05,688 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:05 +0000 (0:00:00.039) 0:00:18.397 ****** 2026-01-22 12:05:05,688 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:05 +0000 (0:00:00.039) 0:00:18.395 ****** 2026-01-22 12:05:05,719 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:05,727 p=33295 u=zuul n=ansible | TASK [repo_setup : Disable component repo in current-podified dlrn repo path={{ cifmw_repo_setup_output }}/delorean.repo, 
section=delorean-component-{{ cifmw_repo_setup_component_name }}, option=enabled, value=0, mode=0644] *** 2026-01-22 12:05:05,728 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:05 +0000 (0:00:00.039) 0:00:18.437 ****** 2026-01-22 12:05:05,728 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:05 +0000 (0:00:00.039) 0:00:18.435 ****** 2026-01-22 12:05:05,762 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:05,770 p=33295 u=zuul n=ansible | TASK [repo_setup : Run repo-setup-get-hash _raw_params={{ cifmw_repo_setup_venv }}/bin/repo-setup-get-hash --dlrn-url {{ cifmw_repo_setup_dlrn_uri[:-1] }} --os-version {{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }} --release {{ cifmw_repo_setup_branch }} {% if cifmw_repo_setup_component_name | length > 0 -%} --component {{ cifmw_repo_setup_component_name }} --tag {{ cifmw_repo_setup_component_promotion_tag }} {% else -%} --tag {{cifmw_repo_setup_promotion }} {% endif -%} {% if (cifmw_repo_setup_dlrn_hash_tag | length > 0) and (cifmw_repo_setup_component_name | length <= 0) -%} --dlrn-hash-tag {{ cifmw_repo_setup_dlrn_hash_tag }} {% endif -%} --json] *** 2026-01-22 12:05:05,770 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:05 +0000 (0:00:00.042) 0:00:18.479 ****** 2026-01-22 12:05:05,770 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:05 +0000 (0:00:00.042) 0:00:18.477 ****** 2026-01-22 12:05:06,223 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:06,231 p=33295 u=zuul n=ansible | TASK [repo_setup : Dump full hash in delorean.repo.md5 file content={{ _repo_setup_json['full_hash'] }} , dest={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5, mode=0644] *** 2026-01-22 12:05:06,231 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.460) 0:00:18.940 ****** 2026-01-22 12:05:06,231 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.460) 0:00:18.938 ****** 2026-01-22 12:05:06,643 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:06,651 p=33295 u=zuul n=ansible | TASK [repo_setup : Dump current-podified hash url={{ cifmw_repo_setup_dlrn_uri }}/{{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }}-{{ cifmw_repo_setup_branch }}/current-podified/delorean.repo.md5, dest={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5, mode=0644] *** 2026-01-22 12:05:06,651 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.419) 0:00:19.360 ****** 2026-01-22 12:05:06,651 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.420) 0:00:19.358 ****** 2026-01-22 12:05:06,666 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:06,674 p=33295 u=zuul n=ansible | TASK [repo_setup : Slurp current podified hash src={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5] *** 2026-01-22 12:05:06,674 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.023) 0:00:19.384 ****** 2026-01-22 12:05:06,674 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.023) 0:00:19.382 ****** 2026-01-22 12:05:06,689 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:06,697 p=33295 u=zuul n=ansible | TASK [repo_setup : Update the value of full_hash _repo_setup_json={{ _repo_setup_json | combine({'full_hash': _hash}, recursive=true) }}] *** 2026-01-22 12:05:06,697 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.022) 
0:00:19.406 ****** 2026-01-22 12:05:06,697 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.022) 0:00:19.404 ****** 2026-01-22 12:05:06,712 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:06,720 p=33295 u=zuul n=ansible | TASK [repo_setup : Export hashes facts for further use cifmw_repo_setup_full_hash={{ _repo_setup_json['full_hash'] }}, cifmw_repo_setup_commit_hash={{ _repo_setup_json['commit_hash'] }}, cifmw_repo_setup_distro_hash={{ _repo_setup_json['distro_hash'] }}, cifmw_repo_setup_extended_hash={{ _repo_setup_json['extended_hash'] }}, cifmw_repo_setup_dlrn_api_url={{ _repo_setup_json['dlrn_api_url'] }}, cifmw_repo_setup_dlrn_url={{ _repo_setup_json['dlrn_url'] }}, cifmw_repo_setup_release={{ _repo_setup_json['release'] }}, cacheable=True] *** 2026-01-22 12:05:06,720 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.022) 0:00:19.429 ****** 2026-01-22 12:05:06,720 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.022) 0:00:19.427 ****** 2026-01-22 12:05:06,746 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:05:06,753 p=33295 u=zuul n=ansible | TASK [repo_setup : Create download directory path={{ cifmw_repo_setup_rhos_release_path }}, state=directory, mode=0755] *** 2026-01-22 12:05:06,753 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.033) 0:00:19.462 ****** 2026-01-22 12:05:06,753 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.033) 0:00:19.460 ****** 2026-01-22 12:05:06,767 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:06,776 p=33295 u=zuul n=ansible | TASK [repo_setup : Print the URL to request msg={{ cifmw_repo_setup_rhos_release_rpm }}] *** 2026-01-22 12:05:06,776 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.022) 0:00:19.485 ****** 2026-01-22 12:05:06,776 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.022) 0:00:19.483 ****** 2026-01-22 12:05:06,790 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:06,798 p=33295 u=zuul n=ansible | TASK [Download the RPM name=krb_request] *************************************** 2026-01-22 12:05:06,798 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.022) 0:00:19.507 ****** 2026-01-22 12:05:06,798 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.022) 0:00:19.505 ****** 2026-01-22 12:05:06,812 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:06,820 p=33295 u=zuul n=ansible | TASK [repo_setup : Install RHOS Release tool name={{ cifmw_repo_setup_rhos_release_rpm if cifmw_repo_setup_rhos_release_rpm is not url else cifmw_krb_request_out.path }}, state=present, disable_gpg_check={{ cifmw_repo_setup_rhos_release_gpg_check | bool }}] *** 2026-01-22 12:05:06,820 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.021) 0:00:19.529 ****** 2026-01-22 12:05:06,820 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.022) 0:00:19.527 ****** 2026-01-22 12:05:06,833 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:06,841 p=33295 u=zuul n=ansible | TASK [repo_setup : Get rhos-release tool version _raw_params=rhos-release --version] *** 2026-01-22 12:05:06,841 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.021) 0:00:19.551 ****** 2026-01-22 12:05:06,841 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.021) 0:00:19.549 
****** 2026-01-22 12:05:06,861 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:06,871 p=33295 u=zuul n=ansible | TASK [repo_setup : Print rhos-release tool version msg={{ rr_version.stdout }}] *** 2026-01-22 12:05:06,871 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.030) 0:00:19.581 ****** 2026-01-22 12:05:06,872 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.030) 0:00:19.579 ****** 2026-01-22 12:05:06,891 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:06,900 p=33295 u=zuul n=ansible | TASK [repo_setup : Generate repos using rhos-release {{ cifmw_repo_setup_rhos_release_args }} _raw_params=rhos-release {{ cifmw_repo_setup_rhos_release_args }} \ -t {{ cifmw_repo_setup_output }}] *** 2026-01-22 12:05:06,900 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.028) 0:00:19.609 ****** 2026-01-22 12:05:06,900 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.028) 0:00:19.607 ****** 2026-01-22 12:05:06,914 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:06,922 p=33295 u=zuul n=ansible | TASK [repo_setup : Check for /etc/ci/mirror_info.sh path=/etc/ci/mirror_info.sh] *** 2026-01-22 12:05:06,922 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.021) 0:00:19.631 ****** 2026-01-22 12:05:06,922 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.022) 0:00:19.629 ****** 2026-01-22 12:05:07,114 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:05:07,122 p=33295 u=zuul n=ansible | TASK [repo_setup : Use RDO proxy mirrors chdir={{ cifmw_repo_setup_output }}, _raw_params=set -o pipefail source /etc/ci/mirror_info.sh sed -i -e "s|https://trunk.rdoproject.org|$NODEPOOL_RDO_PROXY|g" *.repo ] *** 2026-01-22 12:05:07,122 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.200) 0:00:19.831 ****** 2026-01-22 12:05:07,122 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.200) 0:00:19.829 ****** 2026-01-22 12:05:07,342 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:07,349 p=33295 u=zuul n=ansible | TASK [repo_setup : Use RDO CentOS mirrors (remove CentOS 10 conditional when Nodepool mirrors exist) chdir={{ cifmw_repo_setup_output }}, _raw_params=set -o pipefail source /etc/ci/mirror_info.sh sed -i -e "s|http://mirror.stream.centos.org|$NODEPOOL_CENTOS_MIRROR|g" *.repo ] *** 2026-01-22 12:05:07,350 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.227) 0:00:20.059 ****** 2026-01-22 12:05:07,350 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.227) 0:00:20.057 ****** 2026-01-22 12:05:07,567 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:07,576 p=33295 u=zuul n=ansible | TASK [repo_setup : Check for gating.repo file on content provider url=http://{{ content_provider_registry_ip }}:8766/gating.repo] *** 2026-01-22 12:05:07,576 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.226) 0:00:20.286 ****** 2026-01-22 12:05:07,577 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.226) 0:00:20.284 ****** 2026-01-22 12:05:07,598 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:07,605 p=33295 u=zuul n=ansible | TASK [repo_setup : Populate gating repo from content provider ip content=[gating-repo] baseurl=http://{{ content_provider_registry_ip }}:8766/ enabled=1 gpgcheck=0 priority=1 , dest={{ 
cifmw_repo_setup_output }}/gating.repo, mode=0644] *** 2026-01-22 12:05:07,605 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.028) 0:00:20.315 ****** 2026-01-22 12:05:07,605 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.028) 0:00:20.313 ****** 2026-01-22 12:05:07,632 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:07,644 p=33295 u=zuul n=ansible | TASK [repo_setup : Check for DLRN repo at the destination path={{ cifmw_repo_setup_output }}/delorean.repo] *** 2026-01-22 12:05:07,644 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.038) 0:00:20.353 ****** 2026-01-22 12:05:07,644 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.038) 0:00:20.351 ****** 2026-01-22 12:05:07,673 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:07,683 p=33295 u=zuul n=ansible | TASK [repo_setup : Lower the priority of DLRN repos to allow installation from gating repo path={{ cifmw_repo_setup_output }}/delorean.repo, regexp=priority=1, replace=priority=20] *** 2026-01-22 12:05:07,683 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.039) 0:00:20.393 ****** 2026-01-22 12:05:07,683 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.039) 0:00:20.391 ****** 2026-01-22 12:05:07,708 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:07,719 p=33295 u=zuul n=ansible | TASK [repo_setup : Check for DLRN component repo path={{ cifmw_repo_setup_output }}/{{ _comp_repo }}] *** 2026-01-22 12:05:07,719 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.036) 0:00:20.429 ****** 2026-01-22 12:05:07,720 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.036) 0:00:20.427 ****** 2026-01-22 12:05:07,745 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:07,752 p=33295 u=zuul n=ansible | TASK [repo_setup : Lower the priority of componennt repos to allow installation from gating repo path={{ cifmw_repo_setup_output }}//{{ _comp_repo }}, regexp=priority=1, replace=priority=2] *** 2026-01-22 12:05:07,753 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.033) 0:00:20.462 ****** 2026-01-22 12:05:07,753 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.033) 0:00:20.460 ****** 2026-01-22 12:05:07,782 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:07,789 p=33295 u=zuul n=ansible | TASK [repo_setup : Find existing repos from /etc/yum.repos.d directory paths=/etc/yum.repos.d/, patterns=*.repo, recurse=False] *** 2026-01-22 12:05:07,789 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.036) 0:00:20.499 ****** 2026-01-22 12:05:07,789 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.036) 0:00:20.497 ****** 2026-01-22 12:05:08,123 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:05:08,134 p=33295 u=zuul n=ansible | TASK [repo_setup : Remove existing repos from /etc/yum.repos.d directory path={{ item }}, state=absent] *** 2026-01-22 12:05:08,134 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:08 +0000 (0:00:00.344) 0:00:20.843 ****** 2026-01-22 12:05:08,134 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:08 +0000 (0:00:00.344) 0:00:20.841 ****** 2026-01-22 12:05:08,346 p=33295 u=zuul n=ansible | changed: [localhost] => (item=/etc/yum.repos.d/centos-addons.repo) 2026-01-22 12:05:08,530 p=33295 u=zuul n=ansible | changed: 
[localhost] => (item=/etc/yum.repos.d/centos.repo) 2026-01-22 12:05:08,539 p=33295 u=zuul n=ansible | TASK [repo_setup : Cleanup existing metadata _raw_params=dnf clean metadata] *** 2026-01-22 12:05:08,540 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:08 +0000 (0:00:00.405) 0:00:21.249 ****** 2026-01-22 12:05:08,540 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:08 +0000 (0:00:00.405) 0:00:21.247 ****** 2026-01-22 12:05:09,032 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:09,040 p=33295 u=zuul n=ansible | TASK [repo_setup : Copy generated repos to /etc/yum.repos.d directory mode=0755, remote_src=True, src={{ cifmw_repo_setup_output }}/, dest=/etc/yum.repos.d] *** 2026-01-22 12:05:09,040 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:09 +0000 (0:00:00.500) 0:00:21.750 ****** 2026-01-22 12:05:09,040 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:09 +0000 (0:00:00.500) 0:00:21.748 ****** 2026-01-22 12:05:09,299 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:09,313 p=33295 u=zuul n=ansible | TASK [ci_setup : Gather variables for each operating system _raw_params={{ item }}] *** 2026-01-22 12:05:09,313 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:09 +0000 (0:00:00.272) 0:00:22.022 ****** 2026-01-22 12:05:09,313 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:09 +0000 (0:00:00.272) 0:00:22.020 ****** 2026-01-22 12:05:09,349 p=33295 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/ci_setup/vars/redhat.yml) 2026-01-22 12:05:09,358 p=33295 u=zuul n=ansible | TASK [ci_setup : List packages to install var=cifmw_ci_setup_packages] ********* 2026-01-22 12:05:09,358 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:09 +0000 (0:00:00.044) 0:00:22.067 ****** 2026-01-22 12:05:09,358 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:09 +0000 (0:00:00.044) 0:00:22.065 ****** 2026-01-22 12:05:09,375 p=33295 u=zuul n=ansible | ok: [localhost] => cifmw_ci_setup_packages: - bash-completion - ca-certificates - git-core - make - tar - tmux - python3-pip 2026-01-22 12:05:09,382 p=33295 u=zuul n=ansible | TASK [ci_setup : Install needed packages name={{ cifmw_ci_setup_packages }}, state=latest] *** 2026-01-22 12:05:09,382 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:09 +0000 (0:00:00.024) 0:00:22.092 ****** 2026-01-22 12:05:09,382 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:09 +0000 (0:00:00.024) 0:00:22.090 ****** 2026-01-22 12:05:52,717 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:52,732 p=33295 u=zuul n=ansible | TASK [ci_setup : Gather version of openshift client _raw_params=oc version --client -o yaml] *** 2026-01-22 12:05:52,732 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:52 +0000 (0:00:43.349) 0:01:05.441 ****** 2026-01-22 12:05:52,732 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:52 +0000 (0:00:43.349) 0:01:05.439 ****** 2026-01-22 12:05:52,914 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:05:52,922 p=33295 u=zuul n=ansible | TASK [ci_setup : Ensure openshift client install path is present path={{ cifmw_ci_setup_oc_install_path }}, state=directory, mode=0755] *** 2026-01-22 12:05:52,922 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:52 +0000 (0:00:00.190) 0:01:05.631 ****** 2026-01-22 12:05:52,922 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:52 +0000 (0:00:00.190) 0:01:05.629 ****** 2026-01-22 12:05:53,104 p=33295 
u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:53,112 p=33295 u=zuul n=ansible | TASK [ci_setup : Install openshift client src={{ cifmw_ci_setup_openshift_client_download_uri }}/{{ cifmw_ci_setup_openshift_client_version }}/openshift-client-linux.tar.gz, dest={{ cifmw_ci_setup_oc_install_path }}, remote_src=True, mode=0755, creates={{ cifmw_ci_setup_oc_install_path }}/oc] *** 2026-01-22 12:05:53,113 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:53 +0000 (0:00:00.190) 0:01:05.822 ****** 2026-01-22 12:05:53,113 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:53 +0000 (0:00:00.190) 0:01:05.820 ****** 2026-01-22 12:05:58,406 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:58,414 p=33295 u=zuul n=ansible | TASK [ci_setup : Add the OC path to cifmw_path if needed cifmw_path={{ cifmw_ci_setup_oc_install_path }}:{{ ansible_env.PATH }}, cacheable=True] *** 2026-01-22 12:05:58,414 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:58 +0000 (0:00:05.301) 0:01:11.123 ****** 2026-01-22 12:05:58,414 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:58 +0000 (0:00:05.301) 0:01:11.121 ****** 2026-01-22 12:05:58,440 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:58,448 p=33295 u=zuul n=ansible | TASK [ci_setup : Create completion file] *************************************** 2026-01-22 12:05:58,448 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:58 +0000 (0:00:00.033) 0:01:11.157 ****** 2026-01-22 12:05:58,448 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:58 +0000 (0:00:00.033) 0:01:11.155 ****** 2026-01-22 12:05:58,765 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:58,773 p=33295 u=zuul n=ansible | TASK [ci_setup : Source completion from within .bashrc create=True, mode=0644, path={{ ansible_user_dir }}/.bashrc, block=if [ -f ~/.oc_completion ]; then source ~/.oc_completion fi] *** 2026-01-22 12:05:58,773 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:58 +0000 (0:00:00.325) 0:01:11.482 ****** 2026-01-22 12:05:58,773 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:58 +0000 (0:00:00.325) 0:01:11.480 ****** 2026-01-22 12:05:59,083 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:59,091 p=33295 u=zuul n=ansible | TASK [ci_setup : Check rhsm status _raw_params=subscription-manager status] **** 2026-01-22 12:05:59,091 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.317) 0:01:11.800 ****** 2026-01-22 12:05:59,091 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.317) 0:01:11.798 ****** 2026-01-22 12:05:59,107 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:59,115 p=33295 u=zuul n=ansible | TASK [ci_setup : Gather the repos to be enabled _repos={{ cifmw_ci_setup_rhel_rhsm_default_repos + (cifmw_ci_setup_rhel_rhsm_extra_repos | default([])) }}] *** 2026-01-22 12:05:59,115 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.023) 0:01:11.824 ****** 2026-01-22 12:05:59,115 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.023) 0:01:11.822 ****** 2026-01-22 12:05:59,130 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:59,137 p=33295 u=zuul n=ansible | TASK [ci_setup : Enabling the required repositories. 
name={{ item }}, state={{ rhsm_repo_state | default('enabled') }}] *** 2026-01-22 12:05:59,137 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.022) 0:01:11.847 ****** 2026-01-22 12:05:59,137 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.022) 0:01:11.845 ****** 2026-01-22 12:05:59,153 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:59,160 p=33295 u=zuul n=ansible | TASK [ci_setup : Get current /etc/redhat-release _raw_params=cat /etc/redhat-release] *** 2026-01-22 12:05:59,160 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.023) 0:01:11.870 ****** 2026-01-22 12:05:59,160 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.023) 0:01:11.868 ****** 2026-01-22 12:05:59,176 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:59,183 p=33295 u=zuul n=ansible | TASK [ci_setup : Print current /etc/redhat-release msg={{ _current_rh_release.stdout }}] *** 2026-01-22 12:05:59,183 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.022) 0:01:11.893 ****** 2026-01-22 12:05:59,183 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.022) 0:01:11.891 ****** 2026-01-22 12:05:59,198 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:59,206 p=33295 u=zuul n=ansible | TASK [ci_setup : Ensure the repos are enabled in the system using yum name={{ item.name }}, baseurl={{ item.baseurl }}, description={{ item.description | default(item.name) }}, gpgcheck={{ item.gpgcheck | default(false) }}, enabled=True, state={{ yum_repo_state | default('present') }}] *** 2026-01-22 12:05:59,206 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.022) 0:01:11.915 ****** 2026-01-22 12:05:59,206 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.022) 0:01:11.913 ****** 2026-01-22 12:05:59,226 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:59,233 p=33295 u=zuul n=ansible | TASK [ci_setup : Manage directories path={{ item }}, state={{ directory_state }}, mode=0755, owner={{ ansible_user_id }}, group={{ ansible_user_id }}] *** 2026-01-22 12:05:59,233 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.027) 0:01:11.943 ****** 2026-01-22 12:05:59,234 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.027) 0:01:11.941 ****** 2026-01-22 12:05:59,460 p=33295 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/manifests/openstack/cr) 2026-01-22 12:05:59,645 p=33295 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/logs) 2026-01-22 12:05:59,835 p=33295 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/tmp) 2026-01-22 12:06:00,045 p=33295 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/volumes) 2026-01-22 12:06:00,239 p=33295 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/parameters) 2026-01-22 12:06:00,256 p=33295 u=zuul n=ansible | TASK [Prepare install_yamls make targets name=install_yamls, apply={'tags': ['bootstrap']}] *** 2026-01-22 12:06:00,257 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:00 +0000 (0:00:01.023) 0:01:12.966 ****** 2026-01-22 12:06:00,257 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:00 +0000 (0:00:01.023) 0:01:12.964 ****** 2026-01-22 12:06:00,392 p=33295 u=zuul n=ansible | TASK [install_yamls : Ensure directories exist path={{ 
item }}, state=directory, mode=0755] *** 2026-01-22 12:06:00,393 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:00 +0000 (0:00:00.135) 0:01:13.102 ****** 2026-01-22 12:06:00,393 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:00 +0000 (0:00:00.135) 0:01:13.100 ****** 2026-01-22 12:06:00,592 p=33295 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts) 2026-01-22 12:06:00,772 p=33295 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/roles/install_yamls_makes/tasks) 2026-01-22 12:06:00,961 p=33295 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/parameters) 2026-01-22 12:06:00,970 p=33295 u=zuul n=ansible | TASK [Create variables with local repos based on Zuul items name=install_yamls, tasks_from=zuul_set_operators_repo.yml] *** 2026-01-22 12:06:00,971 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:00 +0000 (0:00:00.578) 0:01:13.680 ****** 2026-01-22 12:06:00,971 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:00 +0000 (0:00:00.578) 0:01:13.678 ****** 2026-01-22 12:06:01,009 p=33295 u=zuul n=ansible | TASK [install_yamls : Set fact with local repos based on Zuul items cifmw_install_yamls_operators_repo={{ cifmw_install_yamls_operators_repo | default({}) | combine(_repo_operator_info | items2dict) }}] *** 2026-01-22 12:06:01,010 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.038) 0:01:13.719 ****** 2026-01-22 12:06:01,010 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.038) 0:01:13.717 ****** 2026-01-22 12:06:01,035 p=33295 u=zuul n=ansible | skipping: [localhost] => (item={'branch': 'main', 'change': '1202', 'change_url': 'https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202', 'commit_id': 'a6dda39287a9d88f8d44f99969c3909ff61d8792', 'patchset': 'a6dda39287a9d88f8d44f99969c3909ff61d8792', 'project': {'canonical_hostname': 'github.com', 'canonical_name': 'github.com/openstack-k8s-operators/data-plane-adoption', 'name': 'openstack-k8s-operators/data-plane-adoption', 'short_name': 'data-plane-adoption', 'src_dir': 'src/github.com/openstack-k8s-operators/data-plane-adoption'}, 'topic': None}) 2026-01-22 12:06:01,036 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:01,043 p=33295 u=zuul n=ansible | TASK [install_yamls : Print helpful data for debugging msg=_repo_operator_name: {{ _repo_operator_name }} _repo_operator_info: {{ _repo_operator_info }} cifmw_install_yamls_operators_repo: {{ cifmw_install_yamls_operators_repo }} ] *** 2026-01-22 12:06:01,043 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.033) 0:01:13.753 ****** 2026-01-22 12:06:01,043 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.033) 0:01:13.751 ****** 2026-01-22 12:06:01,070 p=33295 u=zuul n=ansible | skipping: [localhost] => (item={'branch': 'main', 'change': '1202', 'change_url': 'https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202', 'commit_id': 'a6dda39287a9d88f8d44f99969c3909ff61d8792', 'patchset': 'a6dda39287a9d88f8d44f99969c3909ff61d8792', 'project': {'canonical_hostname': 'github.com', 'canonical_name': 'github.com/openstack-k8s-operators/data-plane-adoption', 'name': 'openstack-k8s-operators/data-plane-adoption', 'short_name': 'data-plane-adoption', 'src_dir': 'src/github.com/openstack-k8s-operators/data-plane-adoption'}, 'topic': None}) 2026-01-22 12:06:01,071 p=33295 u=zuul n=ansible | skipping: [localhost] 
2026-01-22 12:06:01,085 p=33295 u=zuul n=ansible | TASK [Customize install_yamls devsetup vars if needed name=install_yamls, tasks_from=customize_devsetup_vars.yml] *** 2026-01-22 12:06:01,085 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.041) 0:01:13.794 ****** 2026-01-22 12:06:01,085 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.041) 0:01:13.792 ****** 2026-01-22 12:06:01,129 p=33295 u=zuul n=ansible | TASK [install_yamls : Update opm_version in install_yamls devsetup/vars/default.yaml path={{ cifmw_install_yamls_repo }}/devsetup/vars/default.yaml, regexp=^opm_version:, line=opm_version: {{ cifmw_install_yamls_opm_version }}, state=present] *** 2026-01-22 12:06:01,129 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.043) 0:01:13.838 ****** 2026-01-22 12:06:01,129 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.043) 0:01:13.836 ****** 2026-01-22 12:06:01,149 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:01,157 p=33295 u=zuul n=ansible | TASK [install_yamls : Update sdk_version in install_yamls devsetup/vars/default.yaml path={{ cifmw_install_yamls_repo }}/devsetup/vars/default.yaml, regexp=^sdk_version:, line=sdk_version: {{ cifmw_install_yamls_sdk_version }}, state=present] *** 2026-01-22 12:06:01,157 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.028) 0:01:13.866 ****** 2026-01-22 12:06:01,157 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.028) 0:01:13.864 ****** 2026-01-22 12:06:01,179 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:01,186 p=33295 u=zuul n=ansible | TASK [install_yamls : Update go_version in install_yamls devsetup/vars/default.yaml path={{ cifmw_install_yamls_repo }}/devsetup/vars/default.yaml, regexp=^go_version:, line=go_version: {{ cifmw_install_yamls_go_version }}, state=present] *** 2026-01-22 12:06:01,186 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.029) 0:01:13.896 ****** 2026-01-22 12:06:01,187 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.029) 0:01:13.894 ****** 2026-01-22 12:06:01,207 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:01,214 p=33295 u=zuul n=ansible | TASK [install_yamls : Update kustomize_version in install_yamls devsetup/vars/default.yaml path={{ cifmw_install_yamls_repo }}/devsetup/vars/default.yaml, regexp=^kustomize_version:, line=kustomize_version: {{ cifmw_install_yamls_kustomize_version }}, state=present] *** 2026-01-22 12:06:01,214 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.027) 0:01:13.923 ****** 2026-01-22 12:06:01,214 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.027) 0:01:13.921 ****** 2026-01-22 12:06:01,234 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:01,250 p=33295 u=zuul n=ansible | TASK [install_yamls : Compute the cifmw_install_yamls_vars final value _install_yamls_override_vars={{ _install_yamls_override_vars | default({}) | combine(item, recursive=True) }}] *** 2026-01-22 12:06:01,250 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.036) 0:01:13.960 ****** 2026-01-22 12:06:01,250 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.036) 0:01:13.958 ****** 2026-01-22 12:06:01,318 p=33295 u=zuul n=ansible | ok: [localhost] => (item={'BMO_SETUP': False}) 2026-01-22 12:06:01,327 p=33295 u=zuul n=ansible | TASK 
[install_yamls : Set environment override cifmw_install_yamls_environment fact cifmw_install_yamls_environment={{ _install_yamls_override_vars.keys() | map('upper') | zip(_install_yamls_override_vars.values()) | items2dict(key_name=0, value_name=1) | combine({ 'OUT': cifmw_install_yamls_manifests_dir, 'OUTPUT_DIR': cifmw_install_yamls_edpm_dir, 'CHECKOUT_FROM_OPENSTACK_REF': cifmw_install_yamls_checkout_openstack_ref, 'OPENSTACK_K8S_BRANCH': (zuul is defined and not zuul.branch |regex_search('master|antelope|rhos')) | ternary(zuul.branch, 'main') }) | combine(install_yamls_operators_repos) }}, cacheable=True] *** 2026-01-22 12:06:01,328 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.077) 0:01:14.037 ****** 2026-01-22 12:06:01,328 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.077) 0:01:14.035 ****** 2026-01-22 12:06:01,365 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:01,371 p=33295 u=zuul n=ansible | TASK [install_yamls : Get environment structure base_path={{ cifmw_install_yamls_repo }}] *** 2026-01-22 12:06:01,371 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.043) 0:01:14.081 ****** 2026-01-22 12:06:01,372 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.043) 0:01:14.079 ****** 2026-01-22 12:06:01,944 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:01,955 p=33295 u=zuul n=ansible | TASK [install_yamls : Ensure Output directory exists path={{ cifmw_install_yamls_out_dir }}, state=directory, mode=0755] *** 2026-01-22 12:06:01,955 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.583) 0:01:14.665 ****** 2026-01-22 12:06:01,955 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.583) 0:01:14.663 ****** 2026-01-22 12:06:02,150 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:02,157 p=33295 u=zuul n=ansible | TASK [install_yamls : Ensure user cifmw_install_yamls_vars contains existing Makefile variables that=_cifmw_install_yamls_unmatched_vars | length == 0, msg=cifmw_install_yamls_vars contains a variable that is not defined in install_yamls Makefile nor cifmw_install_yamls_whitelisted_vars: {{ _cifmw_install_yamls_unmatched_vars | join(', ')}}, quiet=True] *** 2026-01-22 12:06:02,158 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.202) 0:01:14.867 ****** 2026-01-22 12:06:02,158 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.202) 0:01:14.865 ****** 2026-01-22 12:06:02,197 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:02,211 p=33295 u=zuul n=ansible | TASK [install_yamls : Generate /home/zuul/ci-framework-data/artifacts/install_yamls.sh dest={{ cifmw_install_yamls_out_dir }}/{{ cifmw_install_yamls_envfile }}, content={% for k,v in cifmw_install_yamls_environment.items() %} export {{ k }}={{ v }} {% endfor %}, mode=0644] *** 2026-01-22 12:06:02,211 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.053) 0:01:14.920 ****** 2026-01-22 12:06:02,211 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.053) 0:01:14.918 ****** 2026-01-22 12:06:02,636 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:02,643 p=33295 u=zuul n=ansible | TASK [install_yamls : Set install_yamls default values cifmw_install_yamls_defaults={{ get_makefiles_env_output.makefiles_values | combine(cifmw_install_yamls_environment) }}, cacheable=True] *** 2026-01-22 12:06:02,643 p=33295 u=zuul 
n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.432) 0:01:15.353 ****** 2026-01-22 12:06:02,643 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.432) 0:01:15.351 ****** 2026-01-22 12:06:02,669 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:02,678 p=33295 u=zuul n=ansible | TASK [install_yamls : Show the env structure var=cifmw_install_yamls_environment] *** 2026-01-22 12:06:02,678 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.034) 0:01:15.387 ****** 2026-01-22 12:06:02,678 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.034) 0:01:15.385 ****** 2026-01-22 12:06:02,698 p=33295 u=zuul n=ansible | ok: [localhost] => cifmw_install_yamls_environment: BMO_SETUP: false CHECKOUT_FROM_OPENSTACK_REF: 'true' OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm 2026-01-22 12:06:02,708 p=33295 u=zuul n=ansible | TASK [install_yamls : Show the env structure defaults var=cifmw_install_yamls_defaults] *** 2026-01-22 12:06:02,708 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.029) 0:01:15.417 ****** 2026-01-22 12:06:02,708 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.029) 0:01:15.415 ****** 2026-01-22 12:06:02,741 p=33295 u=zuul n=ansible | ok: [localhost] => cifmw_install_yamls_defaults: ADOPTED_EXTERNAL_NETWORK: 172.21.1.0/24 ADOPTED_INTERNALAPI_NETWORK: 172.17.1.0/24 ADOPTED_STORAGEMGMT_NETWORK: 172.20.1.0/24 ADOPTED_STORAGE_NETWORK: 172.18.1.0/24 ADOPTED_TENANT_NETWORK: 172.9.1.0/24 ANSIBLEEE: config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_BRANCH: main ANSIBLEEE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-operator-index:latest ANSIBLEEE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/kuttl-test.yaml ANSIBLEEE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/test/kuttl/tests ANSIBLEEE_KUTTL_NAMESPACE: ansibleee-kuttl-tests ANSIBLEEE_REPO: https://github.com/openstack-k8s-operators/openstack-ansibleee-operator ANSIBLEE_COMMIT_HASH: '' BARBICAN: config/samples/barbican_v1beta1_barbican.yaml BARBICAN_BRANCH: main BARBICAN_COMMIT_HASH: '' BARBICAN_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/config/samples/barbican_v1beta1_barbican.yaml BARBICAN_DEPL_IMG: unused BARBICAN_IMG: quay.io/openstack-k8s-operators/barbican-operator-index:latest BARBICAN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/kuttl-test.yaml BARBICAN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/test/kuttl/tests BARBICAN_KUTTL_NAMESPACE: barbican-kuttl-tests BARBICAN_REPO: https://github.com/openstack-k8s-operators/barbican-operator.git BARBICAN_SERVICE_ENABLED: 'true' BARBICAN_SIMPLE_CRYPTO_ENCRYPTION_KEY: sEFmdFjDUqRM2VemYslV5yGNWjokioJXsg8Nrlc3drU= BAREMETAL_BRANCH: main BAREMETAL_COMMIT_HASH: '' BAREMETAL_IMG: quay.io/openstack-k8s-operators/openstack-baremetal-operator-index:latest BAREMETAL_OS_CONTAINER_IMG: '' BAREMETAL_OS_IMG: '' BAREMETAL_OS_IMG_TYPE: '' BAREMETAL_REPO: https://github.com/openstack-k8s-operators/openstack-baremetal-operator.git BAREMETAL_TIMEOUT: 20m BASH_IMG: 
quay.io/openstack-k8s-operators/bash:latest BGP_ASN: '64999' BGP_LEAF_1: 100.65.4.1 BGP_LEAF_2: 100.64.4.1 BGP_OVN_ROUTING: 'false' BGP_PEER_ASN: '64999' BGP_SOURCE_IP: 172.30.4.2 BGP_SOURCE_IP6: f00d:f00d:f00d:f00d:f00d:f00d:f00d:42 BMAAS_BRIDGE_IPV4_PREFIX: 172.20.1.2/24 BMAAS_BRIDGE_IPV6_PREFIX: fd00:bbbb::2/64 BMAAS_INSTANCE_DISK_SIZE: '20' BMAAS_INSTANCE_MEMORY: '4096' BMAAS_INSTANCE_NAME_PREFIX: crc-bmaas BMAAS_INSTANCE_NET_MODEL: virtio BMAAS_INSTANCE_OS_VARIANT: centos-stream9 BMAAS_INSTANCE_VCPUS: '2' BMAAS_INSTANCE_VIRT_TYPE: kvm BMAAS_IPV4: 'true' BMAAS_IPV6: 'false' BMAAS_LIBVIRT_USER: sushyemu BMAAS_METALLB_ADDRESS_POOL: 172.20.1.64/26 BMAAS_METALLB_POOL_NAME: baremetal BMAAS_NETWORK_IPV4_PREFIX: 172.20.1.1/24 BMAAS_NETWORK_IPV6_PREFIX: fd00:bbbb::1/64 BMAAS_NETWORK_NAME: crc-bmaas BMAAS_NODE_COUNT: '1' BMAAS_OCP_INSTANCE_NAME: crc BMAAS_REDFISH_PASSWORD: password BMAAS_REDFISH_USERNAME: admin BMAAS_ROUTE_LIBVIRT_NETWORKS: crc-bmaas,crc,default BMAAS_SUSHY_EMULATOR_DRIVER: libvirt BMAAS_SUSHY_EMULATOR_IMAGE: quay.io/metal3-io/sushy-tools:latest BMAAS_SUSHY_EMULATOR_NAMESPACE: sushy-emulator BMAAS_SUSHY_EMULATOR_OS_CLIENT_CONFIG_FILE: /etc/openstack/clouds.yaml BMAAS_SUSHY_EMULATOR_OS_CLOUD: openstack BMH_NAMESPACE: openstack BMO_BRANCH: release-0.9 BMO_CLEANUP: 'true' BMO_COMMIT_HASH: '' BMO_IPA_BRANCH: stable/2024.1 BMO_IRONIC_HOST: 192.168.122.10 BMO_PROVISIONING_INTERFACE: '' BMO_REPO: https://github.com/metal3-io/baremetal-operator BMO_SETUP: false BMO_SETUP_ROUTE_REPLACE: 'true' BM_CTLPLANE_INTERFACE: enp1s0 BM_INSTANCE_MEMORY: '8192' BM_INSTANCE_NAME_PREFIX: edpm-compute-baremetal BM_INSTANCE_NAME_SUFFIX: '0' BM_NETWORK_NAME: default BM_NODE_COUNT: '1' BM_ROOT_PASSWORD: '' BM_ROOT_PASSWORD_SECRET: '' CEILOMETER_CENTRAL_DEPL_IMG: unused CEILOMETER_NOTIFICATION_DEPL_IMG: unused CEPH_BRANCH: release-1.15 CEPH_CLIENT: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/toolbox.yaml CEPH_COMMON: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/common.yaml CEPH_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/cluster-test.yaml CEPH_CRDS: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/crds.yaml CEPH_IMG: quay.io/ceph/demo:latest-squid CEPH_OP: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/operator-openshift.yaml CEPH_REPO: https://github.com/rook/rook.git CERTMANAGER_TIMEOUT: 300s CHECKOUT_FROM_OPENSTACK_REF: 'true' CINDER: config/samples/cinder_v1beta1_cinder.yaml CINDERAPI_DEPL_IMG: unused CINDERBKP_DEPL_IMG: unused CINDERSCH_DEPL_IMG: unused CINDERVOL_DEPL_IMG: unused CINDER_BRANCH: main CINDER_COMMIT_HASH: '' CINDER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/config/samples/cinder_v1beta1_cinder.yaml CINDER_IMG: quay.io/openstack-k8s-operators/cinder-operator-index:latest CINDER_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/kuttl-test.yaml CINDER_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/test/kuttl/tests CINDER_KUTTL_NAMESPACE: cinder-kuttl-tests CINDER_REPO: https://github.com/openstack-k8s-operators/cinder-operator.git CLEANUP_DIR_CMD: rm -Rf CRC_BGP_NIC_1_MAC: '52:54:00:11:11:11' CRC_BGP_NIC_2_MAC: '52:54:00:11:11:12' CRC_HTTPS_PROXY: '' CRC_HTTP_PROXY: '' CRC_STORAGE_NAMESPACE: crc-storage CRC_STORAGE_RETRIES: '3' CRC_URL: 
'''https://developers.redhat.com/content-gateway/rest/mirror/pub/openshift-v4/clients/crc/latest/crc-linux-amd64.tar.xz''' CRC_VERSION: latest DATAPLANE_ANSIBLE_SECRET: dataplane-ansible-ssh-private-key-secret DATAPLANE_ANSIBLE_USER: '' DATAPLANE_COMPUTE_IP: 192.168.122.100 DATAPLANE_CONTAINER_PREFIX: openstack DATAPLANE_CONTAINER_TAG: current-podified DATAPLANE_CUSTOM_SERVICE_RUNNER_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest DATAPLANE_DEFAULT_GW: 192.168.122.1 DATAPLANE_EXTRA_NOVA_CONFIG_FILE: /dev/null DATAPLANE_GROWVOLS_ARGS: /=8GB /tmp=1GB /home=1GB /var=100% DATAPLANE_KUSTOMIZE_SCENARIO: preprovisioned DATAPLANE_NETWORKER_IP: 192.168.122.200 DATAPLANE_NETWORK_INTERFACE_NAME: eth0 DATAPLANE_NOVA_NFS_PATH: '' DATAPLANE_NTP_SERVER: pool.ntp.org DATAPLANE_PLAYBOOK: osp.edpm.download_cache DATAPLANE_REGISTRY_URL: quay.io/podified-antelope-centos9 DATAPLANE_RUNNER_IMG: '' DATAPLANE_SERVER_ROLE: compute DATAPLANE_SSHD_ALLOWED_RANGES: '[''192.168.122.0/24'']' DATAPLANE_TIMEOUT: 30m DATAPLANE_TLS_ENABLED: 'true' DATAPLANE_TOTAL_NETWORKER_NODES: '1' DATAPLANE_TOTAL_NODES: '1' DBSERVICE: galera DESIGNATE: config/samples/designate_v1beta1_designate.yaml DESIGNATE_BRANCH: main DESIGNATE_COMMIT_HASH: '' DESIGNATE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/config/samples/designate_v1beta1_designate.yaml DESIGNATE_IMG: quay.io/openstack-k8s-operators/designate-operator-index:latest DESIGNATE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/kuttl-test.yaml DESIGNATE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/test/kuttl/tests DESIGNATE_KUTTL_NAMESPACE: designate-kuttl-tests DESIGNATE_REPO: https://github.com/openstack-k8s-operators/designate-operator.git DNSDATA: config/samples/network_v1beta1_dnsdata.yaml DNSDATA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsdata.yaml DNSMASQ: config/samples/network_v1beta1_dnsmasq.yaml DNSMASQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsmasq.yaml DNS_DEPL_IMG: unused DNS_DOMAIN: localdomain DOWNLOAD_TOOLS_SELECTION: all EDPM_ATTACH_EXTNET: 'true' EDPM_COMPUTE_ADDITIONAL_HOST_ROUTES: '''[]''' EDPM_COMPUTE_ADDITIONAL_NETWORKS: '''[]''' EDPM_COMPUTE_CELLS: '1' EDPM_COMPUTE_CEPH_ENABLED: 'true' EDPM_COMPUTE_CEPH_NOVA: 'true' EDPM_COMPUTE_DHCP_AGENT_ENABLED: 'true' EDPM_COMPUTE_SRIOV_ENABLED: 'true' EDPM_COMPUTE_SUFFIX: '0' EDPM_CONFIGURE_DEFAULT_ROUTE: 'true' EDPM_CONFIGURE_HUGEPAGES: 'false' EDPM_CONFIGURE_NETWORKING: 'true' EDPM_FIRSTBOOT_EXTRA: /tmp/edpm-firstboot-extra EDPM_NETWORKER_SUFFIX: '0' EDPM_TOTAL_NETWORKERS: '1' EDPM_TOTAL_NODES: '1' GALERA_REPLICAS: '' GENERATE_SSH_KEYS: 'true' GIT_CLONE_OPTS: '' GLANCE: config/samples/glance_v1beta1_glance.yaml GLANCEAPI_DEPL_IMG: unused GLANCE_BRANCH: main GLANCE_COMMIT_HASH: '' GLANCE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/config/samples/glance_v1beta1_glance.yaml GLANCE_IMG: quay.io/openstack-k8s-operators/glance-operator-index:latest GLANCE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/kuttl-test.yaml GLANCE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/test/kuttl/tests GLANCE_KUTTL_NAMESPACE: glance-kuttl-tests GLANCE_REPO: https://github.com/openstack-k8s-operators/glance-operator.git HEAT: 
config/samples/heat_v1beta1_heat.yaml HEATAPI_DEPL_IMG: unused HEATCFNAPI_DEPL_IMG: unused HEATENGINE_DEPL_IMG: unused HEAT_AUTH_ENCRYPTION_KEY: 767c3ed056cbaa3b9dfedb8c6f825bf0 HEAT_BRANCH: main HEAT_COMMIT_HASH: '' HEAT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/config/samples/heat_v1beta1_heat.yaml HEAT_IMG: quay.io/openstack-k8s-operators/heat-operator-index:latest HEAT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/kuttl-test.yaml HEAT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/test/kuttl/tests HEAT_KUTTL_NAMESPACE: heat-kuttl-tests HEAT_REPO: https://github.com/openstack-k8s-operators/heat-operator.git HEAT_SERVICE_ENABLED: 'true' HORIZON: config/samples/horizon_v1beta1_horizon.yaml HORIZON_BRANCH: main HORIZON_COMMIT_HASH: '' HORIZON_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/config/samples/horizon_v1beta1_horizon.yaml HORIZON_DEPL_IMG: unused HORIZON_IMG: quay.io/openstack-k8s-operators/horizon-operator-index:latest HORIZON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/kuttl-test.yaml HORIZON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/test/kuttl/tests HORIZON_KUTTL_NAMESPACE: horizon-kuttl-tests HORIZON_REPO: https://github.com/openstack-k8s-operators/horizon-operator.git INFRA_BRANCH: main INFRA_COMMIT_HASH: '' INFRA_IMG: quay.io/openstack-k8s-operators/infra-operator-index:latest INFRA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/kuttl-test.yaml INFRA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/test/kuttl/tests INFRA_KUTTL_NAMESPACE: infra-kuttl-tests INFRA_REPO: https://github.com/openstack-k8s-operators/infra-operator.git INSTALL_CERT_MANAGER: 'true' INSTALL_NMSTATE: true || false INSTALL_NNCP: true || false INTERNALAPI_HOST_ROUTES: '' IPV6_LAB_IPV4_NETWORK_IPADDRESS: 172.30.0.1/24 IPV6_LAB_IPV6_NETWORK_IPADDRESS: fd00:abcd:abcd:fc00::1/64 IPV6_LAB_LIBVIRT_STORAGE_POOL: default IPV6_LAB_MANAGE_FIREWALLD: 'true' IPV6_LAB_NAT64_HOST_IPV4: 172.30.0.2/24 IPV6_LAB_NAT64_HOST_IPV6: fd00:abcd:abcd:fc00::2/64 IPV6_LAB_NAT64_INSTANCE_NAME: nat64-router IPV6_LAB_NAT64_IPV6_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_NAT64_TAYGA_DYNAMIC_POOL: 192.168.255.0/24 IPV6_LAB_NAT64_TAYGA_IPV4: 192.168.255.1 IPV6_LAB_NAT64_TAYGA_IPV6: fd00:abcd:abcd:fc00::3 IPV6_LAB_NAT64_TAYGA_IPV6_PREFIX: fd00:abcd:abcd:fcff::/96 IPV6_LAB_NAT64_UPDATE_PACKAGES: 'false' IPV6_LAB_NETWORK_NAME: nat64 IPV6_LAB_SNO_CLUSTER_NETWORK: fd00:abcd:0::/48 IPV6_LAB_SNO_HOST_IP: fd00:abcd:abcd:fc00::11 IPV6_LAB_SNO_HOST_PREFIX: '64' IPV6_LAB_SNO_INSTANCE_NAME: sno IPV6_LAB_SNO_MACHINE_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_SNO_OCP_MIRROR_URL: https://mirror.openshift.com/pub/openshift-v4/clients/ocp IPV6_LAB_SNO_OCP_VERSION: latest-4.14 IPV6_LAB_SNO_SERVICE_NETWORK: fd00:abcd:abcd:fc03::/112 IPV6_LAB_SSH_PUB_KEY: /home/zuul/.ssh/id_rsa.pub IPV6_LAB_WORK_DIR: /home/zuul/.ipv6lab IRONIC: config/samples/ironic_v1beta1_ironic.yaml IRONICAPI_DEPL_IMG: unused IRONICCON_DEPL_IMG: unused IRONICINS_DEPL_IMG: unused IRONICNAG_DEPL_IMG: unused IRONICPXE_DEPL_IMG: unused IRONIC_BRANCH: main IRONIC_COMMIT_HASH: '' IRONIC_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/config/samples/ironic_v1beta1_ironic.yaml IRONIC_IMAGE: quay.io/metal3-io/ironic IRONIC_IMAGE_TAG: release-24.1 IRONIC_IMG: 
quay.io/openstack-k8s-operators/ironic-operator-index:latest IRONIC_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/kuttl-test.yaml IRONIC_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/test/kuttl/tests IRONIC_KUTTL_NAMESPACE: ironic-kuttl-tests IRONIC_REPO: https://github.com/openstack-k8s-operators/ironic-operator.git KEYSTONEAPI: config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_DEPL_IMG: unused KEYSTONE_BRANCH: main KEYSTONE_COMMIT_HASH: '' KEYSTONE_FEDERATION_CLIENT_SECRET: COX8bmlKAWn56XCGMrKQJj7dgHNAOl6f KEYSTONE_FEDERATION_CRYPTO_PASSPHRASE: openstack KEYSTONE_IMG: quay.io/openstack-k8s-operators/keystone-operator-index:latest KEYSTONE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/kuttl-test.yaml KEYSTONE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/test/kuttl/tests KEYSTONE_KUTTL_NAMESPACE: keystone-kuttl-tests KEYSTONE_REPO: https://github.com/openstack-k8s-operators/keystone-operator.git KUBEADMIN_PWD: '12345678' LIBVIRT_SECRET: libvirt-secret LOKI_DEPLOY_MODE: openshift-network LOKI_DEPLOY_NAMESPACE: netobserv LOKI_DEPLOY_SIZE: 1x.demo LOKI_NAMESPACE: openshift-operators-redhat LOKI_OPERATOR_GROUP: openshift-operators-redhat-loki LOKI_SUBSCRIPTION: loki-operator LVMS_CR: '1' MANILA: config/samples/manila_v1beta1_manila.yaml MANILAAPI_DEPL_IMG: unused MANILASCH_DEPL_IMG: unused MANILASHARE_DEPL_IMG: unused MANILA_BRANCH: main MANILA_COMMIT_HASH: '' MANILA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/config/samples/manila_v1beta1_manila.yaml MANILA_IMG: quay.io/openstack-k8s-operators/manila-operator-index:latest MANILA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/kuttl-test.yaml MANILA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/test/kuttl/tests MANILA_KUTTL_NAMESPACE: manila-kuttl-tests MANILA_REPO: https://github.com/openstack-k8s-operators/manila-operator.git MANILA_SERVICE_ENABLED: 'true' MARIADB: config/samples/mariadb_v1beta1_galera.yaml MARIADB_BRANCH: main MARIADB_CHAINSAW_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/config.yaml MARIADB_CHAINSAW_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/tests MARIADB_CHAINSAW_NAMESPACE: mariadb-chainsaw-tests MARIADB_COMMIT_HASH: '' MARIADB_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/config/samples/mariadb_v1beta1_galera.yaml MARIADB_DEPL_IMG: unused MARIADB_IMG: quay.io/openstack-k8s-operators/mariadb-operator-index:latest MARIADB_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/kuttl-test.yaml MARIADB_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/kuttl/tests MARIADB_KUTTL_NAMESPACE: mariadb-kuttl-tests MARIADB_REPO: https://github.com/openstack-k8s-operators/mariadb-operator.git MEMCACHED: config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_DEPL_IMG: unused METADATA_SHARED_SECRET: '1234567842' METALLB_IPV6_POOL: fd00:aaaa::80-fd00:aaaa::90 METALLB_POOL: 
192.168.122.80-192.168.122.90 MICROSHIFT: '0' NAMESPACE: openstack NETCONFIG: config/samples/network_v1beta1_netconfig.yaml NETCONFIG_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_netconfig.yaml NETCONFIG_DEPL_IMG: unused NETOBSERV_DEPLOY_NAMESPACE: netobserv NETOBSERV_NAMESPACE: openshift-netobserv-operator NETOBSERV_OPERATOR_GROUP: openshift-netobserv-operator-net NETOBSERV_SUBSCRIPTION: netobserv-operator NETWORK_BGP: 'false' NETWORK_DESIGNATE_ADDRESS_PREFIX: 172.28.0 NETWORK_DESIGNATE_EXT_ADDRESS_PREFIX: 172.50.0 NETWORK_INTERNALAPI_ADDRESS_PREFIX: 172.17.0 NETWORK_ISOLATION: 'true' NETWORK_ISOLATION_INSTANCE_NAME: crc NETWORK_ISOLATION_IPV4: 'true' NETWORK_ISOLATION_IPV4_ADDRESS: 172.16.1.1/24 NETWORK_ISOLATION_IPV4_NAT: 'true' NETWORK_ISOLATION_IPV6: 'false' NETWORK_ISOLATION_IPV6_ADDRESS: fd00:aaaa::1/64 NETWORK_ISOLATION_IP_ADDRESS: 192.168.122.10 NETWORK_ISOLATION_MAC: '52:54:00:11:11:10' NETWORK_ISOLATION_NETWORK_NAME: net-iso NETWORK_ISOLATION_NET_NAME: default NETWORK_ISOLATION_USE_DEFAULT_NETWORK: 'true' NETWORK_MTU: '1500' NETWORK_STORAGEMGMT_ADDRESS_PREFIX: 172.20.0 NETWORK_STORAGE_ADDRESS_PREFIX: 172.18.0 NETWORK_STORAGE_MACVLAN: '' NETWORK_TENANT_ADDRESS_PREFIX: 172.19.0 NETWORK_VLAN_START: '20' NETWORK_VLAN_STEP: '1' NEUTRONAPI: config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_DEPL_IMG: unused NEUTRON_BRANCH: main NEUTRON_COMMIT_HASH: '' NEUTRON_IMG: quay.io/openstack-k8s-operators/neutron-operator-index:latest NEUTRON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/kuttl-test.yaml NEUTRON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/test/kuttl/tests NEUTRON_KUTTL_NAMESPACE: neutron-kuttl-tests NEUTRON_REPO: https://github.com/openstack-k8s-operators/neutron-operator.git NFS_HOME: /home/nfs NMSTATE_NAMESPACE: openshift-nmstate NMSTATE_OPERATOR_GROUP: openshift-nmstate-tn6k8 NMSTATE_SUBSCRIPTION: kubernetes-nmstate-operator NNCP_ADDITIONAL_HOST_ROUTES: '' NNCP_BGP_1_INTERFACE: enp7s0 NNCP_BGP_1_IP_ADDRESS: 100.65.4.2 NNCP_BGP_2_INTERFACE: enp8s0 NNCP_BGP_2_IP_ADDRESS: 100.64.4.2 NNCP_BRIDGE: ospbr NNCP_CLEANUP_TIMEOUT: 120s NNCP_CTLPLANE_IPV6_ADDRESS_PREFIX: 'fd00:aaaa::' NNCP_CTLPLANE_IPV6_ADDRESS_SUFFIX: '10' NNCP_CTLPLANE_IP_ADDRESS_PREFIX: 192.168.122 NNCP_CTLPLANE_IP_ADDRESS_SUFFIX: '10' NNCP_DNS_SERVER: 192.168.122.1 NNCP_DNS_SERVER_IPV6: fd00:aaaa::1 NNCP_GATEWAY: 192.168.122.1 NNCP_GATEWAY_IPV6: fd00:aaaa::1 NNCP_INTERFACE: enp6s0 NNCP_NODES: '' NNCP_TIMEOUT: 240s NOVA: config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_BRANCH: main NOVA_COMMIT_HASH: '' NOVA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/nova-operator/config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_IMG: quay.io/openstack-k8s-operators/nova-operator-index:latest NOVA_REPO: https://github.com/openstack-k8s-operators/nova-operator.git NUMBER_OF_INSTANCES: '1' OCP_NETWORK_NAME: crc OCTAVIA: config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_BRANCH: main OCTAVIA_COMMIT_HASH: '' OCTAVIA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_IMG: quay.io/openstack-k8s-operators/octavia-operator-index:latest OCTAVIA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/kuttl-test.yaml 
OCTAVIA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/test/kuttl/tests OCTAVIA_KUTTL_NAMESPACE: octavia-kuttl-tests OCTAVIA_REPO: https://github.com/openstack-k8s-operators/octavia-operator.git OKD: 'false' OPENSTACK_BRANCH: main OPENSTACK_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-bundle:latest OPENSTACK_COMMIT_HASH: '' OPENSTACK_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_CRDS_DIR: openstack_crds OPENSTACK_CTLPLANE: config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_IMG: quay.io/openstack-k8s-operators/openstack-operator-index:latest OPENSTACK_K8S_BRANCH: main OPENSTACK_K8S_TAG: latest OPENSTACK_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/kuttl-test.yaml OPENSTACK_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/test/kuttl/tests OPENSTACK_KUTTL_NAMESPACE: openstack-kuttl-tests OPENSTACK_NEUTRON_CUSTOM_CONF: '' OPENSTACK_REPO: https://github.com/openstack-k8s-operators/openstack-operator.git OPENSTACK_STORAGE_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-storage-bundle:latest OPERATOR_BASE_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator OPERATOR_CHANNEL: '' OPERATOR_NAMESPACE: openstack-operators OPERATOR_SOURCE: '' OPERATOR_SOURCE_NAMESPACE: '' OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm OVNCONTROLLER: config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_NMAP: 'true' OVNDBS: config/samples/ovn_v1beta1_ovndbcluster.yaml OVNDBS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovndbcluster.yaml OVNNORTHD: config/samples/ovn_v1beta1_ovnnorthd.yaml OVNNORTHD_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovnnorthd.yaml OVN_BRANCH: main OVN_COMMIT_HASH: '' OVN_IMG: quay.io/openstack-k8s-operators/ovn-operator-index:latest OVN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/kuttl-test.yaml OVN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/test/kuttl/tests OVN_KUTTL_NAMESPACE: ovn-kuttl-tests OVN_REPO: https://github.com/openstack-k8s-operators/ovn-operator.git PASSWORD: '12345678' PLACEMENTAPI: config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_DEPL_IMG: unused PLACEMENT_BRANCH: main PLACEMENT_COMMIT_HASH: '' PLACEMENT_IMG: quay.io/openstack-k8s-operators/placement-operator-index:latest PLACEMENT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/kuttl-test.yaml PLACEMENT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/test/kuttl/tests PLACEMENT_KUTTL_NAMESPACE: placement-kuttl-tests PLACEMENT_REPO: https://github.com/openstack-k8s-operators/placement-operator.git PULL_SECRET: /home/zuul/src/review.rdoproject.org/rdo-jobs/playbooks/data_plane_adoption/pull-secret.txt RABBITMQ: docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_BRANCH: 
patches RABBITMQ_COMMIT_HASH: '' RABBITMQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rabbitmq-operator/docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_DEPL_IMG: unused RABBITMQ_IMG: quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest RABBITMQ_REPO: https://github.com/openstack-k8s-operators/rabbitmq-cluster-operator.git REDHAT_OPERATORS: 'false' REDIS: config/samples/redis_v1beta1_redis.yaml REDIS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator-redis/config/samples/redis_v1beta1_redis.yaml REDIS_DEPL_IMG: unused RH_REGISTRY_PWD: '' RH_REGISTRY_USER: '' SECRET: osp-secret SG_CORE_DEPL_IMG: unused STANDALONE_COMPUTE_DRIVER: libvirt STANDALONE_EXTERNAL_NET_PREFFIX: 172.21.0 STANDALONE_INTERNALAPI_NET_PREFIX: 172.17.0 STANDALONE_STORAGEMGMT_NET_PREFIX: 172.20.0 STANDALONE_STORAGE_NET_PREFIX: 172.18.0 STANDALONE_TENANT_NET_PREFIX: 172.19.0 STORAGEMGMT_HOST_ROUTES: '' STORAGE_CLASS: local-storage STORAGE_HOST_ROUTES: '' SWIFT: config/samples/swift_v1beta1_swift.yaml SWIFT_BRANCH: main SWIFT_COMMIT_HASH: '' SWIFT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/config/samples/swift_v1beta1_swift.yaml SWIFT_IMG: quay.io/openstack-k8s-operators/swift-operator-index:latest SWIFT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/kuttl-test.yaml SWIFT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/test/kuttl/tests SWIFT_KUTTL_NAMESPACE: swift-kuttl-tests SWIFT_REPO: https://github.com/openstack-k8s-operators/swift-operator.git TELEMETRY: config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_BRANCH: main TELEMETRY_COMMIT_HASH: '' TELEMETRY_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_IMG: quay.io/openstack-k8s-operators/telemetry-operator-index:latest TELEMETRY_KUTTL_BASEDIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator TELEMETRY_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/kuttl-test.yaml TELEMETRY_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/test/kuttl/suites TELEMETRY_KUTTL_NAMESPACE: telemetry-kuttl-tests TELEMETRY_KUTTL_RELPATH: test/kuttl/suites TELEMETRY_REPO: https://github.com/openstack-k8s-operators/telemetry-operator.git TENANT_HOST_ROUTES: '' TIMEOUT: 300s TLS_ENABLED: 'false' tripleo_deploy: 'export REGISTRY_USER:' 2026-01-22 12:06:02,752 p=33295 u=zuul n=ansible | TASK [install_yamls : Generate make targets install_yamls_path={{ cifmw_install_yamls_repo }}, output_directory={{ cifmw_install_yamls_tasks_out }}] *** 2026-01-22 12:06:02,753 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.044) 0:01:15.462 ****** 2026-01-22 12:06:02,753 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.044) 0:01:15.460 ****** 2026-01-22 12:06:03,097 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:03,107 p=33295 u=zuul n=ansible | TASK [install_yamls : Debug generate_make module var=cifmw_generate_makes] ***** 2026-01-22 12:06:03,107 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:03 +0000 (0:00:00.354) 0:01:15.816 ****** 2026-01-22 12:06:03,107 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:03 +0000 (0:00:00.354) 0:01:15.814 ****** 2026-01-22 12:06:03,130 p=33295 u=zuul n=ansible | ok: [localhost] => cifmw_generate_makes: changed: 
false debug: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/Makefile: - all - help - cleanup - deploy_cleanup - wait - crc_storage - crc_storage_cleanup - crc_storage_release - crc_storage_with_retries - crc_storage_cleanup_with_retries - operator_namespace - namespace - namespace_cleanup - input - input_cleanup - crc_bmo_setup - crc_bmo_cleanup - openstack_prep - openstack - openstack_wait - openstack_init - openstack_cleanup - openstack_repo - openstack_deploy_prep - openstack_deploy - openstack_wait_deploy - openstack_deploy_cleanup - openstack_update_run - update_services - update_system - openstack_patch_version - edpm_deploy_generate_keys - edpm_patch_ansible_runner_image - edpm_deploy_prep - edpm_deploy_cleanup - edpm_deploy - edpm_deploy_baremetal_prep - edpm_deploy_baremetal - edpm_wait_deploy_baremetal - edpm_wait_deploy - edpm_register_dns - edpm_nova_discover_hosts - openstack_crds - openstack_crds_cleanup - edpm_deploy_networker_prep - edpm_deploy_networker_cleanup - edpm_deploy_networker - infra_prep - infra - infra_cleanup - dns_deploy_prep - dns_deploy - dns_deploy_cleanup - netconfig_deploy_prep - netconfig_deploy - netconfig_deploy_cleanup - memcached_deploy_prep - memcached_deploy - memcached_deploy_cleanup - keystone_prep - keystone - keystone_cleanup - keystone_deploy_prep - keystone_deploy - keystone_deploy_cleanup - barbican_prep - barbican - barbican_cleanup - barbican_deploy_prep - barbican_deploy - barbican_deploy_validate - barbican_deploy_cleanup - mariadb - mariadb_cleanup - mariadb_deploy_prep - mariadb_deploy - mariadb_deploy_cleanup - placement_prep - placement - placement_cleanup - placement_deploy_prep - placement_deploy - placement_deploy_cleanup - glance_prep - glance - glance_cleanup - glance_deploy_prep - glance_deploy - glance_deploy_cleanup - ovn_prep - ovn - ovn_cleanup - ovn_deploy_prep - ovn_deploy - ovn_deploy_cleanup - neutron_prep - neutron - neutron_cleanup - neutron_deploy_prep - neutron_deploy - neutron_deploy_cleanup - cinder_prep - cinder - cinder_cleanup - cinder_deploy_prep - cinder_deploy - cinder_deploy_cleanup - rabbitmq_prep - rabbitmq - rabbitmq_cleanup - rabbitmq_deploy_prep - rabbitmq_deploy - rabbitmq_deploy_cleanup - ironic_prep - ironic - ironic_cleanup - ironic_deploy_prep - ironic_deploy - ironic_deploy_cleanup - octavia_prep - octavia - octavia_cleanup - octavia_deploy_prep - octavia_deploy - octavia_deploy_cleanup - designate_prep - designate - designate_cleanup - designate_deploy_prep - designate_deploy - designate_deploy_cleanup - nova_prep - nova - nova_cleanup - nova_deploy_prep - nova_deploy - nova_deploy_cleanup - mariadb_kuttl_run - mariadb_kuttl - kuttl_db_prep - kuttl_db_cleanup - kuttl_common_prep - kuttl_common_cleanup - keystone_kuttl_run - keystone_kuttl - barbican_kuttl_run - barbican_kuttl - placement_kuttl_run - placement_kuttl - cinder_kuttl_run - cinder_kuttl - neutron_kuttl_run - neutron_kuttl - octavia_kuttl_run - octavia_kuttl - designate_kuttl - designate_kuttl_run - ovn_kuttl_run - ovn_kuttl - infra_kuttl_run - infra_kuttl - ironic_kuttl_run - ironic_kuttl - ironic_kuttl_crc - heat_kuttl_run - heat_kuttl - heat_kuttl_crc - ansibleee_kuttl_run - ansibleee_kuttl_cleanup - ansibleee_kuttl_prep - ansibleee_kuttl - glance_kuttl_run - glance_kuttl - manila_kuttl_run - manila_kuttl - swift_kuttl_run - swift_kuttl - horizon_kuttl_run - horizon_kuttl - openstack_kuttl_run - openstack_kuttl - mariadb_chainsaw_run - mariadb_chainsaw - horizon_prep - horizon - horizon_cleanup - 
horizon_deploy_prep - horizon_deploy - horizon_deploy_cleanup - heat_prep - heat - heat_cleanup - heat_deploy_prep - heat_deploy - heat_deploy_cleanup - ansibleee_prep - ansibleee - ansibleee_cleanup - baremetal_prep - baremetal - baremetal_cleanup - ceph_help - ceph - ceph_cleanup - rook_prep - rook - rook_deploy_prep - rook_deploy - rook_crc_disk - rook_cleanup - lvms - nmstate - nncp - nncp_cleanup - netattach - netattach_cleanup - metallb - metallb_config - metallb_config_cleanup - metallb_cleanup - loki - loki_cleanup - loki_deploy - loki_deploy_cleanup - netobserv - netobserv_cleanup - netobserv_deploy - netobserv_deploy_cleanup - manila_prep - manila - manila_cleanup - manila_deploy_prep - manila_deploy - manila_deploy_cleanup - telemetry_prep - telemetry - telemetry_cleanup - telemetry_deploy_prep - telemetry_deploy - telemetry_deploy_cleanup - telemetry_kuttl_run - telemetry_kuttl - swift_prep - swift - swift_cleanup - swift_deploy_prep - swift_deploy - swift_deploy_cleanup - certmanager - certmanager_cleanup - validate_marketplace - redis_deploy_prep - redis_deploy - redis_deploy_cleanup - set_slower_etcd_profile /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/Makefile: - help - download_tools - nfs - nfs_cleanup - crc - crc_cleanup - crc_scrub - crc_attach_default_interface - crc_attach_default_interface_cleanup - ipv6_lab_network - ipv6_lab_network_cleanup - ipv6_lab_nat64_router - ipv6_lab_nat64_router_cleanup - ipv6_lab_sno - ipv6_lab_sno_cleanup - ipv6_lab - ipv6_lab_cleanup - attach_default_interface - attach_default_interface_cleanup - network_isolation_bridge - network_isolation_bridge_cleanup - edpm_baremetal_compute - edpm_compute - edpm_compute_bootc - edpm_ansible_runner - edpm_computes_bgp - edpm_compute_repos - edpm_compute_cleanup - edpm_networker - edpm_networker_cleanup - edpm_deploy_instance - tripleo_deploy - standalone_deploy - standalone_sync - standalone - standalone_cleanup - standalone_snapshot - standalone_revert - cifmw_prepare - cifmw_cleanup - bmaas_network - bmaas_network_cleanup - bmaas_route_crc_and_crc_bmaas_networks - bmaas_route_crc_and_crc_bmaas_networks_cleanup - bmaas_crc_attach_network - bmaas_crc_attach_network_cleanup - bmaas_crc_baremetal_bridge - bmaas_crc_baremetal_bridge_cleanup - bmaas_baremetal_net_nad - bmaas_baremetal_net_nad_cleanup - bmaas_metallb - bmaas_metallb_cleanup - bmaas_virtual_bms - bmaas_virtual_bms_cleanup - bmaas_sushy_emulator - bmaas_sushy_emulator_cleanup - bmaas_sushy_emulator_wait - bmaas_generate_nodes_yaml - bmaas - bmaas_cleanup failed: false success: true 2026-01-22 12:06:03,142 p=33295 u=zuul n=ansible | TASK [install_yamls : Create the install_yamls parameters file dest={{ cifmw_basedir }}/artifacts/parameters/install-yamls-params.yml, content={{ { 'cifmw_install_yamls_environment': cifmw_install_yamls_environment, 'cifmw_install_yamls_defaults': cifmw_install_yamls_defaults } | to_nice_yaml }}, mode=0644] *** 2026-01-22 12:06:03,142 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:03 +0000 (0:00:00.035) 0:01:15.852 ****** 2026-01-22 12:06:03,143 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:03 +0000 (0:00:00.035) 0:01:15.850 ****** 2026-01-22 12:06:03,529 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:03,538 p=33295 u=zuul n=ansible | TASK [install_yamls : Create empty cifmw_install_yamls_environment if needed cifmw_install_yamls_environment={}] *** 2026-01-22 12:06:03,538 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:03 +0000 
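The target list above is read straight from the two install_yamls Makefiles checked out under /home/zuul/src/github.com/openstack-k8s-operators/install_yamls. A rough way to reproduce one of these targets by hand, outside the framework, is to source the generated environment file and call make directly (a sketch under that assumption; the framework itself drives the targets through generated wrappers rather than this exact command):

    # hypothetical manual invocation of two of the listed targets
    source /home/zuul/ci-framework-data/artifacts/install_yamls.sh
    make -C /home/zuul/src/github.com/openstack-k8s-operators/install_yamls openstack_prep
    make -C /home/zuul/src/github.com/openstack-k8s-operators/install_yamls openstack_init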
(0:00:00.395) 0:01:16.247 ****** 2026-01-22 12:06:03,538 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:03 +0000 (0:00:00.395) 0:01:16.245 ****** 2026-01-22 12:06:03,556 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:03,572 p=33295 u=zuul n=ansible | TASK [discover_latest_image : Get latest image url={{ cifmw_discover_latest_image_base_url }}, image_prefix={{ cifmw_discover_latest_image_qcow_prefix }}, images_file={{ cifmw_discover_latest_image_images_file }}] *** 2026-01-22 12:06:03,572 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:03 +0000 (0:00:00.034) 0:01:16.282 ****** 2026-01-22 12:06:03,573 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:03 +0000 (0:00:00.034) 0:01:16.280 ****** 2026-01-22 12:06:04,201 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:04,209 p=33295 u=zuul n=ansible | TASK [discover_latest_image : Export facts accordingly cifmw_discovered_image_name={{ discovered_image['data']['image_name'] }}, cifmw_discovered_image_url={{ discovered_image['data']['image_url'] }}, cifmw_discovered_hash={{ discovered_image['data']['hash'] }}, cifmw_discovered_hash_algorithm={{ discovered_image['data']['hash_algorithm'] }}, cacheable=True] *** 2026-01-22 12:06:04,210 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:04 +0000 (0:00:00.637) 0:01:16.919 ****** 2026-01-22 12:06:04,210 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:04 +0000 (0:00:00.637) 0:01:16.917 ****** 2026-01-22 12:06:04,234 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:04,249 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Create artifacts with custom params mode=0644, dest={{ cifmw_basedir }}/artifacts/parameters/custom-params.yml, content={{ ci_framework_params | to_nice_yaml }}] *** 2026-01-22 12:06:04,249 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:04 +0000 (0:00:00.039) 0:01:16.959 ****** 2026-01-22 12:06:04,250 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:04 +0000 (0:00:00.039) 0:01:16.957 ****** 2026-01-22 12:06:04,656 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:04,672 p=33295 u=zuul n=ansible | TASK [Dump parameters to a file to pass them easily to cifmw playbooks src={{ playbook_dir }}/files/ci_framework_params.yaml.j2, dest={{ cifmw_parameters_file }}] *** 2026-01-22 12:06:04,672 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:04 +0000 (0:00:00.422) 0:01:17.381 ****** 2026-01-22 12:06:04,672 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:04 +0000 (0:00:00.422) 0:01:17.379 ****** 2026-01-22 12:06:05,124 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:05,143 p=33295 u=zuul n=ansible | TASK [Set vars as fact - cifmw parameters name=cifmw_helpers, tasks_from=var_file.yml] *** 2026-01-22 12:06:05,143 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.471) 0:01:17.852 ****** 2026-01-22 12:06:05,143 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.471) 0:01:17.850 ****** 2026-01-22 12:06:05,301 p=33295 u=zuul n=ansible | TASK [cifmw_helpers : Fail if file is not yaml or yml extension msg=File needs to be yaml/yml extension] *** 2026-01-22 12:06:05,302 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.158) 0:01:18.011 ****** 2026-01-22 12:06:05,302 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.158) 0:01:18.009 ****** 2026-01-22 12:06:05,328 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:05,340 p=33295 u=zuul 
n=ansible | TASK [cifmw_helpers : Check if file is available path={{ provided_file | trim }}] *** 2026-01-22 12:06:05,340 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.038) 0:01:18.049 ****** 2026-01-22 12:06:05,340 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.038) 0:01:18.047 ****** 2026-01-22 12:06:05,530 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:05,538 p=33295 u=zuul n=ansible | TASK [cifmw_helpers : Read the vars] ******************************************* 2026-01-22 12:06:05,538 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.198) 0:01:18.248 ****** 2026-01-22 12:06:05,538 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.198) 0:01:18.246 ****** 2026-01-22 12:06:05,703 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:05,715 p=33295 u=zuul n=ansible | TASK [cifmw_helpers : Set vars as fact] **************************************** 2026-01-22 12:06:05,716 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.177) 0:01:18.425 ****** 2026-01-22 12:06:05,716 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.177) 0:01:18.423 ****** 2026-01-22 12:06:05,751 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,757 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,763 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,775 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,781 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,788 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,795 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,801 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,806 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,812 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,818 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,820 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,853 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:05,874 p=33295 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2026-01-22 12:06:05,874 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.158) 0:01:18.583 ****** 2026-01-22 12:06:05,874 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.158) 0:01:18.581 ****** 2026-01-22 12:06:05,931 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:05,940 p=33295 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] 
*** 2026-01-22 12:06:05,940 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.066) 0:01:18.649 ****** 2026-01-22 12:06:05,940 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.065) 0:01:18.647 ****** 2026-01-22 12:06:06,010 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:06,022 p=33295 u=zuul n=ansible | TASK [run_hook : Loop on hooks for pre_infra _raw_params={{ hook.type }}.yml] *** 2026-01-22 12:06:06,022 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.081) 0:01:18.731 ****** 2026-01-22 12:06:06,022 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.082) 0:01:18.729 ****** 2026-01-22 12:06:06,168 p=33295 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/run_hook/tasks/playbook.yml for localhost => (item={'name': 'Download needed tools', 'inventory': 'localhost,', 'connection': 'local', 'type': 'playbook', 'source': '/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/download_tools.yaml'}) 2026-01-22 12:06:06,180 p=33295 u=zuul n=ansible | TASK [run_hook : Set playbook path for Download needed tools cifmw_basedir={{ _bdir }}, hook_name={{ _hook_name }}, playbook_path={{ _play | realpath }}, log_path={{ _bdir }}/logs/{{ step }}_{{ _hook_name }}.log, extra_vars=-e namespace={{ cifmw_openstack_namespace }} {%- if hook.extra_vars is defined and hook.extra_vars|length > 0 -%} {% for key,value in hook.extra_vars.items() -%} {%- if key == 'file' %} -e "@{{ value }}" {%- else %} -e "{{ key }}={{ value }}" {%- endif %} {%- endfor %} {%- endif %}] *** 2026-01-22 12:06:06,180 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.158) 0:01:18.890 ****** 2026-01-22 12:06:06,181 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.158) 0:01:18.888 ****** 2026-01-22 12:06:06,224 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:06,232 p=33295 u=zuul n=ansible | TASK [run_hook : Get file stat path={{ playbook_path }}] *********************** 2026-01-22 12:06:06,232 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.051) 0:01:18.942 ****** 2026-01-22 12:06:06,232 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.051) 0:01:18.940 ****** 2026-01-22 12:06:06,433 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:06,452 p=33295 u=zuul n=ansible | TASK [run_hook : Fail if playbook doesn't exist msg=Playbook {{ playbook_path }} doesn't seem to exist.] 
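Put together, the hook definition and extra_vars template above, plus the "Add parameters artifacts as extra variables" entry a few lines below, amount to an ansible-playbook call roughly like the following for the "Download needed tools" hook (a sketch; the role wraps the call in a ci_script step, and the namespace value is an assumption based on the NAMESPACE default shown earlier):

    # approximate command assembled by run_hook for the pre_infra hook,
    # with one -e "@<file>" per *.yml under artifacts/parameters
    ansible-playbook -i localhost, -c local \
      -e "namespace=openstack" \
      -e "@/home/zuul/ci-framework-data/artifacts/parameters/install-yamls-params.yml" \
      -e "@/home/zuul/ci-framework-data/artifacts/parameters/custom-params.yml" \
      /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/download_tools.yaml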
*** 2026-01-22 12:06:06,452 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.219) 0:01:19.162 ****** 2026-01-22 12:06:06,452 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.220) 0:01:19.160 ****** 2026-01-22 12:06:06,475 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:06,485 p=33295 u=zuul n=ansible | TASK [run_hook : Get parameters files paths={{ (cifmw_basedir, 'artifacts/parameters') | path_join }}, file_type=file, patterns=*.yml] *** 2026-01-22 12:06:06,485 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.032) 0:01:19.194 ****** 2026-01-22 12:06:06,485 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.032) 0:01:19.192 ****** 2026-01-22 12:06:06,678 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:06,693 p=33295 u=zuul n=ansible | TASK [run_hook : Add parameters artifacts as extra variables extra_vars={{ extra_vars }} {% for file in cifmw_run_hook_parameters_files.files %} -e "@{{ file.path }}" {%- endfor %}] *** 2026-01-22 12:06:06,693 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.207) 0:01:19.402 ****** 2026-01-22 12:06:06,693 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.208) 0:01:19.400 ****** 2026-01-22 12:06:06,720 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:06,730 p=33295 u=zuul n=ansible | TASK [run_hook : Ensure log directory exists path={{ log_path | dirname }}, state=directory, mode=0755] *** 2026-01-22 12:06:06,730 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.036) 0:01:19.439 ****** 2026-01-22 12:06:06,730 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.036) 0:01:19.437 ****** 2026-01-22 12:06:06,918 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:06,927 p=33295 u=zuul n=ansible | TASK [run_hook : Ensure artifacts directory exists path={{ cifmw_basedir }}/artifacts, state=directory, mode=0755] *** 2026-01-22 12:06:06,927 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.197) 0:01:19.637 ****** 2026-01-22 12:06:06,927 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.197) 0:01:19.635 ****** 2026-01-22 12:06:07,106 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:07,118 p=33295 u=zuul n=ansible | TASK [run_hook : Run hook without retry - Download needed tools] *************** 2026-01-22 12:06:07,118 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:07 +0000 (0:00:00.190) 0:01:19.827 ****** 2026-01-22 12:06:07,118 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:07 +0000 (0:00:00.190) 0:01:19.825 ****** 2026-01-22 12:06:07,168 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_000_run_hook_without_retry.log 2026-01-22 12:06:41,355 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:41,366 p=33295 u=zuul n=ansible | TASK [run_hook : Run hook with retry - Download needed tools] ****************** 2026-01-22 12:06:41,367 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:34.248) 0:01:54.076 ****** 2026-01-22 12:06:41,367 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:34.248) 0:01:54.074 ****** 2026-01-22 12:06:41,388 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:41,397 p=33295 u=zuul n=ansible | TASK [run_hook : Check if we have a file path={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name 
}}.yml] *** 2026-01-22 12:06:41,397 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.030) 0:01:54.106 ****** 2026-01-22 12:06:41,397 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.030) 0:01:54.105 ****** 2026-01-22 12:06:41,572 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:41,582 p=33295 u=zuul n=ansible | TASK [run_hook : Load generated content in main playbook file={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml] *** 2026-01-22 12:06:41,582 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.184) 0:01:54.291 ****** 2026-01-22 12:06:41,582 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.184) 0:01:54.289 ****** 2026-01-22 12:06:41,602 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:41,698 p=33295 u=zuul n=ansible | PLAY [Prepare host virtualization] ********************************************* 2026-01-22 12:06:41,717 p=33295 u=zuul n=ansible | TASK [Include vars from cifmw_extras _raw_params={{ item | replace('@','') }}] *** 2026-01-22 12:06:41,717 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.135) 0:01:54.426 ****** 2026-01-22 12:06:41,717 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.135) 0:01:54.424 ****** 2026-01-22 12:06:41,740 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:41,748 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] *** 2026-01-22 12:06:41,749 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.031) 0:01:54.458 ****** 2026-01-22 12:06:41,749 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.031) 0:01:54.456 ****** 2026-01-22 12:06:41,792 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:41,801 p=33295 u=zuul n=ansible | TASK [Ensure libvirt is present/configured name=libvirt_manager] *************** 2026-01-22 12:06:41,801 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.052) 0:01:54.510 ****** 2026-01-22 12:06:41,801 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.052) 0:01:54.509 ****** 2026-01-22 12:06:41,824 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:41,832 p=33295 u=zuul n=ansible | TASK [Perpare OpenShift provisioner node name=openshift_provisioner_node] ****** 2026-01-22 12:06:41,832 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.030) 0:01:54.541 ****** 2026-01-22 12:06:41,832 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.030) 0:01:54.539 ****** 2026-01-22 12:06:41,853 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:41,904 p=33295 u=zuul n=ansible | PLAY [Run cifmw_setup infra, build package, container and operators, deploy EDPM] *** 2026-01-22 12:06:41,939 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] *** 2026-01-22 12:06:41,939 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.106) 0:01:54.648 ****** 2026-01-22 12:06:41,939 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.107) 0:01:54.646 ****** 2026-01-22 12:06:41,983 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:41,994 p=33295 u=zuul n=ansible | TASK [networking_mapper : Check for Networking Environment Definition file existence path={{ 
cifmw_networking_mapper_networking_env_def_path }}] *** 2026-01-22 12:06:41,994 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.054) 0:01:54.703 ****** 2026-01-22 12:06:41,994 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.054) 0:01:54.701 ****** 2026-01-22 12:06:42,178 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:42,193 p=33295 u=zuul n=ansible | TASK [networking_mapper : Check for Networking Definition file existance that=['_net_env_def_stat.stat.exists'], msg=Ensure that the Networking Environment Definition file exists in {{ cifmw_networking_mapper_networking_env_def_path }}, quiet=True] *** 2026-01-22 12:06:42,193 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.199) 0:01:54.902 ****** 2026-01-22 12:06:42,193 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.199) 0:01:54.900 ****** 2026-01-22 12:06:42,216 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:42,231 p=33295 u=zuul n=ansible | TASK [networking_mapper : Load the Networking Definition from file path={{ cifmw_networking_mapper_networking_env_def_path }}] *** 2026-01-22 12:06:42,232 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.038) 0:01:54.941 ****** 2026-01-22 12:06:42,232 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.038) 0:01:54.939 ****** 2026-01-22 12:06:42,256 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:42,269 p=33295 u=zuul n=ansible | TASK [networking_mapper : Set cifmw_networking_env_definition is present cifmw_networking_env_definition={{ _net_env_def_slurp['content'] | b64decode | from_yaml }}, cacheable=True] *** 2026-01-22 12:06:42,269 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.037) 0:01:54.978 ****** 2026-01-22 12:06:42,269 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.037) 0:01:54.976 ****** 2026-01-22 12:06:42,292 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:42,315 p=33295 u=zuul n=ansible | TASK [Deploy OCP using Hive name=hive] ***************************************** 2026-01-22 12:06:42,315 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.045) 0:01:55.024 ****** 2026-01-22 12:06:42,315 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.045) 0:01:55.022 ****** 2026-01-22 12:06:42,340 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:42,349 p=33295 u=zuul n=ansible | TASK [Prepare CRC name=rhol_crc] *********************************************** 2026-01-22 12:06:42,349 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.034) 0:01:55.059 ****** 2026-01-22 12:06:42,349 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.034) 0:01:55.057 ****** 2026-01-22 12:06:42,372 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:42,382 p=33295 u=zuul n=ansible | TASK [Deploy OpenShift cluster using dev-scripts name=devscripts] ************** 2026-01-22 12:06:42,382 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.033) 0:01:55.092 ****** 2026-01-22 12:06:42,382 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.033) 0:01:55.090 ****** 2026-01-22 12:06:42,406 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:42,414 p=33295 u=zuul n=ansible | TASK [openshift_login : Ensure output directory exists path={{ 
cifmw_openshift_login_basedir }}/artifacts, state=directory, mode=0755] *** 2026-01-22 12:06:42,415 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.032) 0:01:55.124 ****** 2026-01-22 12:06:42,415 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.032) 0:01:55.122 ****** 2026-01-22 12:06:42,597 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:42,610 p=33295 u=zuul n=ansible | TASK [openshift_login : OpenShift login _raw_params=login.yml] ***************** 2026-01-22 12:06:42,610 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.195) 0:01:55.319 ****** 2026-01-22 12:06:42,610 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.195) 0:01:55.317 ****** 2026-01-22 12:06:42,649 p=33295 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/openshift_login/tasks/login.yml for localhost 2026-01-22 12:06:42,669 p=33295 u=zuul n=ansible | TASK [openshift_login : Check if the password file is present path={{ cifmw_openshift_login_password_file | default(cifmw_openshift_password_file) }}] *** 2026-01-22 12:06:42,669 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.059) 0:01:55.378 ****** 2026-01-22 12:06:42,669 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.059) 0:01:55.377 ****** 2026-01-22 12:06:42,693 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:42,702 p=33295 u=zuul n=ansible | TASK [openshift_login : Fetch user password content src={{ cifmw_openshift_login_password_file | default(cifmw_openshift_password_file) }}] *** 2026-01-22 12:06:42,702 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.033) 0:01:55.412 ****** 2026-01-22 12:06:42,702 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.033) 0:01:55.410 ****** 2026-01-22 12:06:42,725 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:42,734 p=33295 u=zuul n=ansible | TASK [openshift_login : Set user password as a fact cifmw_openshift_login_password={{ cifmw_openshift_login_password_file_slurp.content | b64decode }}, cacheable=True] *** 2026-01-22 12:06:42,734 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.031) 0:01:55.443 ****** 2026-01-22 12:06:42,734 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.031) 0:01:55.441 ****** 2026-01-22 12:06:42,762 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:42,772 p=33295 u=zuul n=ansible | TASK [openshift_login : Set role variables cifmw_openshift_login_kubeconfig={{ cifmw_openshift_login_kubeconfig | default(cifmw_openshift_kubeconfig) | default( ansible_env.KUBECONFIG if 'KUBECONFIG' in ansible_env else cifmw_openshift_login_kubeconfig_default_path ) | trim }}, cifmw_openshift_login_user={{ cifmw_openshift_login_user | default(cifmw_openshift_user) | default(omit) }}, cifmw_openshift_login_password={{ cifmw_openshift_login_password | default(cifmw_openshift_password) | default(omit) }}, cifmw_openshift_login_api={{ cifmw_openshift_login_api | default(cifmw_openshift_api) | default(omit) }}, cifmw_openshift_login_cert_login={{ cifmw_openshift_login_cert_login | default(false)}}, cifmw_openshift_login_provided_token={{ cifmw_openshift_provided_token | default(omit) }}, cacheable=True] *** 2026-01-22 12:06:42,772 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.038) 0:01:55.481 ****** 2026-01-22 12:06:42,772 
p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.038) 0:01:55.479 ****** 2026-01-22 12:06:42,805 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:42,813 p=33295 u=zuul n=ansible | TASK [openshift_login : Check if kubeconfig exists path={{ cifmw_openshift_login_kubeconfig }}] *** 2026-01-22 12:06:42,813 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.040) 0:01:55.522 ****** 2026-01-22 12:06:42,813 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.040) 0:01:55.520 ****** 2026-01-22 12:06:42,988 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:43,001 p=33295 u=zuul n=ansible | TASK [openshift_login : Assert that enough data is provided to log in to OpenShift that=cifmw_openshift_login_kubeconfig_stat.stat.exists or (cifmw_openshift_login_provided_token is defined and cifmw_openshift_login_provided_token != '') or ( (cifmw_openshift_login_user is defined) and (cifmw_openshift_login_password is defined) and (cifmw_openshift_login_api is defined) ), msg=If an existing kubeconfig is not provided user/pwd or provided/initial token and API URL must be given] *** 2026-01-22 12:06:43,001 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.188) 0:01:55.710 ****** 2026-01-22 12:06:43,001 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.188) 0:01:55.709 ****** 2026-01-22 12:06:43,034 p=33295 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2026-01-22 12:06:43,050 p=33295 u=zuul n=ansible | TASK [openshift_login : Fetch kubeconfig content src={{ cifmw_openshift_login_kubeconfig }}] *** 2026-01-22 12:06:43,051 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.049) 0:01:55.760 ****** 2026-01-22 12:06:43,051 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.049) 0:01:55.758 ****** 2026-01-22 12:06:43,076 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:43,091 p=33295 u=zuul n=ansible | TASK [openshift_login : Fetch x509 key based users cifmw_openshift_login_key_based_users={{ ( cifmw_openshift_login_kubeconfig_content_b64.content | b64decode | from_yaml ). 
users | default([]) | selectattr('user.client-certificate-data', 'defined') | map(attribute="name") | map("split", "/") | map("first") }}, cacheable=True] *** 2026-01-22 12:06:43,091 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.040) 0:01:55.800 ****** 2026-01-22 12:06:43,091 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.040) 0:01:55.798 ****** 2026-01-22 12:06:43,113 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:43,123 p=33295 u=zuul n=ansible | TASK [openshift_login : Assign key based user if not provided and available cifmw_openshift_login_user={{ (cifmw_openshift_login_assume_cert_system_user | ternary('system:', '')) + (cifmw_openshift_login_key_based_users | map('replace', 'system:', '') | unique | first) }}, cifmw_openshift_login_cert_login=True, cacheable=True] *** 2026-01-22 12:06:43,123 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.032) 0:01:55.832 ****** 2026-01-22 12:06:43,123 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.032) 0:01:55.830 ****** 2026-01-22 12:06:43,145 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:43,156 p=33295 u=zuul n=ansible | TASK [openshift_login : Set the retry count cifmw_openshift_login_retries_cnt={{ 0 if cifmw_openshift_login_retries_cnt is undefined else cifmw_openshift_login_retries_cnt|int + 1 }}] *** 2026-01-22 12:06:43,156 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.033) 0:01:55.865 ****** 2026-01-22 12:06:43,156 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.033) 0:01:55.863 ****** 2026-01-22 12:06:43,183 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:43,194 p=33295 u=zuul n=ansible | TASK [openshift_login : Fetch token _raw_params=try_login.yml] ***************** 2026-01-22 12:06:43,194 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.037) 0:01:55.903 ****** 2026-01-22 12:06:43,194 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.037) 0:01:55.901 ****** 2026-01-22 12:06:43,220 p=33295 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/openshift_login/tasks/try_login.yml for localhost 2026-01-22 12:06:43,236 p=33295 u=zuul n=ansible | TASK [openshift_login : Try get OpenShift access token _raw_params=oc whoami -t] *** 2026-01-22 12:06:43,236 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.042) 0:01:55.946 ****** 2026-01-22 12:06:43,236 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.042) 0:01:55.944 ****** 2026-01-22 12:06:43,260 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:43,272 p=33295 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift token output_dir={{ cifmw_openshift_login_basedir }}/artifacts, script=oc login {%- if cifmw_openshift_login_provided_token is not defined %} {%- if cifmw_openshift_login_user is defined %} -u {{ cifmw_openshift_login_user }} {%- endif %} {%- if cifmw_openshift_login_password is defined %} -p {{ cifmw_openshift_login_password }} {%- endif %} {% else %} --token={{ cifmw_openshift_login_provided_token }} {%- endif %} {%- if cifmw_openshift_login_skip_tls_verify|bool %} --insecure-skip-tls-verify=true {%- endif %} {%- if cifmw_openshift_login_api is defined %} {{ cifmw_openshift_login_api }} {%- endif %}] *** 2026-01-22 12:06:43,272 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 
(0:00:00.035) 0:01:55.981 ****** 2026-01-22 12:06:43,272 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.035) 0:01:55.979 ****** 2026-01-22 12:06:43,336 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_001_fetch_openshift.log 2026-01-22 12:06:43,659 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:43,677 p=33295 u=zuul n=ansible | TASK [openshift_login : Ensure kubeconfig is provided that=cifmw_openshift_login_kubeconfig != ""] *** 2026-01-22 12:06:43,677 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.404) 0:01:56.386 ****** 2026-01-22 12:06:43,677 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.405) 0:01:56.385 ****** 2026-01-22 12:06:43,704 p=33295 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2026-01-22 12:06:43,721 p=33295 u=zuul n=ansible | TASK [openshift_login : Fetch new OpenShift access token _raw_params=oc whoami -t] *** 2026-01-22 12:06:43,722 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.044) 0:01:56.431 ****** 2026-01-22 12:06:43,722 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.044) 0:01:56.429 ****** 2026-01-22 12:06:44,181 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:44,197 p=33295 u=zuul n=ansible | TASK [openshift_login : Set new OpenShift token cifmw_openshift_login_token={{ (not cifmw_openshift_login_new_token_out.skipped | default(false)) | ternary(cifmw_openshift_login_new_token_out.stdout, cifmw_openshift_login_whoami_out.stdout) }}, cacheable=True] *** 2026-01-22 12:06:44,198 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:44 +0000 (0:00:00.475) 0:01:56.907 ****** 2026-01-22 12:06:44,198 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:44 +0000 (0:00:00.475) 0:01:56.905 ****** 2026-01-22 12:06:44,234 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:44,245 p=33295 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift API URL _raw_params=oc whoami --show-server=true] *** 2026-01-22 12:06:44,245 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:44 +0000 (0:00:00.047) 0:01:56.955 ****** 2026-01-22 12:06:44,246 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:44 +0000 (0:00:00.047) 0:01:56.953 ****** 2026-01-22 12:06:44,592 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:44,601 p=33295 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift kubeconfig context _raw_params=oc whoami -c] *** 2026-01-22 12:06:44,601 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:44 +0000 (0:00:00.355) 0:01:57.310 ****** 2026-01-22 12:06:44,601 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:44 +0000 (0:00:00.355) 0:01:57.308 ****** 2026-01-22 12:06:44,874 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:44,888 p=33295 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift current user _raw_params=oc whoami] **** 2026-01-22 12:06:44,888 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:44 +0000 (0:00:00.287) 0:01:57.597 ****** 2026-01-22 12:06:44,888 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:44 +0000 (0:00:00.287) 0:01:57.595 ****** 2026-01-22 12:06:45,181 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:45,191 p=33295 u=zuul n=ansible | TASK [openshift_login : Set OpenShift user, context and API facts cifmw_openshift_login_api={{ cifmw_openshift_login_api_out.stdout }}, 
cifmw_openshift_login_context={{ cifmw_openshift_login_context_out.stdout }}, cifmw_openshift_login_user={{ _oauth_user }}, cifmw_openshift_kubeconfig={{ cifmw_openshift_login_kubeconfig }}, cifmw_openshift_api={{ cifmw_openshift_login_api_out.stdout }}, cifmw_openshift_context={{ cifmw_openshift_login_context_out.stdout }}, cifmw_openshift_user={{ _oauth_user }}, cifmw_openshift_token={{ cifmw_openshift_login_token | default(omit) }}, cifmw_install_yamls_environment={{ ( cifmw_install_yamls_environment | combine({'KUBECONFIG': cifmw_openshift_login_kubeconfig}) ) if cifmw_install_yamls_environment is defined else omit }}, cacheable=True] *** 2026-01-22 12:06:45,191 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:45 +0000 (0:00:00.303) 0:01:57.901 ****** 2026-01-22 12:06:45,191 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:45 +0000 (0:00:00.303) 0:01:57.899 ****** 2026-01-22 12:06:45,240 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:45,248 p=33295 u=zuul n=ansible | TASK [openshift_login : Create the openshift_login parameters file dest={{ cifmw_basedir }}/artifacts/parameters/openshift-login-params.yml, content={{ cifmw_openshift_login_params_content | from_yaml | to_nice_yaml }}, mode=0600] *** 2026-01-22 12:06:45,248 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:45 +0000 (0:00:00.056) 0:01:57.958 ****** 2026-01-22 12:06:45,248 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:45 +0000 (0:00:00.056) 0:01:57.956 ****** 2026-01-22 12:06:45,702 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:45,715 p=33295 u=zuul n=ansible | TASK [openshift_login : Read the install yamls parameters file path={{ cifmw_basedir }}/artifacts/parameters/install-yamls-params.yml] *** 2026-01-22 12:06:45,715 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:45 +0000 (0:00:00.466) 0:01:58.424 ****** 2026-01-22 12:06:45,715 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:45 +0000 (0:00:00.467) 0:01:58.423 ****** 2026-01-22 12:06:45,932 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:45,942 p=33295 u=zuul n=ansible | TASK [openshift_login : Append the KUBECONFIG to the install yamls parameters content={{ cifmw_openshift_login_install_yamls_artifacts_slurp['content'] | b64decode | from_yaml | combine( { 'cifmw_install_yamls_environment': { 'KUBECONFIG': cifmw_openshift_login_kubeconfig } }, recursive=true) | to_nice_yaml }}, dest={{ cifmw_basedir }}/artifacts/parameters/install-yamls-params.yml, mode=0600] *** 2026-01-22 12:06:45,943 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:45 +0000 (0:00:00.227) 0:01:58.652 ****** 2026-01-22 12:06:45,943 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:45 +0000 (0:00:00.227) 0:01:58.650 ****** 2026-01-22 12:06:46,404 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:46,419 p=33295 u=zuul n=ansible | TASK [openshift_setup : Ensure output directory exists path={{ cifmw_openshift_setup_basedir }}/artifacts, state=directory, mode=0755] *** 2026-01-22 12:06:46,419 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:46 +0000 (0:00:00.476) 0:01:59.129 ****** 2026-01-22 12:06:46,419 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:46 +0000 (0:00:00.476) 0:01:59.127 ****** 2026-01-22 12:06:46,658 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:46,670 p=33295 u=zuul n=ansible | TASK [openshift_setup : Fetch namespaces to create cifmw_openshift_setup_namespaces={{ (( ([cifmw_install_yamls_defaults['NAMESPACE']] + 
([cifmw_install_yamls_defaults['OPERATOR_NAMESPACE']] if 'OPERATOR_NAMESPACE' is in cifmw_install_yamls_defaults else []) ) if cifmw_install_yamls_defaults is defined else [] ) + cifmw_openshift_setup_create_namespaces) | unique }}] *** 2026-01-22 12:06:46,670 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:46 +0000 (0:00:00.250) 0:01:59.379 ****** 2026-01-22 12:06:46,670 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:46 +0000 (0:00:00.250) 0:01:59.377 ****** 2026-01-22 12:06:46,696 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:46,711 p=33295 u=zuul n=ansible | TASK [openshift_setup : Create required namespaces kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit) }}, name={{ item }}, kind=Namespace, state=present] *** 2026-01-22 12:06:46,711 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:46 +0000 (0:00:00.041) 0:01:59.421 ****** 2026-01-22 12:06:46,711 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:46 +0000 (0:00:00.041) 0:01:59.419 ****** 2026-01-22 12:06:47,772 p=33295 u=zuul n=ansible | changed: [localhost] => (item=openstack) 2026-01-22 12:06:48,485 p=33295 u=zuul n=ansible | changed: [localhost] => (item=openstack-operators) 2026-01-22 12:06:48,513 p=33295 u=zuul n=ansible | TASK [openshift_setup : Get internal OpenShift registry route kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, kind=Route, name=default-route, namespace=openshift-image-registry] *** 2026-01-22 12:06:48,514 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:48 +0000 (0:00:01.802) 0:02:01.223 ****** 2026-01-22 12:06:48,514 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:48 +0000 (0:00:01.802) 0:02:01.221 ****** 2026-01-22 12:06:49,677 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:49,688 p=33295 u=zuul n=ansible | TASK [openshift_setup : Allow anonymous image-pulls in CRC registry for targeted namespaces state=present, kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'kind': 'RoleBinding', 'apiVersion': 'rbac.authorization.k8s.io/v1', 'metadata': {'name': 'system:image-puller', 'namespace': '{{ item }}'}, 'subjects': [{'kind': 'User', 'name': 'system:anonymous'}, {'kind': 'User', 'name': 'system:unauthenticated'}], 'roleRef': {'kind': 'ClusterRole', 'name': 'system:image-puller'}}] *** 2026-01-22 12:06:49,688 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:49 +0000 (0:00:01.174) 0:02:02.397 ****** 2026-01-22 12:06:49,688 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:49 +0000 (0:00:01.174) 0:02:02.395 ****** 2026-01-22 12:06:50,502 p=33295 u=zuul n=ansible | changed: [localhost] => (item=openstack) 2026-01-22 12:06:51,288 p=33295 u=zuul n=ansible | changed: [localhost] => (item=openstack-operators) 2026-01-22 12:06:51,302 p=33295 u=zuul n=ansible | TASK [openshift_setup : Wait for the image registry to be ready kind=Deployment, name=image-registry, namespace=openshift-image-registry, kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, wait=True, wait_sleep=10, wait_timeout=600, wait_condition={'type': 'Available', 'status': 'True'}] *** 2026-01-22 12:06:51,303 p=33295 u=zuul n=ansible | Thursday 22 January 
2026 12:06:51 +0000 (0:00:01.614) 0:02:04.012 ****** 2026-01-22 12:06:51,303 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:51 +0000 (0:00:01.614) 0:02:04.010 ****** 2026-01-22 12:06:52,247 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:52,262 p=33295 u=zuul n=ansible | TASK [openshift_setup : Login into OpenShift internal registry output_dir={{ cifmw_openshift_setup_basedir }}/artifacts, script=podman login -u {{ cifmw_openshift_user }} -p {{ cifmw_openshift_token }} {%- if cifmw_openshift_setup_skip_internal_registry_tls_verify|bool %} --tls-verify=false {%- endif %} {{ cifmw_openshift_setup_registry_default_route.resources[0].spec.host }}] *** 2026-01-22 12:06:52,262 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.959) 0:02:04.971 ****** 2026-01-22 12:06:52,262 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.959) 0:02:04.969 ****** 2026-01-22 12:06:52,312 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_002_login_into_openshift_internal.log 2026-01-22 12:06:52,516 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:52,533 p=33295 u=zuul n=ansible | TASK [Ensure we have custom CA installed on host role=install_ca] ************** 2026-01-22 12:06:52,533 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.271) 0:02:05.242 ****** 2026-01-22 12:06:52,533 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.271) 0:02:05.240 ****** 2026-01-22 12:06:52,556 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:52,568 p=33295 u=zuul n=ansible | TASK [openshift_setup : Update ca bundle _raw_params=update-ca-trust extract] *** 2026-01-22 12:06:52,569 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.035) 0:02:05.278 ****** 2026-01-22 12:06:52,569 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.035) 0:02:05.276 ****** 2026-01-22 12:06:52,588 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:52,600 p=33295 u=zuul n=ansible | TASK [openshift_setup : Slurp CAs file src={{ cifmw_openshift_setup_ca_bundle_path }}] *** 2026-01-22 12:06:52,601 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.031) 0:02:05.310 ****** 2026-01-22 12:06:52,601 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.031) 0:02:05.308 ****** 2026-01-22 12:06:52,631 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:52,641 p=33295 u=zuul n=ansible | TASK [openshift_setup : Create config map with registry CAs kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'apiVersion': 'v1', 'kind': 'ConfigMap', 'metadata': {'namespace': 'openshift-config', 'name': 'registry-cas'}, 'data': '{{ _config_map_data | items2dict }}'}] *** 2026-01-22 12:06:52,641 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.040) 0:02:05.351 ****** 2026-01-22 12:06:52,641 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.040) 0:02:05.349 ****** 2026-01-22 12:06:52,669 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:52,678 p=33295 u=zuul n=ansible | TASK [openshift_setup : Install Red Hat CA for pulling images from internal registry kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ 
cifmw_openshift_context | default(omit)}}, merge_type=merge, definition={'apiVersion': 'config.openshift.io/v1', 'kind': 'Image', 'metadata': {'name': 'cluster'}, 'spec': {'additionalTrustedCA': {'name': 'registry-cas'}}}] *** 2026-01-22 12:06:52,678 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.036) 0:02:05.387 ****** 2026-01-22 12:06:52,678 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.036) 0:02:05.385 ****** 2026-01-22 12:06:52,706 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:52,718 p=33295 u=zuul n=ansible | TASK [openshift_setup : Add insecure registry kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, merge_type=merge, definition={'apiVersion': 'config.openshift.io/v1', 'kind': 'Image', 'metadata': {'name': 'cluster'}, 'spec': {'registrySources': {'insecureRegistries': ['{{ cifmw_update_containers_registry }}'], 'allowedRegistries': '{{ all_registries }}'}}}] *** 2026-01-22 12:06:52,718 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.040) 0:02:05.428 ****** 2026-01-22 12:06:52,719 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.040) 0:02:05.426 ****** 2026-01-22 12:06:52,742 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:52,750 p=33295 u=zuul n=ansible | TASK [openshift_setup : Create a ICSP with repository digest mirrors kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'apiVersion': 'operator.openshift.io/v1alpha1', 'kind': 'ImageContentSourcePolicy', 'metadata': {'name': 'registry-digest-mirrors'}, 'spec': {'repositoryDigestMirrors': '{{ cifmw_openshift_setup_digest_mirrors }}'}}] *** 2026-01-22 12:06:52,750 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.031) 0:02:05.459 ****** 2026-01-22 12:06:52,750 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.031) 0:02:05.457 ****** 2026-01-22 12:06:52,784 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:52,793 p=33295 u=zuul n=ansible | TASK [openshift_setup : Gather network.operator info kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, api_version=operator.openshift.io/v1, kind=Network, name=cluster] *** 2026-01-22 12:06:52,793 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.043) 0:02:05.503 ****** 2026-01-22 12:06:52,793 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.043) 0:02:05.501 ****** 2026-01-22 12:06:53,567 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:53,596 p=33295 u=zuul n=ansible | TASK [openshift_setup : Patch network operator api_version=operator.openshift.io/v1, kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Network, name=cluster, persist_config=True, patch=[{'path': '/spec/defaultNetwork/ovnKubernetesConfig/gatewayConfig/routingViaHost', 'value': True, 'op': 'replace'}, {'path': '/spec/defaultNetwork/ovnKubernetesConfig/gatewayConfig/ipForwarding', 'value': 'Global', 'op': 'replace'}]] *** 2026-01-22 12:06:53,596 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:53 +0000 (0:00:00.802) 0:02:06.305 ****** 2026-01-22 12:06:53,596 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:53 +0000 (0:00:00.802) 
0:02:06.303 ****** 2026-01-22 12:06:54,542 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:54,556 p=33295 u=zuul n=ansible | TASK [openshift_setup : Patch samples registry configuration kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, api_version=samples.operator.openshift.io/v1, kind=Config, name=cluster, patch=[{'op': 'replace', 'path': '/spec/samplesRegistry', 'value': 'registry.redhat.io'}]] *** 2026-01-22 12:06:54,556 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:54 +0000 (0:00:00.959) 0:02:07.265 ****** 2026-01-22 12:06:54,556 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:54 +0000 (0:00:00.959) 0:02:07.263 ****** 2026-01-22 12:06:55,340 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:55,352 p=33295 u=zuul n=ansible | TASK [openshift_setup : Delete the pods from openshift-marketplace namespace kind=Pod, state=absent, delete_all=True, kubeconfig={{ cifmw_openshift_kubeconfig }}, namespace=openshift-marketplace] *** 2026-01-22 12:06:55,352 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.796) 0:02:08.062 ****** 2026-01-22 12:06:55,352 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.796) 0:02:08.060 ****** 2026-01-22 12:06:55,372 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,385 p=33295 u=zuul n=ansible | TASK [openshift_setup : Wait for openshift-marketplace pods to be running _raw_params=oc wait pod --all --for=condition=Ready -n openshift-marketplace --timeout=1m] *** 2026-01-22 12:06:55,386 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.033) 0:02:08.095 ****** 2026-01-22 12:06:55,386 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.033) 0:02:08.093 ****** 2026-01-22 12:06:55,406 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,429 p=33295 u=zuul n=ansible | TASK [Deploy Observability operator. 
name=openshift_obs] *********************** 2026-01-22 12:06:55,429 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.043) 0:02:08.138 ****** 2026-01-22 12:06:55,429 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.043) 0:02:08.136 ****** 2026-01-22 12:06:55,454 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,463 p=33295 u=zuul n=ansible | TASK [Deploy Metal3 BMHs name=deploy_bmh] ************************************** 2026-01-22 12:06:55,463 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.033) 0:02:08.172 ****** 2026-01-22 12:06:55,463 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.033) 0:02:08.170 ****** 2026-01-22 12:06:55,482 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,491 p=33295 u=zuul n=ansible | TASK [Install certmanager operator role name=cert_manager] ********************* 2026-01-22 12:06:55,492 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.028) 0:02:08.201 ****** 2026-01-22 12:06:55,492 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.028) 0:02:08.199 ****** 2026-01-22 12:06:55,512 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,523 p=33295 u=zuul n=ansible | TASK [Configure hosts networking using nmstate name=ci_nmstate] **************** 2026-01-22 12:06:55,523 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.031) 0:02:08.233 ****** 2026-01-22 12:06:55,523 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.031) 0:02:08.231 ****** 2026-01-22 12:06:55,547 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,555 p=33295 u=zuul n=ansible | TASK [Configure multus networks name=ci_multus] ******************************** 2026-01-22 12:06:55,555 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.032) 0:02:08.265 ****** 2026-01-22 12:06:55,555 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.032) 0:02:08.263 ****** 2026-01-22 12:06:55,578 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,586 p=33295 u=zuul n=ansible | TASK [Deploy Sushy Emulator service pod name=sushy_emulator] ******************* 2026-01-22 12:06:55,586 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.030) 0:02:08.295 ****** 2026-01-22 12:06:55,586 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.030) 0:02:08.293 ****** 2026-01-22 12:06:55,606 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,615 p=33295 u=zuul n=ansible | TASK [Setup Libvirt on controller name=libvirt_manager] ************************ 2026-01-22 12:06:55,615 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.029) 0:02:08.324 ****** 2026-01-22 12:06:55,615 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.029) 0:02:08.322 ****** 2026-01-22 12:06:55,633 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,643 p=33295 u=zuul n=ansible | TASK [Prepare container package builder name=pkg_build] ************************ 2026-01-22 12:06:55,643 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.027) 0:02:08.352 ****** 2026-01-22 12:06:55,643 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.027) 0:02:08.350 ****** 2026-01-22 12:06:55,665 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 
12:06:55,673 p=33295 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2026-01-22 12:06:55,673 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.030) 0:02:08.383 ****** 2026-01-22 12:06:55,674 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.030) 0:02:08.381 ****** 2026-01-22 12:06:55,733 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:55,742 p=33295 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2026-01-22 12:06:55,742 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.068) 0:02:08.451 ****** 2026-01-22 12:06:55,742 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.068) 0:02:08.449 ****** 2026-01-22 12:06:55,825 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:55,834 p=33295 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_infra _raw_params={{ hook.type }}.yml] *** 2026-01-22 12:06:55,835 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.092) 0:02:08.544 ****** 2026-01-22 12:06:55,835 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.092) 0:02:08.542 ****** 2026-01-22 12:06:55,914 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,935 p=33295 u=zuul n=ansible | TASK [Set content-provider variables dictionary cifmw_content_provider_params={'cifmw_operator_build_output': '{{ cifmw_operator_build_output }}', 'cifmw_operator_build_meta_name': 'openstack-operator'}, cacheable=True] *** 2026-01-22 12:06:55,936 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.101) 0:02:08.645 ****** 2026-01-22 12:06:55,936 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.101) 0:02:08.643 ****** 2026-01-22 12:06:55,973 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,983 p=33295 u=zuul n=ansible | TASK [Write variables to cifmw custom params file path={{ cifwm_data_folder }}/custom-params.yml, line={{ cifmw_content_provider_params | to_nice_yaml }}] *** 2026-01-22 12:06:55,983 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.047) 0:02:08.692 ****** 2026-01-22 12:06:55,983 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.047) 0:02:08.690 ****** 2026-01-22 12:06:56,006 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:56,019 p=33295 u=zuul n=ansible | TASK [Set content-provider variables dictionary cifmw_content_provider_params={'cifmw_operator_build_output': {'operators': {'openstack-operator': {'image_catalog': "{{ operators_catalog_img['openstack-operator'] }}"}}}, 'cifmw_operator_build_meta_name': 'openstack-operator'}, cacheable=True] *** 2026-01-22 12:06:56,020 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.036) 0:02:08.729 ****** 2026-01-22 12:06:56,020 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.036) 0:02:08.727 ****** 2026-01-22 12:06:56,044 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:56,053 p=33295 u=zuul n=ansible | TASK [Write variables to cifmw custom params file path={{ cifwm_data_folder 
}}/custom-params.yml, line={{ cifmw_content_provider_params | to_nice_yaml }}] *** 2026-01-22 12:06:56,053 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.033) 0:02:08.762 ****** 2026-01-22 12:06:56,053 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.033) 0:02:08.760 ****** 2026-01-22 12:06:56,075 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:56,085 p=33295 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2026-01-22 12:06:56,085 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.032) 0:02:08.795 ****** 2026-01-22 12:06:56,086 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.032) 0:02:08.793 ****** 2026-01-22 12:06:56,145 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:56,154 p=33295 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2026-01-22 12:06:56,154 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.068) 0:02:08.863 ****** 2026-01-22 12:06:56,154 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.068) 0:02:08.862 ****** 2026-01-22 12:06:56,232 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:56,246 p=33295 u=zuul n=ansible | TASK [run_hook : Loop on hooks for pre_deploy _raw_params={{ hook.type }}.yml] *** 2026-01-22 12:06:56,246 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.091) 0:02:08.955 ****** 2026-01-22 12:06:56,246 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.091) 0:02:08.953 ****** 2026-01-22 12:06:56,347 p=33295 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/run_hook/tasks/playbook.yml for localhost => (item={'name': 'Fetch compute facts', 'type': 'playbook', 'inventory': '/home/zuul/ci-framework-data/artifacts/zuul_inventory.yml', 'source': 'fetch_compute_facts.yml'}) 2026-01-22 12:06:56,359 p=33295 u=zuul n=ansible | TASK [run_hook : Set playbook path for Fetch compute facts cifmw_basedir={{ _bdir }}, hook_name={{ _hook_name }}, playbook_path={{ _play | realpath }}, log_path={{ _bdir }}/logs/{{ step }}_{{ _hook_name }}.log, extra_vars=-e namespace={{ cifmw_openstack_namespace }} {%- if hook.extra_vars is defined and hook.extra_vars|length > 0 -%} {% for key,value in hook.extra_vars.items() -%} {%- if key == 'file' %} -e "@{{ value }}" {%- else %} -e "{{ key }}={{ value }}" {%- endif %} {%- endfor %} {%- endif %}] *** 2026-01-22 12:06:56,359 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.113) 0:02:09.068 ****** 2026-01-22 12:06:56,359 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.112) 0:02:09.066 ****** 2026-01-22 12:06:56,403 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:56,420 p=33295 u=zuul n=ansible | TASK [run_hook : Get file stat path={{ playbook_path }}] *********************** 2026-01-22 12:06:56,420 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.060) 0:02:09.129 ****** 2026-01-22 12:06:56,420 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.061) 
0:02:09.127 ****** 2026-01-22 12:06:56,687 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:56,696 p=33295 u=zuul n=ansible | TASK [run_hook : Fail if playbook doesn't exist msg=Playbook {{ playbook_path }} doesn't seem to exist.] *** 2026-01-22 12:06:56,697 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.276) 0:02:09.406 ****** 2026-01-22 12:06:56,697 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.276) 0:02:09.404 ****** 2026-01-22 12:06:56,722 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:56,733 p=33295 u=zuul n=ansible | TASK [run_hook : Get parameters files paths={{ (cifmw_basedir, 'artifacts/parameters') | path_join }}, file_type=file, patterns=*.yml] *** 2026-01-22 12:06:56,733 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.036) 0:02:09.442 ****** 2026-01-22 12:06:56,733 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.036) 0:02:09.440 ****** 2026-01-22 12:06:56,926 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:56,944 p=33295 u=zuul n=ansible | TASK [run_hook : Add parameters artifacts as extra variables extra_vars={{ extra_vars }} {% for file in cifmw_run_hook_parameters_files.files %} -e "@{{ file.path }}" {%- endfor %}] *** 2026-01-22 12:06:56,944 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.211) 0:02:09.654 ****** 2026-01-22 12:06:56,945 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.211) 0:02:09.652 ****** 2026-01-22 12:06:56,972 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:56,985 p=33295 u=zuul n=ansible | TASK [run_hook : Ensure log directory exists path={{ log_path | dirname }}, state=directory, mode=0755] *** 2026-01-22 12:06:56,986 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.041) 0:02:09.695 ****** 2026-01-22 12:06:56,986 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.041) 0:02:09.693 ****** 2026-01-22 12:06:57,204 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:57,219 p=33295 u=zuul n=ansible | TASK [run_hook : Ensure artifacts directory exists path={{ cifmw_basedir }}/artifacts, state=directory, mode=0755] *** 2026-01-22 12:06:57,219 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:57 +0000 (0:00:00.233) 0:02:09.928 ****** 2026-01-22 12:06:57,219 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:57 +0000 (0:00:00.233) 0:02:09.926 ****** 2026-01-22 12:06:57,418 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:57,434 p=33295 u=zuul n=ansible | TASK [run_hook : Run hook without retry - Fetch compute facts] ***************** 2026-01-22 12:06:57,434 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:57 +0000 (0:00:00.215) 0:02:10.144 ****** 2026-01-22 12:06:57,435 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:57 +0000 (0:00:00.215) 0:02:10.142 ****** 2026-01-22 12:06:57,503 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_003_run_hook_without_retry_fetch.log 2026-01-22 12:07:00,446 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:07:00,463 p=33295 u=zuul n=ansible | TASK [run_hook : Run hook with retry - Fetch compute facts] ******************** 2026-01-22 12:07:00,463 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:00 +0000 (0:00:03.028) 0:02:13.172 ****** 2026-01-22 12:07:00,463 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:00 +0000 
(0:00:03.028) 0:02:13.170 ****** 2026-01-22 12:07:00,490 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:07:00,510 p=33295 u=zuul n=ansible | TASK [run_hook : Check if we have a file path={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml] *** 2026-01-22 12:07:00,510 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:00 +0000 (0:00:00.047) 0:02:13.219 ****** 2026-01-22 12:07:00,510 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:00 +0000 (0:00:00.047) 0:02:13.218 ****** 2026-01-22 12:07:00,822 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:07:00,835 p=33295 u=zuul n=ansible | TASK [run_hook : Load generated content in main playbook file={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml] *** 2026-01-22 12:07:00,835 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:00 +0000 (0:00:00.324) 0:02:13.544 ****** 2026-01-22 12:07:00,835 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:00 +0000 (0:00:00.324) 0:02:13.542 ****** 2026-01-22 12:07:00,869 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:07:00,892 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] *** 2026-01-22 12:07:00,892 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:00 +0000 (0:00:00.056) 0:02:13.601 ****** 2026-01-22 12:07:00,892 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:00 +0000 (0:00:00.057) 0:02:13.599 ****** 2026-01-22 12:07:01,027 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:07:01,042 p=33295 u=zuul n=ansible | TASK [Configure Storage Class name=ci_local_storage] *************************** 2026-01-22 12:07:01,042 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:01 +0000 (0:00:00.150) 0:02:13.752 ****** 2026-01-22 12:07:01,042 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:01 +0000 (0:00:00.150) 0:02:13.750 ****** 2026-01-22 12:07:01,168 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Create role needed directories path={{ cifmw_cls_manifests_dir }}, state=directory, mode=0755] *** 2026-01-22 12:07:01,169 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:01 +0000 (0:00:00.126) 0:02:13.878 ****** 2026-01-22 12:07:01,169 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:01 +0000 (0:00:00.126) 0:02:13.876 ****** 2026-01-22 12:07:01,360 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:07:01,368 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Create the cifmw_cls_namespace namespace" kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit) }}, name={{ cifmw_cls_namespace }}, kind=Namespace, state=present] *** 2026-01-22 12:07:01,369 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:01 +0000 (0:00:00.199) 0:02:14.078 ****** 2026-01-22 12:07:01,369 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:01 +0000 (0:00:00.199) 0:02:14.076 ****** 2026-01-22 12:07:02,139 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:07:02,152 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Save storage manifests as artifacts dest={{ cifmw_cls_manifests_dir }}/storage-class.yaml, content={{ cifmw_cls_storage_manifest | to_nice_yaml }}, mode=0644] *** 2026-01-22 12:07:02,152 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:02 +0000 (0:00:00.783) 0:02:14.862 ****** 2026-01-22 12:07:02,152 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:02 +0000 (0:00:00.783) 0:02:14.860 ****** 
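The ci_local_storage records above show the role creating a manifests directory and a dedicated namespace, then rendering the storage-class manifest that is applied a few records further down. A minimal Ansible sketch of that namespace-plus-manifest pattern follows; it assumes the kubernetes.core collection and the kubernetes Python client are available, the kubeconfig_path and manifests_dir variables are hypothetical, and the StorageClass body is a generic local-storage example rather than the role's own manifest (which comes from the role defaults):

- name: Create the storage namespace
  kubernetes.core.k8s:
    kubeconfig: "{{ kubeconfig_path }}"            # hypothetical variable
    kind: Namespace
    name: local-storage                            # hypothetical namespace
    state: present

- name: Save the StorageClass manifest as an artifact
  ansible.builtin.copy:
    dest: "{{ manifests_dir }}/storage-class.yaml" # hypothetical directory
    mode: "0644"
    content: "{{ _storage_class | to_nice_yaml }}"
  vars:
    _storage_class:                                # generic example body
      apiVersion: storage.k8s.io/v1
      kind: StorageClass
      metadata:
        name: local-storage
      provisioner: kubernetes.io/no-provisioner
      volumeBindingMode: WaitForFirstConsumer

- name: Apply the saved manifest
  kubernetes.core.k8s:
    kubeconfig: "{{ kubeconfig_path }}"
    state: present
    src: "{{ manifests_dir }}/storage-class.yaml"

Writing the rendered YAML to disk before applying it mirrors the artifact-first approach visible in the log: the manifest is kept under the artifacts directory so it can still be inspected after the job finishes.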
2026-01-22 12:07:02,577 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:07:02,589 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Get k8s nodes kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, kind=Node] *** 2026-01-22 12:07:02,589 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:02 +0000 (0:00:00.436) 0:02:15.298 ****** 2026-01-22 12:07:02,589 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:02 +0000 (0:00:00.436) 0:02:15.296 ****** 2026-01-22 12:07:03,437 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:07:03,454 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Fetch hostnames for all hosts _raw_params=hostname] *** 2026-01-22 12:07:03,454 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:03 +0000 (0:00:00.864) 0:02:16.163 ****** 2026-01-22 12:07:03,454 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:03 +0000 (0:00:00.864) 0:02:16.161 ****** 2026-01-22 12:07:04,400 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=crc) 2026-01-22 12:07:04,953 p=33295 u=zuul n=ansible | changed: [localhost -> standalone(38.102.83.154)] => (item=standalone) 2026-01-22 12:07:05,510 p=33295 u=zuul n=ansible | changed: [localhost -> controller(38.102.83.83)] => (item=controller) 2026-01-22 12:07:05,988 p=33295 u=zuul n=ansible | changed: [localhost] => (item=localhost) 2026-01-22 12:07:05,990 p=33295 u=zuul n=ansible | [WARNING]: Platform linux on host localhost is using the discovered Python interpreter at /usr/bin/python3.9, but future installation of another Python interpreter could change the meaning of that path. See https://docs.ansible.com/ansible- core/2.15/reference_appendices/interpreter_discovery.html for more information. 
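The hostname collection logged just above runs `hostname` on every inventory host via delegation so that the next record can map inventory entries onto OpenShift node names. A small sketch of that delegate-and-match pattern, assuming kubernetes.core.k8s_info can reach the same kubeconfig and with illustrative variable and fact names:

- name: List the cluster nodes
  kubernetes.core.k8s_info:
    kubeconfig: "{{ kubeconfig_path }}"   # hypothetical variable
    kind: Node
  register: _k8s_nodes

- name: Collect the hostname reported by each inventory host
  ansible.builtin.command: hostname
  delegate_to: "{{ item }}"
  loop: "{{ groups['all'] }}"
  register: _hostnames
  changed_when: false

- name: Keep only the inventory hosts that are also cluster nodes
  ansible.builtin.set_fact:
    k8s_ansible_hosts: >-
      {{ _hostnames.results
         | selectattr('stdout', 'in',
                      _k8s_nodes.resources | map(attribute='metadata.name') | list)
         | map(attribute='item')
         | list }}

Keeping the matched inventory names around is what lets the later records delegate the per-PV directory creation to the right machine (the crc node in this run).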
2026-01-22 12:07:06,001 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Set the hosts k8s ansible hosts cifmw_ci_local_storage_k8s_hosts={{ _host_map | selectattr("key", "in", k8s_nodes_hostnames) | map(attribute="value") | list }}, cifmw_ci_local_storage_k8s_hostnames={{ k8s_nodes_hostnames }}] *** 2026-01-22 12:07:06,001 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:05 +0000 (0:00:02.547) 0:02:18.710 ****** 2026-01-22 12:07:06,001 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:05 +0000 (0:00:02.546) 0:02:18.708 ****** 2026-01-22 12:07:06,039 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:07:06,049 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Apply the storage class manifests kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, state=present, src={{ cifmw_cls_manifests_dir }}/storage-class.yaml] *** 2026-01-22 12:07:06,049 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:06 +0000 (0:00:00.048) 0:02:18.758 ****** 2026-01-22 12:07:06,049 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:06 +0000 (0:00:00.048) 0:02:18.756 ****** 2026-01-22 12:07:06,800 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:07:06,812 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Create directories on worker node _raw_params=worker_node_dirs.yml] *** 2026-01-22 12:07:06,813 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:06 +0000 (0:00:00.763) 0:02:19.522 ****** 2026-01-22 12:07:06,813 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:06 +0000 (0:00:00.763) 0:02:19.520 ****** 2026-01-22 12:07:06,849 p=33295 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/ci_local_storage/tasks/worker_node_dirs.yml for localhost => (item=crc) 2026-01-22 12:07:06,861 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Perform action in the PV directory path={{ [ cifmw_cls_local_storage_name, 'pv'+ ("%02d" | format(item | int)) ] | path_join }}, state={{ 'directory' if cifmw_cls_action == 'create' else 'absent' }}, mode=0775] *** 2026-01-22 12:07:06,861 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:06 +0000 (0:00:00.048) 0:02:19.571 ****** 2026-01-22 12:07:06,861 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:06 +0000 (0:00:00.048) 0:02:19.569 ****** 2026-01-22 12:07:07,363 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=1) 2026-01-22 12:07:07,814 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=2) 2026-01-22 12:07:08,227 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=3) 2026-01-22 12:07:08,679 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=4) 2026-01-22 12:07:09,121 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=5) 2026-01-22 12:07:09,600 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=6) 2026-01-22 12:07:10,043 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=7) 2026-01-22 12:07:10,526 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=8) 2026-01-22 12:07:10,968 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=9) 2026-01-22 12:07:11,443 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=10) 2026-01-22 12:07:11,917 p=33295 u=zuul n=ansible | changed: [localhost -> 
crc(38.102.83.97)] => (item=11) 2026-01-22 12:07:12,354 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=12) 2026-01-22 12:07:12,369 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Generate pv related storage manifest file src=storage.yaml.j2, dest={{ cifmw_cls_manifests_dir }}/storage.yaml, mode=0644] *** 2026-01-22 12:07:12,369 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:12 +0000 (0:00:05.508) 0:02:25.079 ****** 2026-01-22 12:07:12,369 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:12 +0000 (0:00:05.508) 0:02:25.077 ****** 2026-01-22 12:07:12,809 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:07:12,819 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Apply pv related storage manifest file kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, state=present, src={{ cifmw_cls_manifests_dir }}/storage.yaml] *** 2026-01-22 12:07:12,819 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:12 +0000 (0:00:00.449) 0:02:25.528 ****** 2026-01-22 12:07:12,819 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:12 +0000 (0:00:00.449) 0:02:25.526 ****** 2026-01-22 12:07:13,721 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:07:13,764 p=33295 u=zuul n=ansible | TASK [Configure LVMS Storage Class name=ci_lvms_storage] *********************** 2026-01-22 12:07:13,764 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:13 +0000 (0:00:00.945) 0:02:26.474 ****** 2026-01-22 12:07:13,764 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:13 +0000 (0:00:00.945) 0:02:26.472 ****** 2026-01-22 12:07:13,799 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:07:13,811 p=33295 u=zuul n=ansible | TASK [Run edpm_prepare name=edpm_prepare] ************************************** 2026-01-22 12:07:13,811 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:13 +0000 (0:00:00.046) 0:02:26.520 ****** 2026-01-22 12:07:13,811 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:13 +0000 (0:00:00.046) 0:02:26.518 ****** 2026-01-22 12:07:13,952 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Define minimal set of repo variables when not running on Zuul _install_yamls_repos={'OPENSTACK_BRANCH': '', "GIT_CLONE_OPTS'": '-l', "OPENSTACK_REPO'": '{{ operators_build_output[cifmw_operator_build_meta_name].git_src_dir }}'}] *** 2026-01-22 12:07:13,952 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:13 +0000 (0:00:00.141) 0:02:26.662 ****** 2026-01-22 12:07:13,952 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:13 +0000 (0:00:00.141) 0:02:26.660 ****** 2026-01-22 12:07:13,977 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:07:13,987 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Set install_yamls Makefile environment variables cifmw_edpm_prepare_common_env={{ cifmw_install_yamls_environment | combine({'PATH': cifmw_path}) | combine(_install_yamls_repos | default({})) | combine(cifmw_edpm_prepare_extra_vars | default({})) }}, cifmw_edpm_prepare_make_openstack_env={% if cifmw_operator_build_meta_name is defined and cifmw_operator_build_meta_name in operators_build_output %} OPENSTACK_IMG: {{ operators_build_output[cifmw_operator_build_meta_name].image_catalog }} {% endif %} , cifmw_edpm_prepare_make_openstack_deploy_prep_env=CLEANUP_DIR_CMD: "true" , cifmw_edpm_prepare_operators_build_output={{ operators_build_output }}] *** 2026-01-22 12:07:13,987 p=33295 u=zuul n=ansible | 
Thursday 22 January 2026 12:07:13 +0000 (0:00:00.034) 0:02:26.696 ****** 2026-01-22 12:07:13,987 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:13 +0000 (0:00:00.034) 0:02:26.694 ****** 2026-01-22 12:07:14,017 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:07:14,027 p=33295 u=zuul n=ansible | TASK [Prepare storage in CRC name=install_yamls_makes, tasks_from=make_crc_storage] *** 2026-01-22 12:07:14,027 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:14 +0000 (0:00:00.039) 0:02:26.736 ****** 2026-01-22 12:07:14,027 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:14 +0000 (0:00:00.039) 0:02:26.734 ****** 2026-01-22 12:07:14,083 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_crc_storage_env var=make_crc_storage_env] *** 2026-01-22 12:07:14,083 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:14 +0000 (0:00:00.056) 0:02:26.792 ****** 2026-01-22 12:07:14,083 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:14 +0000 (0:00:00.056) 0:02:26.790 ****** 2026-01-22 12:07:14,127 p=33295 u=zuul n=ansible | ok: [localhost] => make_crc_storage_env: BMO_SETUP: false CHECKOUT_FROM_OPENSTACK_REF: 'true' KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig NETWORK_MTU: 1500 NNCP_DNS_SERVER: 192.168.122.10 NNCP_INTERFACE: ens7 OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm PATH: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin 2026-01-22 12:07:14,135 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_crc_storage_params var=make_crc_storage_params] *** 2026-01-22 12:07:14,135 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:14 +0000 (0:00:00.052) 0:02:26.844 ****** 2026-01-22 12:07:14,135 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:14 +0000 (0:00:00.052) 0:02:26.842 ****** 2026-01-22 12:07:14,165 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:07:14,174 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Run crc_storage output_dir={{ cifmw_basedir }}/artifacts, chdir=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls, script=make crc_storage, dry_run={{ make_crc_storage_dryrun|default(false)|bool }}, extra_args={{ dict((make_crc_storage_env|default({})), **(make_crc_storage_params|default({}))) }}] *** 2026-01-22 12:07:14,174 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:14 +0000 (0:00:00.038) 0:02:26.883 ****** 2026-01-22 12:07:14,174 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:14 +0000 (0:00:00.038) 0:02:26.881 ****** 2026-01-22 12:07:14,245 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_004_run_crc.log 2026-01-22 12:07:35,989 p=33295 u=zuul n=ansible | [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. 
Found: {{ make_crc_storage_until | default(true) }} 2026-01-22 12:07:35,992 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:07:36,008 p=33295 u=zuul n=ansible | TASK [Prepare inputs name=install_yamls_makes, tasks_from=make_input] ********** 2026-01-22 12:07:36,008 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:36 +0000 (0:00:21.834) 0:02:48.718 ****** 2026-01-22 12:07:36,008 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:36 +0000 (0:00:21.834) 0:02:48.716 ****** 2026-01-22 12:07:36,064 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_input_env var=make_input_env] *********** 2026-01-22 12:07:36,064 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:36 +0000 (0:00:00.055) 0:02:48.773 ****** 2026-01-22 12:07:36,064 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:36 +0000 (0:00:00.055) 0:02:48.771 ****** 2026-01-22 12:07:36,090 p=33295 u=zuul n=ansible | ok: [localhost] => make_input_env: BMO_SETUP: false CHECKOUT_FROM_OPENSTACK_REF: 'true' KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig NETWORK_MTU: 1500 NNCP_DNS_SERVER: 192.168.122.10 NNCP_INTERFACE: ens7 OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm PATH: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin 2026-01-22 12:07:36,098 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_input_params var=make_input_params] ***** 2026-01-22 12:07:36,099 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:36 +0000 (0:00:00.034) 0:02:48.808 ****** 2026-01-22 12:07:36,099 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:36 +0000 (0:00:00.034) 0:02:48.806 ****** 2026-01-22 12:07:36,117 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:07:36,126 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Run input output_dir={{ cifmw_basedir }}/artifacts, chdir=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls, script=make input, dry_run={{ make_input_dryrun|default(false)|bool }}, extra_args={{ dict((make_input_env|default({})), **(make_input_params|default({}))) }}] *** 2026-01-22 12:07:36,126 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:36 +0000 (0:00:00.027) 0:02:48.836 ****** 2026-01-22 12:07:36,126 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:36 +0000 (0:00:00.027) 0:02:48.834 ****** 2026-01-22 12:07:36,171 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_005_run.log 2026-01-22 12:07:37,257 p=33295 u=zuul n=ansible | [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. 
Found: {{ make_input_until | default(true) }} 2026-01-22 12:07:37,259 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:07:37,273 p=33295 u=zuul n=ansible | TASK [OpenStack meta-operator installation name=install_yamls_makes, tasks_from=make_openstack] *** 2026-01-22 12:07:37,273 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:37 +0000 (0:00:01.146) 0:02:49.983 ****** 2026-01-22 12:07:37,273 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:37 +0000 (0:00:01.146) 0:02:49.981 ****** 2026-01-22 12:07:37,331 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_openstack_env var=make_openstack_env] *** 2026-01-22 12:07:37,331 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:37 +0000 (0:00:00.057) 0:02:50.040 ****** 2026-01-22 12:07:37,331 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:37 +0000 (0:00:00.057) 0:02:50.038 ****** 2026-01-22 12:07:37,359 p=33295 u=zuul n=ansible | ok: [localhost] => make_openstack_env: BMO_SETUP: false CHECKOUT_FROM_OPENSTACK_REF: 'true' KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig NETWORK_MTU: 1500 NNCP_DNS_SERVER: 192.168.122.10 NNCP_INTERFACE: ens7 OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm PATH: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin 2026-01-22 12:07:37,367 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_openstack_params var=make_openstack_params] *** 2026-01-22 12:07:37,368 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:37 +0000 (0:00:00.036) 0:02:50.077 ****** 2026-01-22 12:07:37,368 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:37 +0000 (0:00:00.036) 0:02:50.075 ****** 2026-01-22 12:07:37,388 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:07:37,397 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Run openstack output_dir={{ cifmw_basedir }}/artifacts, chdir=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls, script=make openstack, dry_run={{ make_openstack_dryrun|default(false)|bool }}, extra_args={{ dict((make_openstack_env|default({})), **(make_openstack_params|default({}))) }}] *** 2026-01-22 12:07:37,397 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:37 +0000 (0:00:00.029) 0:02:50.106 ****** 2026-01-22 12:07:37,397 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:37 +0000 (0:00:00.029) 0:02:50.104 ****** 2026-01-22 12:07:37,444 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_006_run.log 2026-01-22 12:10:42,739 p=33295 u=zuul n=ansible | [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. 
Found: {{ make_openstack_until | default(true) }} 2026-01-22 12:10:42,743 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:10:42,758 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Wait for OpenStack subscription creation _raw_params=oc get sub openstack-operator --namespace={{ cifmw_install_yamls_defaults['OPERATOR_NAMESPACE'] }} -o=jsonpath='{.status.installplan.name}'] *** 2026-01-22 12:10:42,758 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:10:42 +0000 (0:03:05.361) 0:05:55.468 ****** 2026-01-22 12:10:42,758 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:10:42 +0000 (0:03:05.361) 0:05:55.466 ****** 2026-01-22 12:11:43,707 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:11:43,716 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Wait for OpenStack operator to get installed _raw_params=oc wait InstallPlan {{ cifmw_edpm_prepare_wait_installplan_out.stdout }} --namespace={{ cifmw_install_yamls_defaults['OPERATOR_NAMESPACE'] }} --for=jsonpath='{.status.phase}'=Complete --timeout=20m] *** 2026-01-22 12:11:43,716 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:43 +0000 (0:01:00.958) 0:06:56.426 ****** 2026-01-22 12:11:43,716 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:43 +0000 (0:01:00.958) 0:06:56.424 ****** 2026-01-22 12:11:44,125 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:11:44,135 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Check if the OpenStack initialization CRD exists kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, kind=CustomResourceDefinition, name=openstacks.operator.openstack.org] *** 2026-01-22 12:11:44,135 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:44 +0000 (0:00:00.419) 0:06:56.845 ****** 2026-01-22 12:11:44,136 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:44 +0000 (0:00:00.419) 0:06:56.843 ****** 2026-01-22 12:11:45,037 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:11:45,048 p=33295 u=zuul n=ansible | TASK [OpenStack meta-operator initialization, if necessary name=install_yamls_makes, tasks_from=make_openstack_init] *** 2026-01-22 12:11:45,048 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:45 +0000 (0:00:00.912) 0:06:57.757 ****** 2026-01-22 12:11:45,048 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:45 +0000 (0:00:00.912) 0:06:57.755 ****** 2026-01-22 12:11:45,113 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_openstack_init_env var=make_openstack_init_env] *** 2026-01-22 12:11:45,114 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:45 +0000 (0:00:00.065) 0:06:57.823 ****** 2026-01-22 12:11:45,114 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:45 +0000 (0:00:00.065) 0:06:57.821 ****** 2026-01-22 12:11:45,150 p=33295 u=zuul n=ansible | ok: [localhost] => make_openstack_init_env: BMO_SETUP: false CHECKOUT_FROM_OPENSTACK_REF: 'true' KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig NETWORK_MTU: 1500 NNCP_DNS_SERVER: 192.168.122.10 NNCP_INTERFACE: ens7 OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm PATH: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin 2026-01-22 12:11:45,159 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_openstack_init_params var=make_openstack_init_params] *** 
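(For reference, the two "Wait for OpenStack subscription creation" / "Wait for OpenStack operator to get installed" tasks recorded above poll the OLM Subscription and then block on its InstallPlan. A minimal standalone sketch of that same sequence, assuming OPERATOR_NAMESPACE stands in for the templated cifmw_install_yamls_defaults['OPERATOR_NAMESPACE'] value used in the log:

    # Resolve the InstallPlan created for the openstack-operator Subscription
    IP_NAME=$(oc get sub openstack-operator \
        --namespace="${OPERATOR_NAMESPACE}" \
        -o=jsonpath='{.status.installplan.name}')
    # Block until OLM reports the InstallPlan phase as Complete (20 minute cap, as in the log)
    oc wait InstallPlan "${IP_NAME}" \
        --namespace="${OPERATOR_NAMESPACE}" \
        --for=jsonpath='{.status.phase}'=Complete --timeout=20m
)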
2026-01-22 12:11:45,159 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:45 +0000 (0:00:00.045) 0:06:57.869 ****** 2026-01-22 12:11:45,159 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:45 +0000 (0:00:00.045) 0:06:57.867 ****** 2026-01-22 12:11:45,187 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:11:45,197 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Run openstack_init output_dir={{ cifmw_basedir }}/artifacts, chdir=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls, script=make openstack_init, dry_run={{ make_openstack_init_dryrun|default(false)|bool }}, extra_args={{ dict((make_openstack_init_env|default({})), **(make_openstack_init_params|default({}))) }}] *** 2026-01-22 12:11:45,197 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:45 +0000 (0:00:00.038) 0:06:57.907 ****** 2026-01-22 12:11:45,197 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:45 +0000 (0:00:00.038) 0:06:57.905 ****** 2026-01-22 12:11:45,255 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_007_run_openstack.log 2026-01-22 12:13:29,514 p=33295 u=zuul n=ansible | [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. Found: {{ make_openstack_init_until | default(true) }} 2026-01-22 12:13:29,515 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:13:29,534 p=33295 u=zuul n=ansible | TASK [Update OpenStack Services containers Env name=set_openstack_containers] *** 2026-01-22 12:13:29,534 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:01:44.336) 0:08:42.243 ****** 2026-01-22 12:13:29,534 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:01:44.336) 0:08:42.241 ****** 2026-01-22 12:13:29,561 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:13:29,569 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Set facts for baremetal UEFI image url cifmw_update_containers_edpm_image_url={{ cifmw_build_images_output['images']['edpm-hardened-uefi']['image'] }}, cacheable=True] *** 2026-01-22 12:13:29,570 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:00:00.035) 0:08:42.279 ****** 2026-01-22 12:13:29,570 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:00:00.035) 0:08:42.277 ****** 2026-01-22 12:13:29,594 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:13:29,606 p=33295 u=zuul n=ansible | TASK [Prepare OpenStack control plane CR name=install_yamls_makes, tasks_from=make_openstack_deploy_prep] *** 2026-01-22 12:13:29,607 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:00:00.036) 0:08:42.316 ****** 2026-01-22 12:13:29,607 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:00:00.036) 0:08:42.314 ****** 2026-01-22 12:13:29,672 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_openstack_deploy_prep_env var=make_openstack_deploy_prep_env] *** 2026-01-22 12:13:29,672 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:00:00.065) 0:08:42.382 ****** 2026-01-22 12:13:29,672 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:00:00.065) 0:08:42.380 ****** 2026-01-22 12:13:29,707 p=33295 u=zuul n=ansible | ok: [localhost] => make_openstack_deploy_prep_env: BMO_SETUP: false CHECKOUT_FROM_OPENSTACK_REF: 'true' CLEANUP_DIR_CMD: 'true' KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig NETWORK_MTU: 1500 NNCP_DNS_SERVER: 192.168.122.10 NNCP_INTERFACE: ens7 
OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm PATH: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin 2026-01-22 12:13:29,717 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_openstack_deploy_prep_params var=make_openstack_deploy_prep_params] *** 2026-01-22 12:13:29,717 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:00:00.044) 0:08:42.426 ****** 2026-01-22 12:13:29,717 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:00:00.044) 0:08:42.424 ****** 2026-01-22 12:13:29,744 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:13:29,753 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Run openstack_deploy_prep output_dir={{ cifmw_basedir }}/artifacts, chdir=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls, script=make openstack_deploy_prep, dry_run={{ make_openstack_deploy_prep_dryrun|default(false)|bool }}, extra_args={{ dict((make_openstack_deploy_prep_env|default({})), **(make_openstack_deploy_prep_params|default({}))) }}] *** 2026-01-22 12:13:29,753 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:00:00.036) 0:08:42.462 ****** 2026-01-22 12:13:29,753 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:00:00.036) 0:08:42.461 ****** 2026-01-22 12:13:29,810 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_008_run_openstack_deploy.log 2026-01-22 12:13:31,038 p=33295 u=zuul n=ansible | [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. Found: {{ make_openstack_deploy_prep_until | default(true) }} 2026-01-22 12:13:31,040 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:13:31,058 p=33295 u=zuul n=ansible | TASK [Deploy NetConfig name=install_yamls_makes, tasks_from=make_netconfig_deploy] *** 2026-01-22 12:13:31,059 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:31 +0000 (0:00:01.305) 0:08:43.768 ****** 2026-01-22 12:13:31,059 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:31 +0000 (0:00:01.305) 0:08:43.766 ****** 2026-01-22 12:13:31,133 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_netconfig_deploy_env var=make_netconfig_deploy_env] *** 2026-01-22 12:13:31,133 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:31 +0000 (0:00:00.074) 0:08:43.843 ****** 2026-01-22 12:13:31,133 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:31 +0000 (0:00:00.074) 0:08:43.841 ****** 2026-01-22 12:13:31,161 p=33295 u=zuul n=ansible | ok: [localhost] => make_netconfig_deploy_env: BMO_SETUP: false CHECKOUT_FROM_OPENSTACK_REF: 'true' KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig NETWORK_MTU: 1500 NNCP_DNS_SERVER: 192.168.122.10 NNCP_INTERFACE: ens7 OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm PATH: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin 2026-01-22 12:13:31,169 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_netconfig_deploy_params var=make_netconfig_deploy_params] *** 2026-01-22 12:13:31,169 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:31 +0000 (0:00:00.036) 0:08:43.879 ****** 2026-01-22 12:13:31,170 
p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:31 +0000 (0:00:00.036) 0:08:43.877 ****** 2026-01-22 12:13:31,191 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:13:31,201 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Run netconfig_deploy output_dir={{ cifmw_basedir }}/artifacts, chdir=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls, script=make netconfig_deploy, dry_run={{ make_netconfig_deploy_dryrun|default(false)|bool }}, extra_args={{ dict((make_netconfig_deploy_env|default({})), **(make_netconfig_deploy_params|default({}))) }}] *** 2026-01-22 12:13:31,201 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:31 +0000 (0:00:00.031) 0:08:43.911 ****** 2026-01-22 12:13:31,201 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:31 +0000 (0:00:00.031) 0:08:43.909 ****** 2026-01-22 12:13:31,261 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_009_run_netconfig.log 2026-01-22 12:13:35,810 p=33295 u=zuul n=ansible | [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. Found: {{ make_netconfig_deploy_until | default(true) }} 2026-01-22 12:13:35,813 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:13:35,831 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Kustomize and deploy OpenStackControlPlane _raw_params=kustomize_and_deploy.yml] *** 2026-01-22 12:13:35,831 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:35 +0000 (0:00:04.629) 0:08:48.540 ****** 2026-01-22 12:13:35,831 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:35 +0000 (0:00:04.629) 0:08:48.538 ****** 2026-01-22 12:13:35,873 p=33295 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/edpm_prepare/tasks/kustomize_and_deploy.yml for localhost 2026-01-22 12:13:35,904 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Controlplane name _ctlplane_name=controlplane] ************ 2026-01-22 12:13:35,904 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:35 +0000 (0:00:00.073) 0:08:48.614 ****** 2026-01-22 12:13:35,905 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:35 +0000 (0:00:00.073) 0:08:48.612 ****** 2026-01-22 12:13:35,928 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:13:35,938 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Set vars related to update_containers content provider cifmw_update_containers_registry={{ content_provider_os_registry_url | split('/') | first }}, cifmw_update_containers_org={{ content_provider_os_registry_url | split('/') | last }}, cifmw_update_containers_tag={{ content_provider_dlrn_md5_hash }}, cifmw_update_containers_openstack=True] *** 2026-01-22 12:13:35,938 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:35 +0000 (0:00:00.033) 0:08:48.647 ****** 2026-01-22 12:13:35,938 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:35 +0000 (0:00:00.033) 0:08:48.645 ****** 2026-01-22 12:13:35,959 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:13:35,968 p=33295 u=zuul n=ansible | TASK [Prepare OpenStackVersion CR name=update_containers] ********************** 2026-01-22 12:13:35,968 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:35 +0000 (0:00:00.030) 0:08:48.678 ****** 2026-01-22 12:13:35,968 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:35 +0000 (0:00:00.030) 0:08:48.676 ****** 2026-01-22 12:13:35,992 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:13:36,001 p=33295 u=zuul 
n=ansible | TASK [edpm_prepare : Controlplane name kustomization _ctlplane_name_kustomizations=[{'apiVersion': 'kustomize.config.k8s.io/v1beta1', 'kind': 'Kustomization', 'patches': [{'target': {'kind': 'OpenStackControlPlane'}, 'patch': '- op: replace\n path: /metadata/name\n value: {{ _ctlplane_name }}'}]}]] *** 2026-01-22 12:13:36,001 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:36 +0000 (0:00:00.032) 0:08:48.710 ****** 2026-01-22 12:13:36,001 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:36 +0000 (0:00:00.032) 0:08:48.709 ****** 2026-01-22 12:13:36,027 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:13:36,046 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Perform kustomizations to the OpenStackControlPlane CR target_path={{ cifmw_edpm_prepare_openstack_crs_path }}, sort_ascending=False, kustomizations={{ cifmw_edpm_prepare_kustomizations + _ctlplane_name_kustomizations + (cifmw_edpm_prepare_extra_kustomizations | default([])) }}, kustomizations_paths={{ [ ( [ cifmw_edpm_prepare_manifests_dir, 'kustomizations', 'controlplane' ] | ansible.builtin.path_join ) ] }}] *** 2026-01-22 12:13:36,046 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:36 +0000 (0:00:00.044) 0:08:48.755 ****** 2026-01-22 12:13:36,046 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:36 +0000 (0:00:00.044) 0:08:48.753 ****** 2026-01-22 12:13:36,766 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:13:36,781 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Log the CR that is about to be applied var=cifmw_edpm_prepare_crs_kustomize_result] *** 2026-01-22 12:13:36,781 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:36 +0000 (0:00:00.735) 0:08:49.490 ****** 2026-01-22 12:13:36,781 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:36 +0000 (0:00:00.735) 0:08:49.488 ****** 2026-01-22 12:13:36,821 p=33295 u=zuul n=ansible | ok: [localhost] => cifmw_edpm_prepare_crs_kustomize_result: changed: true count: 3 failed: false kustomizations_paths: - /home/zuul/ci-framework-data/artifacts/manifests/openstack/openstack/cr/kustomization.yaml - /home/zuul/ci-framework-data/artifacts/manifests/kustomizations/controlplane/99-kustomization.yaml output_path: /home/zuul/ci-framework-data/artifacts/manifests/openstack/openstack/cr/cifmw-kustomization-result.yaml result: - apiVersion: core.openstack.org/v1beta1 kind: OpenStackControlPlane metadata: labels: created-by: install_yamls name: controlplane namespace: openstack spec: barbican: apiOverride: route: {} template: barbicanAPI: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 barbicanKeystoneListener: replicas: 1 barbicanWorker: replicas: 1 databaseInstance: openstack secret: osp-secret cinder: apiOverride: route: {} template: cinderAPI: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer cinderBackup: networkAttachments: - storage replicas: 0 cinderScheduler: replicas: 1 cinderVolumes: volume1: networkAttachments: - storage replicas: 0 databaseInstance: openstack secret: osp-secret designate: apiOverride: route: {} enabled: false template: databaseInstance: openstack designateAPI: override: service: internal: metadata: annotations: 
metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer designateBackendbind9: networkAttachments: - designate replicas: 1 storageClass: local-storage storageRequest: 10G designateCentral: replicas: 1 designateMdns: networkAttachments: - designate replicas: 1 designateProducer: replicas: 1 designateWorker: networkAttachments: - designate replicas: 1 secret: osp-secret dns: template: options: - key: server values: - 192.168.122.10 - key: no-negcache values: [] override: service: metadata: annotations: metallb.universe.tf/address-pool: ctlplane metallb.universe.tf/allow-shared-ip: ctlplane metallb.universe.tf/loadBalancerIPs: 192.168.122.80 spec: type: LoadBalancer replicas: 1 galera: templates: openstack: replicas: 1 secret: osp-secret storageRequest: 10G openstack-cell1: replicas: 1 secret: osp-secret storageRequest: 10G glance: apiOverrides: default: route: {} template: customServiceConfig: | [DEFAULT] enabled_backends = default_backend:swift [glance_store] default_backend = default_backend [default_backend] swift_store_create_container_on_put = True swift_store_auth_version = 3 swift_store_auth_address = {{ .KeystoneInternalURL }} swift_store_endpoint_type = internalURL swift_store_user = service:glance swift_store_key = {{ .ServicePassword }} databaseInstance: openstack glanceAPIs: default: networkAttachments: - storage override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 keystoneEndpoint: default secret: osp-secret storage: storageClass: '' storageRequest: 10G heat: apiOverride: route: {} cnfAPIOverride: route: {} enabled: false template: databaseInstance: openstack heatAPI: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 heatEngine: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 secret: osp-secret horizon: apiOverride: route: {} template: replicas: 1 secret: osp-secret ironic: enabled: false template: databaseInstance: openstack ironicAPI: replicas: 1 ironicConductors: - replicas: 1 storageRequest: 10G ironicInspector: replicas: 1 ironicNeutronAgent: replicas: 1 secret: osp-secret keystone: apiOverride: route: {} template: databaseInstance: openstack override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer secret: osp-secret manila: apiOverride: route: {} template: databaseInstance: openstack manilaAPI: networkAttachments: - internalapi override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 manilaScheduler: replicas: 1 manilaShares: share1: networkAttachments: - storage replicas: 1 memcached: templates: memcached: replicas: 1 neutron: apiOverride: route: {} template: databaseInstance: openstack 
networkAttachments: - internalapi override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer secret: osp-secret nova: apiOverride: route: {} template: apiServiceTemplate: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer cellTemplates: cell0: cellDatabaseAccount: nova-cell0 cellDatabaseInstance: openstack cellMessageBusInstance: rabbitmq conductorServiceTemplate: replicas: 1 hasAPIAccess: true cell1: cellDatabaseAccount: nova-cell1 cellDatabaseInstance: openstack-cell1 cellMessageBusInstance: rabbitmq-cell1 conductorServiceTemplate: replicas: 1 hasAPIAccess: true metadataServiceTemplate: override: service: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer secret: osp-secret octavia: enabled: false template: databaseInstance: openstack octaviaAPI: replicas: 1 secret: osp-secret ovn: template: ovnController: networkAttachment: tenant nicMappings: datacentre: ospbr ovnDBCluster: ovndbcluster-nb: dbType: NB networkAttachment: internalapi storageRequest: 10G ovndbcluster-sb: dbType: SB networkAttachment: internalapi storageRequest: 10G placement: apiOverride: route: {} template: databaseInstance: openstack override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer secret: osp-secret rabbitmq: templates: rabbitmq: override: service: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.85 spec: type: LoadBalancer rabbitmq-cell1: override: service: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.86 spec: type: LoadBalancer redis: enabled: false secret: osp-secret storageClass: local-storage swift: enabled: true proxyOverride: route: {} template: swiftProxy: networkAttachments: - storage override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 swiftRing: ringReplicas: 1 swiftStorage: networkAttachments: - storage replicas: 1 telemetry: enabled: true template: autoscaling: aodh: databaseAccount: aodh databaseInstance: openstack passwordSelectors: null secret: osp-secret enabled: false heatInstance: heat ceilometer: enabled: true secret: osp-secret cloudkitty: apiTimeout: 0 cloudKittyAPI: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 resources: {} tls: api: internal: {} public: {} caBundleSecretName: combined-ca-bundle cloudKittyProc: replicas: 1 resources: {} tls: caBundleSecretName: combined-ca-bundle databaseAccount: cloudkitty databaseInstance: openstack enabled: false memcachedInstance: memcached passwordSelector: aodhService: AodhPassword ceilometerService: CeilometerPassword 
cloudKittyService: CloudKittyPassword preserveJobs: false rabbitMqClusterName: rabbitmq s3StorageConfig: schemas: - effectiveDate: '2024-11-18' version: v13 secret: name: cloudkitty-loki-s3 type: s3 secret: osp-secret serviceUser: cloudkitty storageClass: local-storage logging: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 cloNamespace: openshift-logging enabled: false ipaddr: 172.17.0.80 port: 10514 metricStorage: enabled: false monitoringStack: alertingEnabled: true scrapeInterval: 30s storage: persistent: pvcStorageRequest: 10G retention: 24h strategy: persistent 2026-01-22 12:13:36,832 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Apply the OpenStackControlPlane CR output_dir={{ cifmw_edpm_prepare_basedir }}/artifacts, script=oc apply -f {{ cifmw_edpm_prepare_crs_kustomize_result.output_path }}] *** 2026-01-22 12:13:36,833 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:36 +0000 (0:00:00.051) 0:08:49.542 ****** 2026-01-22 12:13:36,833 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:36 +0000 (0:00:00.051) 0:08:49.540 ****** 2026-01-22 12:13:36,888 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_010_apply_the.log 2026-01-22 12:13:37,248 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:13:37,261 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Wait for control plane to change its status seconds={{ cifmw_edpm_prepare_wait_controplane_status_change_sec }}] *** 2026-01-22 12:13:37,261 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:37 +0000 (0:00:00.428) 0:08:49.970 ****** 2026-01-22 12:13:37,261 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:37 +0000 (0:00:00.428) 0:08:49.969 ****** 2026-01-22 12:13:37,290 p=33295 u=zuul n=ansible | Pausing for 30 seconds 2026-01-22 12:14:07,324 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:14:07,335 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Wait for OpenStack controlplane to be deployed _raw_params=oc wait OpenStackControlPlane {{ _ctlplane_name }} --namespace={{ cifmw_install_yamls_defaults['NAMESPACE'] }} --for=condition=ready --timeout={{ cifmw_edpm_prepare_timeout }}m] *** 2026-01-22 12:14:07,335 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:14:07 +0000 (0:00:30.074) 0:09:20.044 ****** 2026-01-22 12:14:07,335 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:14:07 +0000 (0:00:30.074) 0:09:20.043 ****** 2026-01-22 12:19:30,435 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:30,445 p=33295 u=zuul n=ansible | TASK [Extract and install OpenStackControlplane CA role=install_openstack_ca] *** 2026-01-22 12:19:30,445 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:30 +0000 (0:05:23.110) 0:14:43.155 ****** 2026-01-22 12:19:30,445 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:30 +0000 (0:05:23.110) 0:14:43.153 ****** 2026-01-22 12:19:30,549 p=33295 u=zuul n=ansible | TASK [install_openstack_ca : Get CA bundle data with retries] ****************** 2026-01-22 12:19:30,549 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:30 +0000 (0:00:00.103) 0:14:43.258 ****** 2026-01-22 12:19:30,549 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:30 +0000 (0:00:00.103) 0:14:43.256 ****** 2026-01-22 12:19:30,948 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:30,958 p=33295 u=zuul n=ansible | TASK [install_openstack_ca : Set _ca_bundle fact if CA returned from 
OCP] ****** 2026-01-22 12:19:30,958 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:30 +0000 (0:00:00.408) 0:14:43.667 ****** 2026-01-22 12:19:30,958 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:30 +0000 (0:00:00.408) 0:14:43.665 ****** 2026-01-22 12:19:30,995 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:31,005 p=33295 u=zuul n=ansible | TASK [install_openstack_ca : Creating tls-ca-bundle.pem from CA bundle dest={{ cifmw_install_openstack_ca_file_full_path }}, content={{ _ca_bundle }}, mode=0644] *** 2026-01-22 12:19:31,006 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.047) 0:14:43.715 ****** 2026-01-22 12:19:31,006 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.047) 0:14:43.713 ****** 2026-01-22 12:19:31,423 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:31,433 p=33295 u=zuul n=ansible | TASK [install_openstack_ca : Check if OpenStackControlplane CA file is present path={{ cifmw_install_openstack_ca_file_full_path }}, get_attributes=False, get_checksum=False, get_mime=False] *** 2026-01-22 12:19:31,434 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.428) 0:14:44.143 ****** 2026-01-22 12:19:31,434 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.428) 0:14:44.141 ****** 2026-01-22 12:19:31,620 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:31,628 p=33295 u=zuul n=ansible | TASK [Call install_ca role to inject OpenStackControlplane CA file if present role=install_ca] *** 2026-01-22 12:19:31,628 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.194) 0:14:44.337 ****** 2026-01-22 12:19:31,628 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.194) 0:14:44.335 ****** 2026-01-22 12:19:31,683 p=33295 u=zuul n=ansible | TASK [install_ca : Ensure target directory exists path={{ cifmw_install_ca_trust_dir }}, state=directory, mode=0755] *** 2026-01-22 12:19:31,683 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.055) 0:14:44.393 ****** 2026-01-22 12:19:31,684 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.055) 0:14:44.391 ****** 2026-01-22 12:19:31,887 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:31,895 p=33295 u=zuul n=ansible | TASK [install_ca : Install internal CA from url url={{ cifmw_install_ca_url }}, dest={{ cifmw_install_ca_trust_dir }}, validate_certs={{ cifmw_install_ca_url_validate_certs | default(omit) }}, mode=0644] *** 2026-01-22 12:19:31,895 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.211) 0:14:44.605 ****** 2026-01-22 12:19:31,896 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.211) 0:14:44.603 ****** 2026-01-22 12:19:31,918 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:31,927 p=33295 u=zuul n=ansible | TASK [install_ca : Install custom CA bundle from inline dest={{ cifmw_install_ca_trust_dir }}/cifmw_inline_ca_bundle.crt, content={{ cifmw_install_ca_bundle_inline }}, mode=0644] *** 2026-01-22 12:19:31,927 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.031) 0:14:44.636 ****** 2026-01-22 12:19:31,927 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.031) 0:14:44.634 ****** 2026-01-22 12:19:31,949 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:31,959 p=33295 u=zuul n=ansible | TASK [install_ca : Install custom CA bundle from file 
dest={{ cifmw_install_ca_trust_dir }}/{{ cifmw_install_ca_bundle_src | basename }}, src={{ cifmw_install_ca_bundle_src }}, mode=0644] *** 2026-01-22 12:19:31,959 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.032) 0:14:44.668 ****** 2026-01-22 12:19:31,959 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.032) 0:14:44.666 ****** 2026-01-22 12:19:32,405 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:32,413 p=33295 u=zuul n=ansible | TASK [install_ca : Update ca bundle _raw_params=update-ca-trust] *************** 2026-01-22 12:19:32,413 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:32 +0000 (0:00:00.454) 0:14:45.123 ****** 2026-01-22 12:19:32,413 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:32 +0000 (0:00:00.454) 0:14:45.121 ****** 2026-01-22 12:19:34,008 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:34,039 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Extract keystone endpoint host _raw_params=oc get keystoneapi keystone --namespace={{ cifmw_install_yamls_defaults['NAMESPACE'] }} -o jsonpath='{ .status.apiEndpoints.public }'] *** 2026-01-22 12:19:34,040 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:34 +0000 (0:00:01.626) 0:14:46.749 ****** 2026-01-22 12:19:34,040 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:34 +0000 (0:00:01.626) 0:14:46.747 ****** 2026-01-22 12:19:34,400 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:34,408 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Wait for keystone endpoint to exist in DNS url={{ _cifmw_edpm_prepare_keystone_endpoint_out.stdout | trim }}, status_code={{ _keystone_response_codes }}, validate_certs={{ cifmw_edpm_prepare_verify_tls }}] *** 2026-01-22 12:19:34,408 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:34 +0000 (0:00:00.368) 0:14:47.118 ****** 2026-01-22 12:19:34,408 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:34 +0000 (0:00:00.368) 0:14:47.116 ****** 2026-01-22 12:19:34,850 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:34,882 p=33295 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2026-01-22 12:19:34,883 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:34 +0000 (0:00:00.474) 0:14:47.592 ****** 2026-01-22 12:19:34,883 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:34 +0000 (0:00:00.474) 0:14:47.590 ****** 2026-01-22 12:19:35,023 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:35,032 p=33295 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] 
*** 2026-01-22 12:19:35,032 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.149) 0:14:47.741 ****** 2026-01-22 12:19:35,032 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.149) 0:14:47.739 ****** 2026-01-22 12:19:35,182 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:35,192 p=33295 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_ctlplane_deploy _raw_params={{ hook.type }}.yml] *** 2026-01-22 12:19:35,192 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.160) 0:14:47.902 ****** 2026-01-22 12:19:35,192 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.160) 0:14:47.900 ****** 2026-01-22 12:19:35,327 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,358 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] *** 2026-01-22 12:19:35,359 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.166) 0:14:48.068 ****** 2026-01-22 12:19:35,359 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.166) 0:14:48.066 ****** 2026-01-22 12:19:35,557 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:35,574 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Define minimal set of repo variables when not running on Zuul _install_yamls_repos={{ ( { 'OPENSTACK_REPO': operators_build_output[cifmw_operator_build_meta_name].git_src_dir, 'OPENSTACK_BRANCH': '', 'GIT_CLONE_OPTS': '-l', } if (cifmw_operator_build_meta_name is defined and cifmw_operator_build_meta_name in operators_build_output) else {} ) }}] *** 2026-01-22 12:19:35,575 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.216) 0:14:48.284 ****** 2026-01-22 12:19:35,575 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.216) 0:14:48.282 ****** 2026-01-22 12:19:35,602 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,611 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Set install_yamls Makefile environment variables cifmw_edpm_deploy_baremetal_common_env={{ cifmw_install_yamls_environment | combine({'PATH': cifmw_path}) | combine(_install_yamls_repos | default({})) }}, cifmw_edpm_deploy_baremetal_make_openstack_env={{ cifmw_edpm_deploy_baremetal_make_openstack_env | default({}) | combine( { 'OPENSTACK_IMG': operators_build_output[cifmw_operator_build_meta_name].image_catalog, } if (cifmw_operator_build_meta_name is defined and cifmw_operator_build_meta_name in operators_build_output) else {} ) }}, cifmw_edpm_deploy_baremetal_operators_build_output={{ operators_build_output }}] *** 2026-01-22 12:19:35,611 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.036) 0:14:48.321 ****** 2026-01-22 12:19:35,612 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.036) 0:14:48.319 ****** 2026-01-22 12:19:35,650 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,660 p=33295 u=zuul n=ansible | TASK [Create virtual baremetal name=install_yamls_makes, tasks_from=make_edpm_baremetal_compute] *** 2026-01-22 12:19:35,660 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.048) 0:14:48.369 ****** 2026-01-22 12:19:35,660 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.048) 0:14:48.368 ****** 2026-01-22 12:19:35,686 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,695 p=33295 u=zuul n=ansible | TASK 
[edpm_deploy_baremetal : Create the config file mode=0644, content={{ cifmw_edpm_deploy_baremetal_nova_compute_extra_config }}, dest={{ _cifmw_edpm_deploy_baremetal_nova_extra_config_file }}] *** 2026-01-22 12:19:35,695 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.034) 0:14:48.404 ****** 2026-01-22 12:19:35,695 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.034) 0:14:48.402 ****** 2026-01-22 12:19:35,721 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,730 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Define DATAPLANE_EXTRA_NOVA_CONFIG_FILE cifmw_edpm_deploy_baremetal_common_env={{ cifmw_edpm_deploy_baremetal_common_env | default({}) | combine({'DATAPLANE_EXTRA_NOVA_CONFIG_FILE': _cifmw_edpm_deploy_baremetal_nova_extra_config_file }) }}, cacheable=True] *** 2026-01-22 12:19:35,730 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.034) 0:14:48.439 ****** 2026-01-22 12:19:35,730 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.034) 0:14:48.437 ****** 2026-01-22 12:19:35,755 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,764 p=33295 u=zuul n=ansible | TASK [Prepare OpenStack Dataplane NodeSet CR name=install_yamls_makes, tasks_from=make_edpm_deploy_baremetal_prep] *** 2026-01-22 12:19:35,764 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.034) 0:14:48.473 ****** 2026-01-22 12:19:35,764 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.034) 0:14:48.471 ****** 2026-01-22 12:19:35,793 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,804 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Perform kustomizations to the OpenStackDataPlaneNodeSet CR target_path={{ cifmw_edpm_deploy_openstack_crs_path }}, sort_ascending=False, kustomizations={% if content_provider_registry_ip is defined or not cifmw_edpm_deploy_baremetal_bootc %} apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization patches: - target: kind: OpenStackDataPlaneNodeSet patch: |- {% if content_provider_registry_ip is defined %} - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_container_registry_insecure_registries value: ["{{ content_provider_registry_ip }}:5001"] {% endif %} {% if not cifmw_edpm_deploy_baremetal_bootc %} - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_bootstrap_command value: sudo dnf -y update {% endif %} {% endif %}, kustomizations_paths={{ [ ( [ cifmw_edpm_deploy_baremetal_manifests_dir, 'kustomizations', 'dataplane' ] | ansible.builtin.path_join ) ] }}] *** 2026-01-22 12:19:35,804 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.040) 0:14:48.514 ****** 2026-01-22 12:19:35,804 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.040) 0:14:48.512 ****** 2026-01-22 12:19:35,829 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,838 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Log the CR that is about to be applied var=cifmw_edpm_deploy_baremetal_crs_kustomize_result] *** 2026-01-22 12:19:35,839 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.034) 0:14:48.548 ****** 2026-01-22 12:19:35,839 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.034) 0:14:48.546 ****** 2026-01-22 12:19:35,867 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,878 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : 
Create repo-setup-downstream OpenStackDataPlaneService _raw_params=oc apply -n {{ cifmw_install_yamls_defaults['NAMESPACE'] }} -f "{{ cifmw_installyamls_repos }}/devsetup/edpm/services/dataplane_v1beta1_openstackdataplaneservice_reposetup_downstream.yaml"] *** 2026-01-22 12:19:35,878 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.038) 0:14:48.587 ****** 2026-01-22 12:19:35,878 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.038) 0:14:48.585 ****** 2026-01-22 12:19:35,900 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,910 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Get list of services defined under OpenStackDataPlaneNodeSet resource _raw_params=yq '.spec.services[]' {{ cifmw_edpm_deploy_baremetal_crs_kustomize_result.output_path }}] *** 2026-01-22 12:19:35,910 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.032) 0:14:48.619 ****** 2026-01-22 12:19:35,910 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.032) 0:14:48.617 ****** 2026-01-22 12:19:35,934 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,943 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Patch OpenStackDataPlaneNodeSet resource to add "repo-setup-downstream" service _raw_params=yq -i '.spec.services = ["repo-setup-downstream"] + .spec.services' {{ cifmw_edpm_deploy_baremetal_crs_kustomize_result.output_path }}] *** 2026-01-22 12:19:35,943 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.033) 0:14:48.653 ****** 2026-01-22 12:19:35,943 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.033) 0:14:48.651 ****** 2026-01-22 12:19:35,967 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,976 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Patch OpenStackDataPlaneNodeSet resource to replace "repo-setup" with "repo-setup-downstream" service _raw_params=yq -i '(.spec.services[] | select(. 
== "repo-setup")) |= "repo-setup-downstream"' {{ cifmw_edpm_deploy_baremetal_crs_kustomize_result.output_path }}] *** 2026-01-22 12:19:35,976 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.032) 0:14:48.685 ****** 2026-01-22 12:19:35,976 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.032) 0:14:48.683 ****** 2026-01-22 12:19:36,001 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,010 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Apply the OpenStackDataPlaneNodeSet CR output_dir={{ cifmw_edpm_deploy_baremetal_basedir }}/artifacts, script=oc apply -f {{ cifmw_edpm_deploy_baremetal_crs_kustomize_result.output_path }}] *** 2026-01-22 12:19:36,010 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.033) 0:14:48.719 ****** 2026-01-22 12:19:36,010 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.033) 0:14:48.717 ****** 2026-01-22 12:19:36,034 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,043 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Wait for Ironic to be ready _raw_params=oc wait pod -l name=ironic -n baremetal-operator-system --for=condition=Ready --timeout={{ cifmw_edpm_deploy_baremetal_wait_ironic_timeout_mins }}m] *** 2026-01-22 12:19:36,043 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.033) 0:14:48.752 ****** 2026-01-22 12:19:36,043 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.033) 0:14:48.750 ****** 2026-01-22 12:19:36,070 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,081 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Wait for OpenStack Provision Server pod to be created _raw_params=oc get po -l osp-provisionserver/name=openstack-edpm-ipam-provisionserver -n {{ cifmw_install_yamls_defaults['NAMESPACE'] }} -o name] *** 2026-01-22 12:19:36,082 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.038) 0:14:48.791 ****** 2026-01-22 12:19:36,082 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.038) 0:14:48.789 ****** 2026-01-22 12:19:36,108 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,120 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Wait for OpenStack Provision Server deployment to be available _raw_params=oc wait deployment openstack-edpm-ipam-provisionserver-openstackprovisionserver -n {{ cifmw_install_yamls_defaults['NAMESPACE'] }} --for condition=Available --timeout={{ cifmw_edpm_deploy_baremetal_wait_provisionserver_timeout_mins }}m] *** 2026-01-22 12:19:36,120 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.038) 0:14:48.829 ****** 2026-01-22 12:19:36,120 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.038) 0:14:48.827 ****** 2026-01-22 12:19:36,145 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,155 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Wait for baremetal nodes to reach 'provisioned' state _raw_params=oc wait bmh --all -n {{ cifmw_install_yamls_defaults['NAMESPACE'] }} --for=jsonpath='{.status.provisioning.state}'=provisioned --timeout={{ cifmw_edpm_deploy_baremetal_wait_bmh_timeout_mins }}m] *** 2026-01-22 12:19:36,155 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.035) 0:14:48.865 ****** 2026-01-22 12:19:36,155 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.035) 0:14:48.863 ****** 
2026-01-22 12:19:36,182 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,191 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Register the list of compute nodes _raw_params=oc get bmh -n {{ cifmw_install_yamls_defaults['NAMESPACE'] }}] *** 2026-01-22 12:19:36,191 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.035) 0:14:48.900 ****** 2026-01-22 12:19:36,191 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.035) 0:14:48.898 ****** 2026-01-22 12:19:36,214 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,224 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Print the list of compute nodes var=compute_nodes_output.stdout_lines] *** 2026-01-22 12:19:36,224 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.032) 0:14:48.933 ****** 2026-01-22 12:19:36,224 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.032) 0:14:48.931 ****** 2026-01-22 12:19:36,248 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,257 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Wait for OpenStackDataPlaneNodeSet to be deployed _raw_params=oc wait OpenStackDataPlaneNodeSet {{ cr_name }} --namespace={{ cifmw_install_yamls_defaults['NAMESPACE'] }} --for=condition=ready --timeout={{ cifmw_edpm_deploy_baremetal_wait_dataplane_timeout_mins }}m] *** 2026-01-22 12:19:36,257 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.032) 0:14:48.966 ****** 2026-01-22 12:19:36,257 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.032) 0:14:48.964 ****** 2026-01-22 12:19:36,279 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,288 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Run nova-manage discover_hosts to ensure compute nodes are mapped _raw_params=oc rsh -n {{ cifmw_install_yamls_defaults['NAMESPACE'] }} nova-cell0-conductor-0 nova-manage cell_v2 discover_hosts --verbose] *** 2026-01-22 12:19:36,288 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.031) 0:14:48.998 ****** 2026-01-22 12:19:36,288 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.031) 0:14:48.996 ****** 2026-01-22 12:19:36,312 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,330 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] *** 2026-01-22 12:19:36,330 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.041) 0:14:49.040 ****** 2026-01-22 12:19:36,330 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.041) 0:14:49.038 ****** 2026-01-22 12:19:36,393 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:36,404 p=33295 u=zuul n=ansible | TASK [libvirt_manager : Set compute config and common environment facts compute_config={{ cifmw_libvirt_manager_configuration['vms']['compute'] }}, cifmw_libvirt_manager_common_env={{ cifmw_install_yamls_environment | combine({'PATH': cifmw_path }) }}, cacheable=True] *** 2026-01-22 12:19:36,404 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.073) 0:14:49.113 ****** 2026-01-22 12:19:36,404 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.073) 0:14:49.111 ****** 2026-01-22 12:19:36,432 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,444 p=33295 u=zuul n=ansible | TASK [libvirt_manager : Ensure needed directories 
exist path={{ item }}, state=directory, mode=0755] *** 2026-01-22 12:19:36,444 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.039) 0:14:49.153 ****** 2026-01-22 12:19:36,444 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.039) 0:14:49.151 ****** 2026-01-22 12:19:36,479 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=/home/zuul/ci-framework-data/workload) 2026-01-22 12:19:36,490 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/edpm_compute) 2026-01-22 12:19:36,496 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/openstack/cr/) 2026-01-22 12:19:36,497 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,508 p=33295 u=zuul n=ansible | TASK [libvirt_manager : Ensure image is available _raw_params=get_image.yml] *** 2026-01-22 12:19:36,509 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.064) 0:14:49.218 ****** 2026-01-22 12:19:36,509 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.064) 0:14:49.216 ****** 2026-01-22 12:19:36,535 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,544 p=33295 u=zuul n=ansible | TASK [Create EDPM compute VMs name=install_yamls_makes, tasks_from=make_edpm_compute.yml] *** 2026-01-22 12:19:36,544 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.035) 0:14:49.253 ****** 2026-01-22 12:19:36,544 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.035) 0:14:49.251 ****** 2026-01-22 12:19:36,571 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,581 p=33295 u=zuul n=ansible | TASK [libvirt_manager : Catch compute IPs _raw_params=virsh -c qemu:///system -q domifaddr --source arp --domain edpm-compute-{{ item }}] *** 2026-01-22 12:19:36,581 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.037) 0:14:49.291 ****** 2026-01-22 12:19:36,581 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.037) 0:14:49.289 ****** 2026-01-22 12:19:36,611 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,621 p=33295 u=zuul n=ansible | TASK [libvirt_manager : Ensure we get SSH host={{ item.stdout.split()[-1].split('/')[0] }}, port=22, timeout=60] *** 2026-01-22 12:19:36,621 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.039) 0:14:49.330 ****** 2026-01-22 12:19:36,621 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.039) 0:14:49.328 ****** 2026-01-22 12:19:36,647 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,657 p=33295 u=zuul n=ansible | TASK [libvirt_manager : Output CR for extra computes dest={{ cifmw_libvirt_manager_basedir }}/artifacts/{{ cifmw_install_yamls_defaults['NAMESPACE'] }}/cr/99-cifmw-computes-{{ item }}.yaml, src=kustomize_compute.yml.j2, mode=0644] *** 2026-01-22 12:19:36,657 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.036) 0:14:49.366 ****** 2026-01-22 12:19:36,657 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.036) 0:14:49.364 ****** 2026-01-22 12:19:36,689 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,705 p=33295 u=zuul n=ansible | TASK [Prepare for HCI deploy phase 1 name=hci_prepare, tasks_from=phase1.yml] *** 2026-01-22 12:19:36,706 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.048) 0:14:49.415 ****** 
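(The skipped libvirt_manager tasks above resolve each EDPM compute VM's address over ARP and then wait for SSH to answer. A minimal sketch of that lookup for a single domain, assuming edpm-compute-0 as the domain name; the awk/cut/nc steps are my plain-shell equivalent of the role's item.stdout.split()[-1].split('/')[0] parsing and wait_for task, which uses a 60 second timeout:

    # Query the ARP table for the domain's interface address (last column is addr/prefix)
    ADDR=$(virsh -c qemu:///system -q domifaddr --source arp --domain edpm-compute-0 \
        | awk '{print $NF}' | cut -d/ -f1)
    # Wait until SSH answers on the resolved address
    timeout 60 bash -c "until nc -z ${ADDR} 22; do sleep 2; done"
)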
2026-01-22 12:19:36,706 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.048) 0:14:49.413 ****** 2026-01-22 12:19:36,732 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,742 p=33295 u=zuul n=ansible | TASK [edpm_deploy : Set EDPM related vars cifmw_edpm_deploy_env={{ cifmw_install_yamls_environment | combine({'PATH': cifmw_path}) | combine({'DATAPLANE_REGISTRY_URL': cifmw_edpm_deploy_registry_url }) | combine({'DATAPLANE_CONTAINER_TAG': cifmw_repo_setup_full_hash | default(cifmw_install_yamls_defaults['DATAPLANE_CONTAINER_TAG']) }) | combine(cifmw_edpm_deploy_extra_vars | default({})) | combine(_install_yamls_repos | default({})) }}, cacheable=True] *** 2026-01-22 12:19:36,742 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.036) 0:14:49.451 ****** 2026-01-22 12:19:36,742 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.036) 0:14:49.449 ****** 2026-01-22 12:19:36,770 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,781 p=33295 u=zuul n=ansible | TASK [edpm_deploy : Create the config file mode=0644, content={{ cifmw_edpm_deploy_nova_compute_extra_config }}, dest={{ _cifmw_edpm_deploy_nova_extra_config_file }}] *** 2026-01-22 12:19:36,781 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.039) 0:14:49.491 ****** 2026-01-22 12:19:36,781 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.039) 0:14:49.489 ****** 2026-01-22 12:19:36,812 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,820 p=33295 u=zuul n=ansible | TASK [edpm_deploy : Define DATAPLANE_EXTRA_NOVA_CONFIG_FILE cifmw_edpm_deploy_env={{ cifmw_edpm_deploy_env | default({}) | combine({'DATAPLANE_EXTRA_NOVA_CONFIG_FILE': _cifmw_edpm_deploy_nova_extra_config_file }) }}, cacheable=True] *** 2026-01-22 12:19:36,820 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.038) 0:14:49.529 ****** 2026-01-22 12:19:36,820 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.038) 0:14:49.527 ****** 2026-01-22 12:19:36,854 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,865 p=33295 u=zuul n=ansible | TASK [Prepare OpenStack Dataplane NodeSet CR name=install_yamls_makes, tasks_from=make_edpm_deploy_prep] *** 2026-01-22 12:19:36,866 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.045) 0:14:49.575 ****** 2026-01-22 12:19:36,866 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.045) 0:14:49.573 ****** 2026-01-22 12:19:36,893 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,905 p=33295 u=zuul n=ansible | TASK [edpm_deploy : Perform kustomizations to the OpenStackDataPlaneNodeSet CR target_path={{ cifmw_edpm_deploy_openstack_crs_path }}, sort_ascending=False, kustomizations_paths={{ [ ( [ cifmw_edpm_deploy_manifests_dir, 'kustomizations', 'dataplane' ] | ansible.builtin.path_join ) ] }}] *** 2026-01-22 12:19:36,905 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.039) 0:14:49.615 ****** 2026-01-22 12:19:36,906 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.039) 0:14:49.613 ****** 2026-01-22 12:19:36,931 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,941 p=33295 u=zuul n=ansible | TASK [edpm_deploy : Log the CR that is about to be applied var=cifmw_edpm_deploy_crs_kustomize_result] *** 2026-01-22 12:19:36,941 p=33295 u=zuul n=ansible | Thursday 22 
January 2026 12:19:36 +0000 (0:00:00.035) 0:14:49.650 ****** 2026-01-22 12:19:36,941 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.035) 0:14:49.648 ****** 2026-01-22 12:19:36,967 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,979 p=33295 u=zuul n=ansible | TASK [edpm_deploy : Apply dataplane resources but ignore DataPlaneDeployment kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, state=present, definition={{ lookup('file', cifmw_edpm_deploy_crs_kustomize_result.output_path) | from_yaml_all | rejectattr('kind', 'search', cifmw_edpm_deploy_step2_kind) }}] *** 2026-01-22 12:19:36,979 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.037) 0:14:49.688 ****** 2026-01-22 12:19:36,979 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.037) 0:14:49.686 ****** 2026-01-22 12:19:37,006 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:37,019 p=33295 u=zuul n=ansible | TASK [edpm_deploy : Wait for OpenStackDataPlaneNodeSet become SetupReady _raw_params=oc wait OpenStackDataPlaneNodeSet {{ cr_name }} --namespace={{ cifmw_install_yamls_defaults['NAMESPACE'] }} --for=condition=SetupReady --timeout={{ cifmw_edpm_deploy_timeout }}m] *** 2026-01-22 12:19:37,019 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.040) 0:14:49.729 ****** 2026-01-22 12:19:37,019 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.040) 0:14:49.727 ****** 2026-01-22 12:19:37,051 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:37,062 p=33295 u=zuul n=ansible | TASK [edpm_deploy : Apply DataPlaneDeployment resource kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, state=present, definition={{ lookup('file', cifmw_edpm_deploy_crs_kustomize_result.output_path) | from_yaml_all | selectattr('kind', 'search', cifmw_edpm_deploy_step2_kind) }}] *** 2026-01-22 12:19:37,062 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.042) 0:14:49.771 ****** 2026-01-22 12:19:37,062 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.042) 0:14:49.770 ****** 2026-01-22 12:19:37,091 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:37,106 p=33295 u=zuul n=ansible | TASK [edpm_deploy : Wait for OpenStackDataPlaneDeployment become Ready _raw_params=oc wait OpenStackDataPlaneDeployment {{ cr_name }} --namespace={{ cifmw_install_yamls_defaults['NAMESPACE'] }} --for=condition=Ready --timeout={{ cifmw_edpm_deploy_timeout }}m] *** 2026-01-22 12:19:37,106 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.043) 0:14:49.815 ****** 2026-01-22 12:19:37,106 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.043) 0:14:49.813 ****** 2026-01-22 12:19:37,132 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:37,141 p=33295 u=zuul n=ansible | TASK [edpm_deploy : Run nova-manage discover_hosts to ensure compute nodes are mapped output_dir={{ cifmw_basedir }}/artifacts, executable=/bin/bash, script=set -xe oc rsh --namespace={{ cifmw_install_yamls_defaults['NAMESPACE'] }} nova-cell0-conductor-0 nova-manage cell_v2 discover_hosts --verbose ] *** 2026-01-22 12:19:37,141 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.035) 0:14:49.851 
****** 2026-01-22 12:19:37,141 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.035) 0:14:49.849 ****** 2026-01-22 12:19:37,177 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:37,189 p=33295 u=zuul n=ansible | TASK [Validate EDPM name=install_yamls_makes, tasks_from=make_edpm_deploy_instance] *** 2026-01-22 12:19:37,189 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.047) 0:14:49.898 ****** 2026-01-22 12:19:37,189 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.047) 0:14:49.896 ****** 2026-01-22 12:19:37,218 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:37,267 p=33295 u=zuul n=ansible | PLAY [Deploy ceph using hooks] ************************************************* 2026-01-22 12:19:37,287 p=33295 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2026-01-22 12:19:37,287 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.098) 0:14:49.996 ****** 2026-01-22 12:19:37,287 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.098) 0:14:49.994 ****** 2026-01-22 12:19:37,350 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:37,358 p=33295 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2026-01-22 12:19:37,358 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.071) 0:14:50.068 ****** 2026-01-22 12:19:37,358 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.071) 0:14:50.066 ****** 2026-01-22 12:19:37,439 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:37,452 p=33295 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_ceph _raw_params={{ hook.type }}.yml] *** 2026-01-22 12:19:37,452 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.093) 0:14:50.161 ****** 2026-01-22 12:19:37,452 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.093) 0:14:50.159 ****** 2026-01-22 12:19:37,535 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:37,580 p=33295 u=zuul n=ansible | PLAY [Continue HCI deploy, deploy architecture and validate workflow] ********** 2026-01-22 12:19:37,614 p=33295 u=zuul n=ansible | TASK [Prepare for HCI deploy phase 2 name=hci_prepare, tasks_from=phase2.yml] *** 2026-01-22 12:19:37,614 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.162) 0:14:50.324 ****** 2026-01-22 12:19:37,614 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.162) 0:14:50.322 ****** 2026-01-22 12:19:37,639 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:37,648 p=33295 u=zuul n=ansible | TASK [Continue HCI deployment name=edpm_deploy] ******************************** 2026-01-22 12:19:37,648 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.033) 0:14:50.357 ****** 2026-01-22 12:19:37,648 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.033) 0:14:50.355 ****** 2026-01-22 12:19:37,674 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:37,685 p=33295 u=zuul n=ansible | TASK [run_hook : Assert parameters 
are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2026-01-22 12:19:37,685 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.037) 0:14:50.394 ****** 2026-01-22 12:19:37,685 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.037) 0:14:50.392 ****** 2026-01-22 12:19:37,745 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:37,772 p=33295 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2026-01-22 12:19:37,772 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.087) 0:14:50.482 ****** 2026-01-22 12:19:37,772 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.087) 0:14:50.480 ****** 2026-01-22 12:19:37,871 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:37,885 p=33295 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_deploy _raw_params={{ hook.type }}.yml] *** 2026-01-22 12:19:37,885 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.112) 0:14:50.595 ****** 2026-01-22 12:19:37,885 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.112) 0:14:50.593 ****** 2026-01-22 12:19:37,975 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:37,990 p=33295 u=zuul n=ansible | TASK [Run validations name=validations] **************************************** 2026-01-22 12:19:37,990 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.104) 0:14:50.699 ****** 2026-01-22 12:19:37,990 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.104) 0:14:50.697 ****** 2026-01-22 12:19:38,016 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,033 p=33295 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2026-01-22 12:19:38,033 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.043) 0:14:50.743 ****** 2026-01-22 12:19:38,033 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.043) 0:14:50.741 ****** 2026-01-22 12:19:38,055 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,063 p=33295 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] 
*** 2026-01-22 12:19:38,063 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:50.773 ****** 2026-01-22 12:19:38,063 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:50.771 ****** 2026-01-22 12:19:38,088 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,100 p=33295 u=zuul n=ansible | TASK [run_hook : Loop on hooks for pre_deploy _raw_params={{ hook.type }}.yml] *** 2026-01-22 12:19:38,101 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.037) 0:14:50.810 ****** 2026-01-22 12:19:38,101 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.037) 0:14:50.808 ****** 2026-01-22 12:19:38,190 p=33295 u=zuul n=ansible | skipping: [localhost] => (item={'name': 'Fetch compute facts', 'type': 'playbook', 'inventory': '/home/zuul/ci-framework-data/artifacts/zuul_inventory.yml', 'source': 'fetch_compute_facts.yml'}) 2026-01-22 12:19:38,192 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,208 p=33295 u=zuul n=ansible | TASK [networking_mapper : Check for Networking Environment Definition file existence path={{ cifmw_networking_mapper_networking_env_def_path }}] *** 2026-01-22 12:19:38,209 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.107) 0:14:50.918 ****** 2026-01-22 12:19:38,209 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.107) 0:14:50.916 ****** 2026-01-22 12:19:38,235 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,246 p=33295 u=zuul n=ansible | TASK [networking_mapper : Check for Networking Definition file existence that=['_net_env_def_stat.stat.exists'], msg=Ensure that the Networking Environment Definition file exists in {{ cifmw_networking_mapper_networking_env_def_path }}, quiet=True] *** 2026-01-22 12:19:38,246 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.037) 0:14:50.955 ****** 2026-01-22 12:19:38,246 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.037) 0:14:50.953 ****** 2026-01-22 12:19:38,267 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,276 p=33295 u=zuul n=ansible | TASK [networking_mapper : Load the Networking Definition from file path={{ cifmw_networking_mapper_networking_env_def_path }}] *** 2026-01-22 12:19:38,276 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:50.986 ****** 2026-01-22 12:19:38,276 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:50.984 ****** 2026-01-22 12:19:38,300 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,310 p=33295 u=zuul n=ansible | TASK [networking_mapper : Set cifmw_networking_env_definition is present cifmw_networking_env_definition={{ _net_env_def_slurp['content'] | b64decode | from_yaml }}, cacheable=True] *** 2026-01-22 12:19:38,310 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.033) 0:14:51.020 ****** 2026-01-22 12:19:38,310 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.033) 0:14:51.018 ****** 2026-01-22 12:19:38,331 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,347 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Fetch network facts gather_subset=network] ***************** 2026-01-22 12:19:38,347 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.036) 0:14:51.057 ****** 2026-01-22 12:19:38,347 p=33295 u=zuul n=ansible | Thursday 22 
January 2026 12:19:38 +0000 (0:00:00.036) 0:14:51.055 ****** 2026-01-22 12:19:38,376 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=controller) 2026-01-22 12:19:38,380 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=crc) 2026-01-22 12:19:38,392 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=localhost) 2026-01-22 12:19:38,399 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=standalone) 2026-01-22 12:19:38,400 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,410 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Look for nova migration keypair file path={{ cifmw_basedir }}/artifacts/nova_migration_key] *** 2026-01-22 12:19:38,410 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.062) 0:14:51.119 ****** 2026-01-22 12:19:38,410 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.062) 0:14:51.117 ****** 2026-01-22 12:19:38,431 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,440 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Create nova migration keypair if it does not exist comment=nova migration, path={{ _ssh_file }}, type={{ cifmw_ssh_keytype | default('ecdsa') }}, size={{ cifmw_ssh_keysize | default(521) }}] *** 2026-01-22 12:19:38,440 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:51.150 ****** 2026-01-22 12:19:38,440 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:51.148 ****** 2026-01-22 12:19:38,461 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,469 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Generate needed facts out of local files cifmw_ci_gen_kustomize_values_ssh_authorizedkeys={{ lookup('file', '~/.ssh/id_cifw.pub', rstrip=False) }}, cifmw_ci_gen_kustomize_values_ssh_private_key={{ lookup('file', '~/.ssh/id_cifw', rstrip=False) }}, cifmw_ci_gen_kustomize_values_ssh_public_key={{ lookup('file', '~/.ssh/id_cifw.pub', rstrip=False) }}, cifmw_ci_gen_kustomize_values_migration_pub_key={{ lookup('file', _ssh_file ~ '.pub', rstrip=False)}}, cifmw_ci_gen_kustomize_values_migration_priv_key={{ lookup('file', _ssh_file, rstrip=False) }}, cifmw_ci_gen_kustomize_values_sshd_ranges={{ _ipv4_sshd_ranges + _ipv6_sshd_ranges }}] *** 2026-01-22 12:19:38,470 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.029) 0:14:51.179 ****** 2026-01-22 12:19:38,470 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.029) 0:14:51.177 ****** 2026-01-22 12:19:38,491 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,500 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Set cifmw_architecture_automation_file if not set before cifmw_architecture_automation_file={{ ( cifmw_architecture_repo, 'automation/vars', cifmw_architecture_scenario~'.yaml' ) | ansible.builtin.path_join }}] *** 2026-01-22 12:19:38,500 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:51.209 ****** 2026-01-22 12:19:38,500 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:51.207 ****** 2026-01-22 12:19:38,522 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,530 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Load architecture automation file path={{ cifmw_architecture_automation_file }}] *** 2026-01-22 12:19:38,530 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:51.240 ****** 2026-01-22 12:19:38,530 p=33295 u=zuul n=ansible | Thursday 
22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:51.238 ****** 2026-01-22 12:19:38,550 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,559 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Prepare automation data cifmw_deploy_architecture_steps={{ _parsed['vas'][cifmw_architecture_scenario] }}] *** 2026-01-22 12:19:38,559 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.028) 0:14:51.268 ****** 2026-01-22 12:19:38,559 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.028) 0:14:51.266 ****** 2026-01-22 12:19:38,580 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,593 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Ensure that architecture repo exists repo={{ cifmw_kustomize_deploy_architecture_repo_url }}, dest={{ cifmw_kustomize_deploy_architecture_repo_dest_dir }}, update=False, version={{ cifmw_kustomize_deploy_architecture_repo_version }}] *** 2026-01-22 12:19:38,593 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.034) 0:14:51.302 ****** 2026-01-22 12:19:38,593 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.034) 0:14:51.301 ****** 2026-01-22 12:19:38,614 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,627 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Gather the list of scenario folders paths={{ _va_scenario_dir }}, patterns=kustomization.y*ml, recurse=True] *** 2026-01-22 12:19:38,627 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.034) 0:14:51.337 ****** 2026-01-22 12:19:38,627 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.034) 0:14:51.335 ****** 2026-01-22 12:19:38,659 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,667 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Check if scenario is in the list msg=You need to properly set the `cifmw_architecture_scenario` variable in order to select the architecture-based scenario to deploy. You can take a list of scenario in the `examples/va` folder in the architecture repo. 
] *** 2026-01-22 12:19:38,667 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.039) 0:14:51.376 ****** 2026-01-22 12:19:38,667 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.039) 0:14:51.374 ****** 2026-01-22 12:19:38,697 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,706 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Ensure that destination directory exists path={{ cifmw_kustomize_deploy_kustomizations_dest_dir }}, mode=0755, state=directory] *** 2026-01-22 12:19:38,706 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.038) 0:14:51.415 ****** 2026-01-22 12:19:38,706 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.038) 0:14:51.413 ****** 2026-01-22 12:19:38,725 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,734 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Set the final cifmw_architecture_user_kustomize based on its patches _cifmw_kustomize_deploy_user_kustomize={{ _cifmw_kustomize_deploy_user_kustomize | default({}) | combine(item.value, recursive=True) }}] *** 2026-01-22 12:19:38,734 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.028) 0:14:51.444 ****** 2026-01-22 12:19:38,735 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.028) 0:14:51.442 ****** 2026-01-22 12:19:38,802 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=cifmw_architecture_user_kustomize) 2026-01-22 12:19:38,803 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,813 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Set the final cifmw_architecture_user_kustomize_base64 based on its patches _cifmw_kustomize_deploy_user_base64_kustomize={{ _b64_kustomize_user_patches | ci_kustomize_deploy_combine_base64_patch_dict }}] *** 2026-01-22 12:19:38,813 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.078) 0:14:51.522 ****** 2026-01-22 12:19:38,813 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.078) 0:14:51.520 ****** 2026-01-22 12:19:38,854 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,869 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Load nncp kustomization.yaml path={{ _nncp_kust_path }}] *** 2026-01-22 12:19:38,869 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.056) 0:14:51.579 ****** 2026-01-22 12:19:38,870 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.056) 0:14:51.577 ****** 2026-01-22 12:19:38,896 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,905 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Remove node_1 and node_2 backup=True, content={{ _nncp_updated | to_nice_yaml }}, dest={{ _nncp_kust_path }}, mode=0644] *** 2026-01-22 12:19:38,905 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.035) 0:14:51.614 ****** 2026-01-22 12:19:38,905 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.035) 0:14:51.612 ****** 2026-01-22 12:19:38,943 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,952 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Reduce NNCP ocp_nodes backup=True, dest={{ _nncp_ocp_nodes_path }}, mode=0644, content=--- apiVersion: nmstate.io/v1 kind: NodeNetworkConfigurationPolicy metadata: name: node-0 labels: osp/nncm-config-type: standard ] *** 2026-01-22 12:19:38,952 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 
(0:00:00.047) 0:14:51.662 ****** 2026-01-22 12:19:38,952 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.047) 0:14:51.660 ****** 2026-01-22 12:19:38,973 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,983 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Replace replicas backup=True, path={{ _ctlplane_path }}, regexp=^(.+) replicas: [2-9]+$, replace=\1 replicas: 1] *** 2026-01-22 12:19:38,983 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:51.692 ****** 2026-01-22 12:19:38,983 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:51.690 ****** 2026-01-22 12:19:39,003 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,021 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Create role needed directories path={{ cifmw_cls_manifests_dir }}, state=directory, mode=0755] *** 2026-01-22 12:19:39,021 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.038) 0:14:51.730 ****** 2026-01-22 12:19:39,021 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.038) 0:14:51.728 ****** 2026-01-22 12:19:39,039 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,047 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Create the cifmw_cls_namespace namespace kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit) }}, name={{ cifmw_cls_namespace }}, kind=Namespace, state=present] *** 2026-01-22 12:19:39,048 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.026) 0:14:51.757 ****** 2026-01-22 12:19:39,048 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.026) 0:14:51.755 ****** 2026-01-22 12:19:39,065 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,073 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Save storage manifests as artifacts dest={{ cifmw_cls_manifests_dir }}/storage-class.yaml, content={{ cifmw_cls_storage_manifest | to_nice_yaml }}, mode=0644] *** 2026-01-22 12:19:39,073 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.025) 0:14:51.782 ****** 2026-01-22 12:19:39,073 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.025) 0:14:51.780 ****** 2026-01-22 12:19:39,091 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,100 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Get k8s nodes kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, kind=Node] *** 2026-01-22 12:19:39,100 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.027) 0:14:51.809 ****** 2026-01-22 12:19:39,100 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.027) 0:14:51.807 ****** 2026-01-22 12:19:39,119 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,129 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Fetch hostnames for all hosts _raw_params=hostname] *** 2026-01-22 12:19:39,129 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.029) 0:14:51.838 ****** 2026-01-22 12:19:39,129 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.029) 0:14:51.836 ****** 2026-01-22 12:19:39,156 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=crc) 2026-01-22 12:19:39,163 p=33295 u=zuul n=ansible | 
skipping: [localhost] => (item=standalone) 2026-01-22 12:19:39,170 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=controller) 2026-01-22 12:19:39,184 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=localhost) 2026-01-22 12:19:39,186 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,198 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Set the hosts k8s ansible hosts cifmw_ci_local_storage_k8s_hosts={{ _host_map | selectattr("key", "in", k8s_nodes_hostnames) | map(attribute="value") | list }}, cifmw_ci_local_storage_k8s_hostnames={{ k8s_nodes_hostnames }}] *** 2026-01-22 12:19:39,198 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.069) 0:14:51.908 ****** 2026-01-22 12:19:39,198 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.069) 0:14:51.906 ****** 2026-01-22 12:19:39,220 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,230 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Apply the storage class manifests kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, state=present, src={{ cifmw_cls_manifests_dir }}/storage-class.yaml] *** 2026-01-22 12:19:39,230 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.031) 0:14:51.939 ****** 2026-01-22 12:19:39,230 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.031) 0:14:51.937 ****** 2026-01-22 12:19:39,253 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,266 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Create directories on worker node _raw_params=worker_node_dirs.yml] *** 2026-01-22 12:19:39,266 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.036) 0:14:51.976 ****** 2026-01-22 12:19:39,266 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.036) 0:14:51.974 ****** 2026-01-22 12:19:39,290 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=crc) 2026-01-22 12:19:39,291 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,304 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Generate pv related storage manifest file src=storage.yaml.j2, dest={{ cifmw_cls_manifests_dir }}/storage.yaml, mode=0644] *** 2026-01-22 12:19:39,304 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.037) 0:14:52.013 ****** 2026-01-22 12:19:39,304 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.037) 0:14:52.011 ****** 2026-01-22 12:19:39,330 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,339 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Apply pv related storage manifest file kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, state=present, src={{ cifmw_cls_manifests_dir }}/storage.yaml] *** 2026-01-22 12:19:39,339 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.034) 0:14:52.048 ****** 2026-01-22 12:19:39,339 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.034) 0:14:52.046 ****** 2026-01-22 12:19:39,361 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,377 p=33295 u=zuul n=ansible | TASK [Install subscriptions name=ci_gen_kustomize_values, tasks_from=olm_subscriptions_overlay.yml] *** 2026-01-22 12:19:39,377 p=33295 u=zuul n=ansible | Thursday 22 January 2026 
12:19:39 +0000 (0:00:00.038) 0:14:52.086 ****** 2026-01-22 12:19:39,377 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.038) 0:14:52.084 ****** 2026-01-22 12:19:39,399 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,408 p=33295 u=zuul n=ansible | TASK [Generate values.yaml for OLM resources name=ci_gen_kustomize_values] ***** 2026-01-22 12:19:39,409 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.031) 0:14:52.118 ****** 2026-01-22 12:19:39,409 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.031) 0:14:52.116 ****** 2026-01-22 12:19:39,463 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Generate snippets files _raw_params=generate_snippets.yml] *** 2026-01-22 12:19:39,463 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.054) 0:14:52.172 ****** 2026-01-22 12:19:39,463 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.054) 0:14:52.170 ****** 2026-01-22 12:19:39,503 p=33295 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/ci_gen_kustomize_values/tasks/generate_snippets.yml for localhost 2026-01-22 12:19:39,514 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Ensure needed parameter is properly set that=['cifmw_architecture_scenario is defined', 'cifmw_architecture_scenario is not none'], msg=cifmw_architecture_scenario must be provided.] *** 2026-01-22 12:19:39,514 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.051) 0:14:52.224 ****** 2026-01-22 12:19:39,514 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.051) 0:14:52.222 ****** 2026-01-22 12:19:39,544 p=33295 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2026-01-22 12:19:39,560 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Stat original source file path={{ cifmw_ci_gen_kustomize_values_src_file }}, get_attributes=False, get_checksum=False, get_mime=False] *** 2026-01-22 12:19:39,561 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.046) 0:14:52.270 ****** 2026-01-22 12:19:39,561 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.046) 0:14:52.268 ****** 2026-01-22 12:19:39,764 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:39,773 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Assert source file exists that=['_src_stat.stat.exists'], msg={{ cifmw_ci_gen_kustomize_values_src_file }} doesn't exist.] 
*** 2026-01-22 12:19:39,773 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.212) 0:14:52.482 ****** 2026-01-22 12:19:39,773 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.212) 0:14:52.480 ****** 2026-01-22 12:19:39,809 p=33295 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2026-01-22 12:19:39,818 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Load original values file src={{ cifmw_ci_gen_kustomize_values_src_file }}] *** 2026-01-22 12:19:39,818 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.044) 0:14:52.527 ****** 2026-01-22 12:19:39,818 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.044) 0:14:52.525 ****** 2026-01-22 12:19:39,993 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:40,004 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Expose common data for future usage values_datatype={{ _datatype }}, snippet_datadir={{ _dest_dir }}, original_content={{ _config_map_content }}, _cifmw_gen_kustomize_values_extra_manifests={{ _raw_manifests | reject('equalto', _config_map_content) }}, _cifmw_gen_kustomize_values_base_cm_content={{ _config_map_content | ansible.utils.remove_keys( target=_cifmw_gen_kustomize_values_reject_expressions, matching_parameter='regex') }}, cacheable=False] *** 2026-01-22 12:19:40,004 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:40 +0000 (0:00:00.186) 0:14:52.714 ****** 2026-01-22 12:19:40,005 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:40 +0000 (0:00:00.186) 0:14:52.712 ****** 2026-01-22 12:19:40,216 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:40,229 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Ensure we get the needed data depending on the values type _raw_params={{ _tasks }}] *** 2026-01-22 12:19:40,229 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:40 +0000 (0:00:00.225) 0:14:52.939 ****** 2026-01-22 12:19:40,230 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:40 +0000 (0:00:00.225) 0:14:52.937 ****** 2026-01-22 12:19:40,259 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:40,268 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Ensure output directory exists path={{ snippet_datadir }}, state=directory, mode=0755] *** 2026-01-22 12:19:40,268 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:40 +0000 (0:00:00.038) 0:14:52.978 ****** 2026-01-22 12:19:40,268 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:40 +0000 (0:00:00.038) 0:14:52.976 ****** 2026-01-22 12:19:40,471 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:40,482 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Generate CI snippet backup=True, dest={{ (snippet_datadir, '02_ci_data.yaml') | path_join }}, src={{ _tmpl_check_path | first }}, mode=0644] *** 2026-01-22 12:19:40,483 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:40 +0000 (0:00:00.214) 0:14:53.192 ****** 2026-01-22 12:19:40,483 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:40 +0000 (0:00:00.214) 0:14:53.190 ****** 2026-01-22 12:19:40,956 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:40,966 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Generate the base64 CI ConfigMap patches _base64_patch={{ _patches_tuple[0] }}, _cifmw_gen_kustomize_values_extra_manifests={{ _patches_tuple[1] }}] *** 2026-01-22 12:19:40,966 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:40 
+0000 (0:00:00.483) 0:14:53.675 ****** 2026-01-22 12:19:40,966 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:40 +0000 (0:00:00.483) 0:14:53.673 ****** 2026-01-22 12:19:41,001 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:41,011 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Push base64 CI patches backup=True, dest={{ (snippet_datadir, '03_user_data_b64.yaml') | path_join }}, content={{ _base64_patch | default({}) | to_nice_yaml }}, mode=0644] *** 2026-01-22 12:19:41,011 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:41 +0000 (0:00:00.045) 0:14:53.721 ****** 2026-01-22 12:19:41,011 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:41 +0000 (0:00:00.045) 0:14:53.719 ****** 2026-01-22 12:19:41,429 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:41,438 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Push user provided dataset backup=True, dest={{ (snippet_datadir, '04_user_data.yaml') | path_join }}, content={{ cifmw_ci_gen_kustomize_values_userdata | default({}) | to_nice_yaml }}, mode=0644] *** 2026-01-22 12:19:41,438 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:41 +0000 (0:00:00.426) 0:14:54.147 ****** 2026-01-22 12:19:41,438 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:41 +0000 (0:00:00.426) 0:14:54.145 ****** 2026-01-22 12:19:41,900 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:41,909 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Copy the base values.yaml backup=True, content={{ _cifmw_gen_kustomize_values_base_cm_content |to_nice_yaml }}, dest={{ ( snippet_datadir, cifmw_ci_gen_kustomize_values_original_cm_content_file_name ) | path_join }}, mode=0644] *** 2026-01-22 12:19:41,909 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:41 +0000 (0:00:00.471) 0:14:54.619 ****** 2026-01-22 12:19:41,910 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:41 +0000 (0:00:00.471) 0:14:54.617 ****** 2026-01-22 12:19:42,336 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:42,348 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Generate values file _raw_params=generate_values.yml] *** 2026-01-22 12:19:42,348 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:42 +0000 (0:00:00.438) 0:14:55.058 ****** 2026-01-22 12:19:42,348 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:42 +0000 (0:00:00.438) 0:14:55.056 ****** 2026-01-22 12:19:42,387 p=33295 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/ci_gen_kustomize_values/tasks/generate_values.yml for localhost 2026-01-22 12:19:42,406 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Ensure we have needed parameter that=['values_datatype is defined', "values_datatype != ''"], msg=Please do not call this tasks file without calling the generate_snippet.yml first!] 
*** 2026-01-22 12:19:42,406 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:42 +0000 (0:00:00.057) 0:14:55.115 ****** 2026-01-22 12:19:42,406 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:42 +0000 (0:00:00.057) 0:14:55.113 ****** 2026-01-22 12:19:42,442 p=33295 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2026-01-22 12:19:42,452 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : List snippets paths={{ _dir_path }}, patterns=*.yml,*.yaml, excludes={{ cifmw_ci_gen_kustomize_values_original_cm_content_file_name }}, recurse=False] *** 2026-01-22 12:19:42,452 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:42 +0000 (0:00:00.046) 0:14:55.162 ****** 2026-01-22 12:19:42,453 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:42 +0000 (0:00:00.046) 0:14:55.160 ****** 2026-01-22 12:19:42,651 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:42,661 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Ensure _content is empty _content={}] ********** 2026-01-22 12:19:42,661 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:42 +0000 (0:00:00.208) 0:14:55.370 ****** 2026-01-22 12:19:42,661 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:42 +0000 (0:00:00.208) 0:14:55.368 ****** 2026-01-22 12:19:42,686 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:42,697 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Load various snippet files path={{ file.path }}] *** 2026-01-22 12:19:42,697 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:42 +0000 (0:00:00.035) 0:14:55.406 ****** 2026-01-22 12:19:42,697 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:42 +0000 (0:00:00.035) 0:14:55.404 ****** 2026-01-22 12:19:42,927 p=33295 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/ci_k8s_snippets/olm-values/02_ci_data.yaml) 2026-01-22 12:19:43,098 p=33295 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/ci_k8s_snippets/olm-values/03_user_data_b64.yaml) 2026-01-22 12:19:43,267 p=33295 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/ci_k8s_snippets/olm-values/04_user_data.yaml) 2026-01-22 12:19:43,277 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Combine snippets _content={{ _content | default(_cifmw_gen_kustomize_values_base_cm_content, true) | combine(_parsed, recursive=true) }} ] *** 2026-01-22 12:19:43,278 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:43 +0000 (0:00:00.580) 0:14:55.987 ****** 2026-01-22 12:19:43,278 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:43 +0000 (0:00:00.580) 0:14:55.985 ****** 2026-01-22 12:19:43,315 p=33295 u=zuul n=ansible | ok: [localhost] => (item=02_ci_data.yaml) 2026-01-22 12:19:43,327 p=33295 u=zuul n=ansible | ok: [localhost] => (item=03_user_data_b64.yaml) 2026-01-22 12:19:43,338 p=33295 u=zuul n=ansible | ok: [localhost] => (item=04_user_data.yaml) 2026-01-22 12:19:43,350 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Ensure directories exist path={{ _destdir }}, state=directory, mode=0755] *** 2026-01-22 12:19:43,350 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:43 +0000 (0:00:00.072) 0:14:56.059 ****** 2026-01-22 12:19:43,350 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:43 +0000 (0:00:00.072) 0:14:56.057 ****** 2026-01-22 12:19:43,549 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:43,558 p=33295 u=zuul n=ansible | TASK 
[ci_gen_kustomize_values : Output values file backup=True, dest={{ (_destdir, cifmw_ci_gen_kustomize_values_dest_filename) | path_join }}, content={{ ( [ _content ] + _cifmw_gen_kustomize_values_extra_manifests ) | cifmw.general.to_nice_yaml_all }}, mode=0644] *** 2026-01-22 12:19:43,558 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:43 +0000 (0:00:00.207) 0:14:56.267 ****** 2026-01-22 12:19:43,558 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:43 +0000 (0:00:00.207) 0:14:56.265 ****** 2026-01-22 12:19:43,958 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:43,975 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Slurp generated values.yaml for OLM resources src={{ (cifmw_kustomize_deploy_basedir, 'artifacts', 'ci_gen_kustomize_values', 'olm-values', 'values.yaml') | path_join }}] *** 2026-01-22 12:19:43,975 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:43 +0000 (0:00:00.416) 0:14:56.684 ****** 2026-01-22 12:19:43,975 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:43 +0000 (0:00:00.416) 0:14:56.682 ****** 2026-01-22 12:19:43,996 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,005 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Copy generated values.yaml for OLM resources to localhost content={{ _cifmw_kustomize_deploy_olm_values_content.content | b64decode }}, dest={{ ( cifmw_kustomize_deploy_olm_source_files, 'values.yaml' ) | path_join }}, mode=0644] *** 2026-01-22 12:19:44,005 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.030) 0:14:56.715 ****** 2026-01-22 12:19:44,006 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.030) 0:14:56.713 ****** 2026-01-22 12:19:44,034 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,043 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Generate the OLM kustomization file content={{ lookup( 'kubernetes.core.kustomize', dir=cifmw_kustomize_deploy_olm_source_files ) }}, dest={{ cifmw_kustomize_deploy_olm_dest_file }}, mode=0644] *** 2026-01-22 12:19:44,043 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.037) 0:14:56.752 ****** 2026-01-22 12:19:44,043 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.037) 0:14:56.750 ****** 2026-01-22 12:19:44,063 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,073 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Apply the kustomized CRs kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, state=present, wait=True, src={{ cifmw_kustomize_deploy_olm_dest_file }}] *** 2026-01-22 12:19:44,073 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.030) 0:14:56.783 ****** 2026-01-22 12:19:44,074 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.030) 0:14:56.781 ****** 2026-01-22 12:19:44,095 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,105 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait for the openstack operators Subscription to be created kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, api_version={{ _cifmw_kustomize_deploy_olm_osp_operator_subscription.apiVersion }}, kind=Subscription, namespace={{ _cifmw_kustomize_deploy_olm_osp_operator_subscription.metadata.namespace }}, name={{ 
_cifmw_kustomize_deploy_olm_osp_operator_subscription.metadata.name }}] *** 2026-01-22 12:19:44,105 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:56.814 ****** 2026-01-22 12:19:44,105 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:56.812 ****** 2026-01-22 12:19:44,127 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,136 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Install plan _raw_params=install_plan.yml] ************ 2026-01-22 12:19:44,137 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:56.846 ****** 2026-01-22 12:19:44,137 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:56.844 ****** 2026-01-22 12:19:44,158 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,169 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait for the openstack operators InstallPlan to be finished kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, api_version={{ _install_plan.apiVersion }}, kind=InstallPlan, namespace={{ _install_plan.namespace }}, name={{ _install_plan.name }}] *** 2026-01-22 12:19:44,169 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.032) 0:14:56.878 ****** 2026-01-22 12:19:44,169 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.032) 0:14:56.876 ****** 2026-01-22 12:19:44,191 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,202 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait for cert-manager-operator pods kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Pod, namespace=cert-manager-operator, label_selectors=['name = cert-manager-operator'], wait=True, wait_condition={'type': 'Ready', 'status': 'True'}, wait_timeout=300] *** 2026-01-22 12:19:44,203 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.033) 0:14:56.912 ****** 2026-01-22 12:19:44,203 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.033) 0:14:56.910 ****** 2026-01-22 12:19:44,227 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,239 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait for cainjector pods kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Pod, namespace=cert-manager, label_selectors=['app = cainjector'], wait=True, wait_condition={'type': 'Ready', 'status': 'True'}, wait_timeout=300] *** 2026-01-22 12:19:44,239 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.036) 0:14:56.948 ****** 2026-01-22 12:19:44,239 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.036) 0:14:56.946 ****** 2026-01-22 12:19:44,260 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,270 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait for webhook pods kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Pod, namespace=cert-manager, label_selectors=['app = webhook'], wait=True, wait_condition={'type': 'Ready', 'status': 'True'}, wait_timeout=300] *** 2026-01-22 12:19:44,270 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:56.980 ****** 2026-01-22 12:19:44,270 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:56.978 ****** 2026-01-22 12:19:44,292 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,302 p=33295 u=zuul n=ansible | TASK 
[kustomize_deploy : Wait for certmanager pods kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Pod, namespace=cert-manager, label_selectors=['app = cert-manager'], wait=True, wait_condition={'type': 'Ready', 'status': 'True'}, wait_timeout=300] *** 2026-01-22 12:19:44,302 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:57.011 ****** 2026-01-22 12:19:44,302 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:57.009 ****** 2026-01-22 12:19:44,323 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,333 p=33295 u=zuul n=ansible | TASK [openshift_setup : Write catalog source kubeconfig={{ cifmw_openshift_kubeconfig }}, state=present, definition={'apiVersion': 'operators.coreos.com/v1alpha1', 'kind': 'CatalogSource', 'metadata': {'name': '{{ cifmw_openshift_setup_operator_override_catalog_name }}', 'namespace': '{{ cifmw_openshift_setup_operator_override_catalog_namespace }}'}, 'spec': {'displayName': '{{ cifmw_openshift_setup_operator_override_catalog_name }}', 'image': '{{ cifmw_openshift_setup_operator_override_catalog_image }}', 'publisher': 'CI-Framework', 'sourceType': 'grpc', 'updateStrategy': {'registryPoll': {'interval': '10m'}}}}] *** 2026-01-22 12:19:44,333 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.030) 0:14:57.042 ****** 2026-01-22 12:19:44,333 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.030) 0:14:57.040 ****** 2026-01-22 12:19:44,354 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,365 p=33295 u=zuul n=ansible | TASK [openshift_setup : Patch metallb operator subscription to use custom CatalogSource state=patched, kind=Subscription, api_version=operators.coreos.com/v1alpha1, kubeconfig={{ cifmw_openshift_kubeconfig }}, name=metallb-operator-sub, namespace=metallb-system, definition={'spec': {'source': '{{ cifmw_openshift_setup_operator_override_catalog_name }}'}}, wait=True, wait_timeout=300, wait_condition={'type': 'CatalogSourcesUnhealthy', 'status': 'False'}] *** 2026-01-22 12:19:44,365 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.032) 0:14:57.075 ****** 2026-01-22 12:19:44,365 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.032) 0:14:57.073 ****** 2026-01-22 12:19:44,393 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,402 p=33295 u=zuul n=ansible | TASK [openshift_setup : Patch nmstate operator subscription to use custom CatalogSource state=patched, kind=Subscription, api_version=operators.coreos.com/v1alpha1, kubeconfig={{ cifmw_openshift_kubeconfig }}, name=kubernetes-nmstate-operator, namespace=openshift-nmstate, definition={'spec': {'source': '{{ cifmw_openshift_setup_operator_override_catalog_name }}', 'wait_sleep': 10}}, wait=True, wait_timeout=300, wait_condition={'type': 'CatalogSourcesUnhealthy', 'status': 'False'}] *** 2026-01-22 12:19:44,402 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.037) 0:14:57.112 ****** 2026-01-22 12:19:44,403 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.037) 0:14:57.110 ****** 2026-01-22 12:19:44,423 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,442 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait for controller-manager deployment kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Deployment, namespace=metallb-system, label_selectors=['control-plane = controller-manager'], wait=True, 
wait_condition={'type': 'Available', 'status': 'True'}, wait_timeout=300] *** 2026-01-22 12:19:44,443 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.040) 0:14:57.152 ****** 2026-01-22 12:19:44,443 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.040) 0:14:57.150 ****** 2026-01-22 12:19:44,463 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,473 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait for webhook-server deployment kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Deployment, namespace=metallb-system, label_selectors=['component = webhook-server'], wait=True, wait_condition={'type': 'Available', 'status': 'True'}, wait_timeout=300] *** 2026-01-22 12:19:44,473 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.030) 0:14:57.182 ****** 2026-01-22 12:19:44,473 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.030) 0:14:57.180 ****** 2026-01-22 12:19:44,498 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,509 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait until NMstate operator resources are deployed kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Deployment, namespace=openshift-nmstate, name=nmstate-operator, wait=True, wait_condition={'type': 'Available', 'status': 'True'}, wait_timeout=300] *** 2026-01-22 12:19:44,509 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.035) 0:14:57.218 ****** 2026-01-22 12:19:44,509 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.035) 0:14:57.216 ****** 2026-01-22 12:19:44,536 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,547 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Generate MetalLB kustomization file content={{ lookup( 'kubernetes.core.kustomize', dir=cifmw_kustomize_deploy_metallb_source_files ) }}, dest={{ cifmw_kustomize_deploy_metallb_dest_file }}, mode=0644] *** 2026-01-22 12:19:44,548 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.038) 0:14:57.257 ****** 2026-01-22 12:19:44,548 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.038) 0:14:57.255 ****** 2026-01-22 12:19:44,569 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,579 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Apply the kustomized MetalLB CRs kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, state=present, wait=True, src={{ cifmw_kustomize_deploy_metallb_dest_file }}] *** 2026-01-22 12:19:44,579 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:57.288 ****** 2026-01-22 12:19:44,579 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:57.286 ****** 2026-01-22 12:19:44,609 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,618 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait for MetalLB speaker pods kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Pod, namespace=metallb-system, label_selectors=['component = speaker'], wait=True, wait_condition={'type': 'Ready', 'status': 'True'}, wait_timeout=300] *** 2026-01-22 12:19:44,618 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.039) 0:14:57.328 ****** 2026-01-22 12:19:44,618 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.039) 0:14:57.326 ****** 2026-01-22 
12:19:44,639 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,649 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Generate NMstate kustomization file content={{ lookup( 'kubernetes.core.kustomize', dir=cifmw_kustomize_deploy_nmstate_source_files ) }}, dest={{ cifmw_kustomize_deploy_nmstate_dest_file }}, mode=0644] *** 2026-01-22 12:19:44,650 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:57.359 ****** 2026-01-22 12:19:44,650 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:57.357 ****** 2026-01-22 12:19:44,671 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,682 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Apply the kustomized NMstate CRs kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, state=present, wait=True, src={{ cifmw_kustomize_deploy_nmstate_dest_file }}] *** 2026-01-22 12:19:44,683 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.032) 0:14:57.392 ****** 2026-01-22 12:19:44,683 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.032) 0:14:57.390 ****** 2026-01-22 12:19:44,707 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,716 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait for NMstate handler pods kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Pod, namespace=openshift-nmstate, label_selectors=['component = kubernetes-nmstate-handler'], wait=True, wait_condition={'type': 'Ready', 'status': 'True'}, wait_timeout=300] *** 2026-01-22 12:19:44,717 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.034) 0:14:57.426 ****** 2026-01-22 12:19:44,717 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.034) 0:14:57.424 ****** 2026-01-22 12:19:44,737 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,746 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait for NMstate webhook deployment kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Deployment, namespace=openshift-nmstate, name=nmstate-webhook, wait=True, wait_condition={'type': 'Available', 'status': 'True'}, wait_timeout=300] *** 2026-01-22 12:19:44,746 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.029) 0:14:57.455 ****** 2026-01-22 12:19:44,746 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.029) 0:14:57.454 ****** 2026-01-22 12:19:44,768 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,778 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Check if the OpenStack initialization CRD exists kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, kind=CustomResourceDefinition, name=openstacks.operator.openstack.org] *** 2026-01-22 12:19:44,778 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.032) 0:14:57.488 ****** 2026-01-22 12:19:44,779 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.032) 0:14:57.486 ****** 2026-01-22 12:19:44,801 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,809 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Generate OpenStack initialization kustomization file content={{ lookup( 'kubernetes.core.kustomize', dir=cifmw_kustomize_deploy_openstack_source_files ) }}, dest={{ 
cifmw_kustomize_deploy_openstack_dest_file }}, mode=0644] *** 2026-01-22 12:19:44,810 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:57.519 ****** 2026-01-22 12:19:44,810 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:57.517 ****** 2026-01-22 12:19:44,829 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,842 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Apply the kustomized OpenStack initialization CRs kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, state=present, wait=True, src={{ cifmw_kustomize_deploy_openstack_dest_file }}] *** 2026-01-22 12:19:44,842 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.032) 0:14:57.551 ****** 2026-01-22 12:19:44,842 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.032) 0:14:57.549 ****** 2026-01-22 12:19:44,868 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,880 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait until OpenStack operators are deployed and ready (new install paradigm) kubeconfig={{ cifmw_openshift_kubeconfig }}, api_version=operator.openstack.org/v1beta1, kind=OpenStack, namespace={{ operator_namespace }}, name=openstack, wait=True, wait_condition={'type': 'Ready', 'status': 'True'}, wait_timeout=600] *** 2026-01-22 12:19:44,880 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.038) 0:14:57.590 ****** 2026-01-22 12:19:44,880 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.038) 0:14:57.588 ****** 2026-01-22 12:19:44,907 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,918 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait until OpenStack operators are deployed and ready (old install paradigm) kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Deployment, namespace={{ operator_namespace }}, label_selectors=['{{ item }}'], wait=True, wait_condition={'type': 'Available', 'status': 'True'}, wait_timeout=600] *** 2026-01-22 12:19:44,918 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.037) 0:14:57.627 ****** 2026-01-22 12:19:44,918 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.037) 0:14:57.625 ****** 2026-01-22 12:19:44,949 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=openstack.org/operator-name) 2026-01-22 12:19:44,953 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=app.kubernetes.io/name=rabbitmq-cluster-operator) 2026-01-22 12:19:44,955 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,969 p=33295 u=zuul n=ansible | TASK [Update containers in deployed OSP operators name=update_containers] ****** 2026-01-22 12:19:44,969 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.050) 0:14:57.678 ****** 2026-01-22 12:19:44,969 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.050) 0:14:57.676 ****** 2026-01-22 12:19:44,989 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,998 p=33295 u=zuul n=ansible | TASK [Update containers in deployed OSP operators using set_openstack_containers role name=set_openstack_containers] *** 2026-01-22 12:19:44,998 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.028) 0:14:57.707 ****** 2026-01-22 12:19:44,998 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 
+0000 (0:00:00.028) 0:14:57.705 ****** 2026-01-22 12:19:45,018 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,027 p=33295 u=zuul n=ansible | TASK [Configure LVMS Storage Class name=ci_lvms_storage] *********************** 2026-01-22 12:19:45,028 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.029) 0:14:57.737 ****** 2026-01-22 12:19:45,028 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.029) 0:14:57.735 ****** 2026-01-22 12:19:45,049 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,061 p=33295 u=zuul n=ansible | TASK [Execute deployment steps name=kustomize_deploy, tasks_from=execute_step.yml, apply={'tags': ['edpm_deploy']}] *** 2026-01-22 12:19:45,061 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.033) 0:14:57.770 ****** 2026-01-22 12:19:45,061 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.033) 0:14:57.768 ****** 2026-01-22 12:19:45,086 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,097 p=33295 u=zuul n=ansible | TASK [install_openstack_ca : Get CA bundle data with retries] ****************** 2026-01-22 12:19:45,097 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.036) 0:14:57.807 ****** 2026-01-22 12:19:45,097 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.036) 0:14:57.805 ****** 2026-01-22 12:19:45,128 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,136 p=33295 u=zuul n=ansible | TASK [install_openstack_ca : Set _ca_bundle fact if CA returned from OCP] ****** 2026-01-22 12:19:45,137 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.039) 0:14:57.846 ****** 2026-01-22 12:19:45,137 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.039) 0:14:57.844 ****** 2026-01-22 12:19:45,165 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,176 p=33295 u=zuul n=ansible | TASK [install_openstack_ca : Creating tls-ca-bundle.pem from CA bundle dest={{ cifmw_install_openstack_ca_file_full_path }}, content={{ _ca_bundle }}, mode=0644] *** 2026-01-22 12:19:45,177 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.039) 0:14:57.886 ****** 2026-01-22 12:19:45,177 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.040) 0:14:57.884 ****** 2026-01-22 12:19:45,198 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,209 p=33295 u=zuul n=ansible | TASK [install_openstack_ca : Check if OpenStackControlplane CA file is present path={{ cifmw_install_openstack_ca_file_full_path }}, get_attributes=False, get_checksum=False, get_mime=False] *** 2026-01-22 12:19:45,209 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.032) 0:14:57.919 ****** 2026-01-22 12:19:45,210 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.032) 0:14:57.917 ****** 2026-01-22 12:19:45,231 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,242 p=33295 u=zuul n=ansible | TASK [Call install_ca role to inject OpenStackControlplane CA file if present role=install_ca] *** 2026-01-22 12:19:45,243 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.033) 0:14:57.952 ****** 2026-01-22 12:19:45,243 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.033) 0:14:57.950 ****** 2026-01-22 12:19:45,264 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 
12:19:45,285 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Run nova host discover process _raw_params=oc rsh -n {{ cifmw_openstack_namespace }} nova-cell0-conductor-0 nova-manage cell_v2 discover_hosts --verbose] *** 2026-01-22 12:19:45,285 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.042) 0:14:57.995 ****** 2026-01-22 12:19:45,285 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.042) 0:14:57.993 ****** 2026-01-22 12:19:45,309 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,318 p=33295 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2026-01-22 12:19:45,318 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.033) 0:14:58.028 ****** 2026-01-22 12:19:45,318 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.033) 0:14:58.026 ****** 2026-01-22 12:19:45,340 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,350 p=33295 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2026-01-22 12:19:45,350 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.031) 0:14:58.059 ****** 2026-01-22 12:19:45,350 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.031) 0:14:58.057 ****** 2026-01-22 12:19:45,374 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,384 p=33295 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_deploy _raw_params={{ hook.type }}.yml] *** 2026-01-22 12:19:45,384 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.034) 0:14:58.093 ****** 2026-01-22 12:19:45,384 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.034) 0:14:58.091 ****** 2026-01-22 12:19:45,467 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,501 p=33295 u=zuul n=ansible | TASK [Run validations name=validations] **************************************** 2026-01-22 12:19:45,502 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.117) 0:14:58.211 ****** 2026-01-22 12:19:45,502 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.117) 0:14:58.209 ****** 2026-01-22 12:19:45,524 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,535 p=33295 u=zuul n=ansible | TASK [Copy kubeconfig to .kube folder where oc expects it src={{ cifmw_openshift_kubeconfig }}, dest=/home/zuul/.kube/config, remote_src=True] *** 2026-01-22 12:19:45,536 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.033) 0:14:58.245 ****** 2026-01-22 12:19:45,536 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.033) 0:14:58.243 ****** 2026-01-22 12:19:45,823 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:45,832 p=33295 u=zuul n=ansible | TASK [Make sure all Openstack operators are deployed _raw_params=set -o pipefail && oc get csv -l operators.coreos.com/openstack-operator.openstack-operators -n "openstack-operators" --no-headers=true | grep -i "succeeded" ] *** 2026-01-22 12:19:45,832 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.296) 0:14:58.541 
****** 2026-01-22 12:19:45,832 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.296) 0:14:58.539 ****** 2026-01-22 12:19:46,199 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:46,218 p=33295 u=zuul n=ansible | TASK [Get the name of the control plane deployed by 06-deploy-edpm.yml _raw_params=oc get -n openstack openstackcontrolplane -o name] *** 2026-01-22 12:19:46,218 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:46 +0000 (0:00:00.386) 0:14:58.928 ****** 2026-01-22 12:19:46,219 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:46 +0000 (0:00:00.386) 0:14:58.926 ****** 2026-01-22 12:19:46,598 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:46,608 p=33295 u=zuul n=ansible | TASK [Delete control plane deployed by 06-deploy-edpm.yml _raw_params=oc delete -n openstack "{{ control_plane.stdout }}"] *** 2026-01-22 12:19:46,608 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:46 +0000 (0:00:00.389) 0:14:59.317 ****** 2026-01-22 12:19:46,608 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:46 +0000 (0:00:00.389) 0:14:59.315 ****** 2026-01-22 12:19:47,414 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:47,444 p=33295 u=zuul n=ansible | PLAY RECAP ********************************************************************* 2026-01-22 12:19:47,444 p=33295 u=zuul n=ansible | localhost : ok=187 changed=71 unreachable=0 failed=0 skipped=223 rescued=0 ignored=0 2026-01-22 12:19:47,444 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:47 +0000 (0:00:00.836) 0:15:00.154 ****** 2026-01-22 12:19:47,444 p=33295 u=zuul n=ansible | =============================================================================== 2026-01-22 12:19:47,445 p=33295 u=zuul n=ansible | edpm_prepare : Wait for OpenStack controlplane to be deployed --------- 323.11s 2026-01-22 12:19:47,445 p=33295 u=zuul n=ansible | install_yamls_makes : Run openstack ----------------------------------- 185.36s 2026-01-22 12:19:47,445 p=33295 u=zuul n=ansible | install_yamls_makes : Run openstack_init ------------------------------ 104.34s 2026-01-22 12:19:47,445 p=33295 u=zuul n=ansible | edpm_prepare : Wait for OpenStack subscription creation ---------------- 60.96s 2026-01-22 12:19:47,445 p=33295 u=zuul n=ansible | ci_setup : Install needed packages ------------------------------------- 43.35s 2026-01-22 12:19:47,445 p=33295 u=zuul n=ansible | run_hook : Run hook without retry - Download needed tools -------------- 34.25s 2026-01-22 12:19:47,445 p=33295 u=zuul n=ansible | edpm_prepare : Wait for control plane to change its status ------------- 30.07s 2026-01-22 12:19:47,445 p=33295 u=zuul n=ansible | install_yamls_makes : Run crc_storage ---------------------------------- 21.83s 2026-01-22 12:19:47,445 p=33295 u=zuul n=ansible | repo_setup : Initialize python venv and install requirements ------------ 9.49s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | ci_local_storage : Perform action in the PV directory ------------------- 5.51s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | ci_setup : Install openshift client ------------------------------------- 5.30s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | install_yamls_makes : Run netconfig_deploy ------------------------------ 4.63s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | run_hook : Run hook without retry - Fetch compute facts ----------------- 3.03s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | ci_local_storage : Fetch hostnames for all hosts ------------------------ 2.55s 
2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | openshift_setup : Create required namespaces ---------------------------- 1.80s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | install_ca : Update ca bundle ------------------------------------------- 1.63s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | openshift_setup : Allow anonymous image-pulls in CRC registry for targeted namespaces --- 1.61s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | install_ca : Update ca bundle ------------------------------------------- 1.57s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | install_yamls_makes : Run openstack_deploy_prep ------------------------- 1.31s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | openshift_setup : Get internal OpenShift registry route ----------------- 1.17s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:47 +0000 (0:00:00.837) 0:15:00.153 ****** 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | =============================================================================== 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | edpm_prepare ---------------------------------------------------------- 418.37s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | install_yamls_makes --------------------------------------------------- 319.07s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | ci_setup --------------------------------------------------------------- 50.94s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | run_hook --------------------------------------------------------------- 42.38s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | repo_setup ------------------------------------------------------------- 17.19s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | ci_local_storage ------------------------------------------------------- 12.95s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | openshift_setup --------------------------------------------------------- 9.12s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | ci_gen_kustomize_values ------------------------------------------------- 4.51s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | install_ca -------------------------------------------------------------- 4.51s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | openshift_login --------------------------------------------------------- 4.00s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | install_yamls ----------------------------------------------------------- 3.18s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | cifmw_setup ------------------------------------------------------------- 2.55s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | kustomize_deploy -------------------------------------------------------- 1.40s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | install_openstack_ca ---------------------------------------------------- 1.32s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | ansible.builtin.command ------------------------------------------------- 1.23s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | ansible.builtin.template ------------------------------------------------ 1.18s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | gather_facts ------------------------------------------------------------ 1.14s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | cifmw_helpers ----------------------------------------------------------- 1.10s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | edpm_deploy_baremetal --------------------------------------------------- 0.76s 
2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | discover_latest_image --------------------------------------------------- 0.68s 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | edpm_deploy ------------------------------------------------------------- 0.55s 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | networking_mapper ------------------------------------------------------- 0.46s 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | ansible.builtin.shell --------------------------------------------------- 0.39s 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | ansible.builtin.include_role -------------------------------------------- 0.34s 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | libvirt_manager --------------------------------------------------------- 0.30s 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | ansible.builtin.copy ---------------------------------------------------- 0.30s 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | ansible.builtin.set_fact ------------------------------------------------ 0.08s 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | ansible.builtin.lineinfile ---------------------------------------------- 0.07s 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | ansible.builtin.include_vars -------------------------------------------- 0.06s 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | total ----------------------------------------------------------------- 900.11s 2026-01-22 12:22:23,368 p=38901 u=zuul n=ansible | PLAY [all] ********************************************************************* 2026-01-22 12:22:23,427 p=38901 u=zuul n=ansible | TASK [Deploy standalone name=install_yamls_makes, tasks_from=make_standalone_deploy.yml] *** 2026-01-22 12:22:23,427 p=38901 u=zuul n=ansible | Thursday 22 January 2026 12:22:23 +0000 (0:00:00.061) 0:00:00.061 ****** 2026-01-22 12:22:23,427 p=38901 u=zuul n=ansible | Thursday 22 January 2026 12:22:23 +0000 (0:00:00.060) 0:00:00.060 ****** 2026-01-22 12:22:23,463 p=38901 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_standalone_deploy_env var=make_standalone_deploy_env] *** 2026-01-22 12:22:23,464 p=38901 u=zuul n=ansible | Thursday 22 January 2026 12:22:23 +0000 (0:00:00.036) 0:00:00.098 ****** 2026-01-22 12:22:23,464 p=38901 u=zuul n=ansible | Thursday 22 January 2026 12:22:23 +0000 (0:00:00.036) 0:00:00.097 ****** 2026-01-22 12:22:23,482 p=38901 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:22:23,493 p=38901 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_standalone_deploy_params var=make_standalone_deploy_params] *** 2026-01-22 12:22:23,493 p=38901 u=zuul n=ansible | Thursday 22 January 2026 12:22:23 +0000 (0:00:00.029) 0:00:00.127 ****** 2026-01-22 12:22:23,493 p=38901 u=zuul n=ansible | Thursday 22 January 2026 12:22:23 +0000 (0:00:00.029) 0:00:00.126 ****** 2026-01-22 12:22:23,543 p=38901 u=zuul n=ansible | ok: [localhost] => make_standalone_deploy_params: BARBICAN_SERVICE_ENABLED: 'false' DATAPLANE_DNS_SERVER: 192.168.122.10 DNS_DOMAIN: ooo.test EDPM_COMPUTE_CEPH_ENABLED: 'false' EDPM_COMPUTE_CEPH_NOVA: 'false' EDPM_COMPUTE_NETWORK_IP: 192.168.122 GATEWAY: 192.168.122.10 HOST_PRIMARY_RESOLV_CONF_ENTRY: 192.168.122.10 IP: 192.168.122.100 IP_ADRESS_SUFFIX: '100' NTP_SERVER: pool.ntp.org OCTAVIA_ENABLED: 'true' OS_NET_CONFIG_IFACE: nic2 REPO_SETUP_CMDS: /home/zuul/cdn_subscription_repos.sh SSH_KEY_FILE: /home/zuul/.ssh/id_rsa 
STANDALONE_VM: 'false' SWIFT_REPLICATED: 'false' TELEMETRY_ENABLED: 'true' TLS_ENABLED: 'true' 2026-01-22 12:22:23,555 p=38901 u=zuul n=ansible | TASK [install_yamls_makes : Run standalone_deploy output_dir={{ cifmw_basedir }}/artifacts, chdir=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup, script=make standalone_deploy, dry_run={{ make_standalone_deploy_dryrun|default(false)|bool }}, extra_args={{ dict((make_standalone_deploy_env|default({})), **(make_standalone_deploy_params|default({}))) }}] *** 2026-01-22 12:22:23,555 p=38901 u=zuul n=ansible | Thursday 22 January 2026 12:22:23 +0000 (0:00:00.061) 0:00:00.189 ****** 2026-01-22 12:22:23,555 p=38901 u=zuul n=ansible | Thursday 22 January 2026 12:22:23 +0000 (0:00:00.061) 0:00:00.188 ****** 2026-01-22 12:22:23,613 p=38901 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_011_run_standalone.log 2026-01-22 13:14:26,973 p=38901 u=zuul n=ansible | [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. Found: {{ make_standalone_deploy_until | default(true) }} 2026-01-22 13:14:27,034 p=38901 u=zuul n=ansible | changed: [localhost] 2026-01-22 13:14:27,124 p=38901 u=zuul n=ansible | PLAY RECAP ********************************************************************* 2026-01-22 13:14:27,124 p=38901 u=zuul n=ansible | localhost : ok=2 changed=1 unreachable=0 failed=0 skipped=1 rescued=0 ignored=0 2026-01-22 13:14:27,124 p=38901 u=zuul n=ansible | Thursday 22 January 2026 13:14:27 +0000 (0:52:03.569) 0:52:03.758 ****** 2026-01-22 13:14:27,124 p=38901 u=zuul n=ansible | =============================================================================== 2026-01-22 13:14:27,124 p=38901 u=zuul n=ansible | install_yamls_makes : Run standalone_deploy -------------------------- 3123.57s 2026-01-22 13:14:27,125 p=38901 u=zuul n=ansible | install_yamls_makes : Debug make_standalone_deploy_params --------------- 0.06s 2026-01-22 13:14:27,125 p=38901 u=zuul n=ansible | Deploy standalone ------------------------------------------------------- 0.04s 2026-01-22 13:14:27,125 p=38901 u=zuul n=ansible | install_yamls_makes : Debug make_standalone_deploy_env ------------------ 0.03s 2026-01-22 13:14:27,125 p=38901 u=zuul n=ansible | Thursday 22 January 2026 13:14:27 +0000 (0:52:03.569) 0:52:03.758 ****** 2026-01-22 13:14:27,125 p=38901 u=zuul n=ansible | =============================================================================== 2026-01-22 13:14:27,125 p=38901 u=zuul n=ansible | install_yamls_makes -------------------------------------------------- 3123.66s 2026-01-22 13:14:27,125 p=38901 u=zuul n=ansible | ansible.builtin.include_role -------------------------------------------- 0.04s 2026-01-22 13:14:27,125 p=38901 u=zuul n=ansible | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 2026-01-22 13:14:27,125 p=38901 u=zuul n=ansible | total ---------------------------------------------------------------- 3123.70s home/zuul/zuul-output/logs/ci-framework-data/logs/openstack-must-gather/0000755000175000017500000000000015134437262025563 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/logs/openstack-must-gather/must-gather.logs0000644000175000017500000000233015134432027030701 0ustar zuulzuul[must-gather ] OUT 2026-01-22T14:28:39.085688692Z Using must-gather plug-in image: quay.io/openstack-k8s-operators/openstack-must-gather:latest When opening a support case, bugzilla, or issue please include the following summary 
data along with any other requested information: ClusterID: ClientVersion: 4.20.10 ClusterVersion: Stable at "4.18.1" ClusterOperators: clusteroperator/machine-config is degraded because Failed to resync 4.18.1 because: error during syncRequiredMachineConfigPools: [context deadline exceeded, error MachineConfigPool master is not ready, retrying. Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1)] clusteroperator/cloud-credential is missing clusteroperator/cluster-autoscaler is missing clusteroperator/insights is missing clusteroperator/monitoring is missing clusteroperator/storage is missing [must-gather ] OUT 2026-01-22T14:28:39.137139995Z namespace/openshift-must-gather-n4qb8 created [must-gather ] OUT 2026-01-22T14:28:39.144304072Z clusterrolebinding.rbac.authorization.k8s.io/must-gather-bnnhh created [must-gather ] OUT 2026-01-22T14:28:39.313093327Z pod for plug-in image quay.io/openstack-k8s-operators/openstack-must-gather:latest created home/zuul/zuul-output/logs/ci-framework-data/logs/openstack-must-gather/timestamp0000644000175000017500000000006715134432027027506 0ustar zuulzuul2026-01-22 14:28:39.152630138 +0000 UTC m=+0.201566418 home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_000_run_hook_without_retry.log0000644000175000017500000002334115134411321031302 0ustar zuulzuul[WARNING]: Found variable using reserved name: namespace PLAY [Download tools] ********************************************************** TASK [download_tools : Install build dependencies name=['jq', 'skopeo', 'sqlite', 'httpd-tools', 'virt-install', 'gcc', 'python3-jinja2', 'xmlstarlet', 'openssl']] *** Thursday 22 January 2026 12:06:07 +0000 (0:00:00.046) 0:00:00.046 ****** Thursday 22 January 2026 12:06:07 +0000 (0:00:00.045) 0:00:00.045 ****** changed: [localhost] TASK [download_tools : Set opm download url suffix opm_url_suffix=latest/download] *** Thursday 22 January 2026 12:06:13 +0000 (0:00:05.953) 0:00:06.000 ****** Thursday 22 January 2026 12:06:13 +0000 (0:00:05.953) 0:00:05.999 ****** skipping: [localhost] TASK [download_tools : Set opm download url suffix opm_url_suffix=download/{{ opm_version }}] *** Thursday 22 January 2026 12:06:13 +0000 (0:00:00.053) 0:00:06.054 ****** Thursday 22 January 2026 12:06:13 +0000 (0:00:00.053) 0:00:06.053 ****** ok: [localhost] TASK [download_tools : Create $HOME/bin dir path={{ lookup('env', 'HOME') }}/bin, state=directory, mode=0755] *** Thursday 22 January 2026 12:06:13 +0000 (0:00:00.076) 0:00:06.130 ****** Thursday 22 January 2026 12:06:13 +0000 (0:00:00.076) 0:00:06.130 ****** ok: [localhost] TASK [download_tools : Download opm url=https://github.com/operator-framework/operator-registry/releases/{{ opm_url_suffix }}/linux-amd64-opm, dest={{ lookup('env', 'HOME') }}/bin/opm, mode=0755, timeout=30] *** Thursday 22 January 2026 12:06:14 +0000 (0:00:00.341) 0:00:06.472 ****** Thursday 22 January 2026 12:06:14 +0000 (0:00:00.341) 0:00:06.471 ****** changed: [localhost] TASK [download_tools : Get version from sdk_version _sdk_version={{ sdk_version | regex_search('v(.*)', '\1') | first }}] *** Thursday 22 January 2026 12:06:15 +0000 (0:00:00.970) 0:00:07.442 ****** Thursday 22 January 2026 12:06:15 +0000 (0:00:00.970) 0:00:07.442 ****** ok: [localhost] TASK [download_tools : Set operator-sdk file for version < 1.3.0 _operator_sdk_file=operator-sdk-{{ sdk_version }}-x86_64-linux-gnu] *** Thursday 22 January 2026 12:06:15 +0000 (0:00:00.047) 0:00:07.490 ****** Thursday 22 January 2026 12:06:15 +0000 (0:00:00.047) 0:00:07.489 ****** 
skipping: [localhost] TASK [download_tools : Set operator-sdk file for version >= 1.3.0 _operator_sdk_file=operator-sdk_linux_amd64] *** Thursday 22 January 2026 12:06:15 +0000 (0:00:00.033) 0:00:07.524 ****** Thursday 22 January 2026 12:06:15 +0000 (0:00:00.033) 0:00:07.523 ****** ok: [localhost] TASK [download_tools : Download operator-sdk url=https://github.com/operator-framework/operator-sdk/releases/download/{{ sdk_version }}/{{ _operator_sdk_file }}, dest={{ lookup('env', 'HOME') }}/bin/operator-sdk, mode=0755, force=True, timeout=30] *** Thursday 22 January 2026 12:06:15 +0000 (0:00:00.038) 0:00:07.563 ****** Thursday 22 January 2026 12:06:15 +0000 (0:00:00.038) 0:00:07.562 ****** changed: [localhost] TASK [download_tools : Download and extract kustomize src=https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2F{{ kustomize_version }}/kustomize_{{ kustomize_version }}_linux_amd64.tar.gz, dest={{ lookup('env', 'HOME') }}/bin/, remote_src=True] *** Thursday 22 January 2026 12:06:16 +0000 (0:00:01.399) 0:00:08.963 ****** Thursday 22 January 2026 12:06:16 +0000 (0:00:01.399) 0:00:08.962 ****** changed: [localhost] TASK [download_tools : Download kubectl url=https://dl.k8s.io/release/{{ kubectl_version }}/bin/linux/amd64/kubectl, dest={{ lookup('env', 'HOME') }}/bin/kubectl, mode=0755, timeout=30] *** Thursday 22 January 2026 12:06:18 +0000 (0:00:01.579) 0:00:10.543 ****** Thursday 22 January 2026 12:06:18 +0000 (0:00:01.579) 0:00:10.542 ****** ok: [localhost] TASK [download_tools : Download kuttl url=https://github.com/kudobuilder/kuttl/releases/download/v{{ kuttl_version }}/kubectl-kuttl_{{ kuttl_version }}_linux_x86_64, dest={{ lookup('env', 'HOME') }}/bin/kubectl-kuttl, mode=0755, timeout=30] *** Thursday 22 January 2026 12:06:18 +0000 (0:00:00.412) 0:00:10.955 ****** Thursday 22 January 2026 12:06:18 +0000 (0:00:00.412) 0:00:10.954 ****** changed: [localhost] TASK [download_tools : Download chainsaw src=https://github.com/kyverno/chainsaw/releases/download/v{{ chainsaw_version }}/chainsaw_linux_amd64.tar.gz, dest={{ lookup('env', 'HOME') }}/bin/, remote_src=True, extra_opts=['--exclude', 'README.md', '--exclude', 'LICENSE']] *** Thursday 22 January 2026 12:06:20 +0000 (0:00:01.788) 0:00:12.744 ****** Thursday 22 January 2026 12:06:20 +0000 (0:00:01.788) 0:00:12.743 ****** changed: [localhost] TASK [download_tools : Download and extract yq src=https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64.tar.gz, dest={{ lookup('env', 'HOME') }}/bin/, remote_src=True, mode=0755] *** Thursday 22 January 2026 12:06:23 +0000 (0:00:03.226) 0:00:15.970 ****** Thursday 22 January 2026 12:06:23 +0000 (0:00:03.226) 0:00:15.970 ****** changed: [localhost] TASK [download_tools : Link yq_linux_amd64 as yq src={{ lookup('env', 'HOME') }}/bin/yq_linux_amd64, dest={{ lookup('env', 'HOME') }}/bin/yq, state=link] *** Thursday 22 January 2026 12:06:24 +0000 (0:00:01.257) 0:00:17.227 ****** Thursday 22 January 2026 12:06:24 +0000 (0:00:01.257) 0:00:17.227 ****** changed: [localhost] TASK [download_tools : Deinstall golang state=absent, name=['golang-bin', 'golang-src', 'golang']] *** Thursday 22 January 2026 12:06:25 +0000 (0:00:00.221) 0:00:17.449 ****** Thursday 22 January 2026 12:06:25 +0000 (0:00:00.221) 0:00:17.448 ****** ok: [localhost] TASK [download_tools : Delete old go version installed from upstream path={{ item }}, state=absent] *** Thursday 22 January 2026 12:06:26 +0000 (0:00:01.239) 0:00:18.689 ****** Thursday 22 January 2026 12:06:26 +0000 (0:00:01.239) 
0:00:18.688 ****** ok: [localhost] => (item=/usr/local/go) ok: [localhost] => (item=/home/zuul/bin/go) ok: [localhost] => (item=/home/zuul/bin/gofmt) ok: [localhost] => (item=/usr/local/bin/go) ok: [localhost] => (item=/usr/local/bin/gofmt) TASK [download_tools : Download and extract golang src=https://golang.org/dl/go{{ go_version }}.linux-amd64.tar.gz, dest=/usr/local, remote_src=True, extra_opts=['--exclude', 'go/misc', '--exclude', 'go/pkg/linux_amd64_race', '--exclude', 'go/test']] *** Thursday 22 January 2026 12:06:27 +0000 (0:00:00.967) 0:00:19.656 ****** Thursday 22 January 2026 12:06:27 +0000 (0:00:00.967) 0:00:19.655 ****** changed: [localhost] TASK [download_tools : Set alternatives link to installed go version _raw_params=set -e update-alternatives --install /usr/local/bin/{{ item }} {{ item }} /usr/local/go/bin/{{ item }} 1 ] *** Thursday 22 January 2026 12:06:40 +0000 (0:00:13.090) 0:00:32.746 ****** Thursday 22 January 2026 12:06:40 +0000 (0:00:13.090) 0:00:32.745 ****** changed: [localhost] => (item=go) changed: [localhost] => (item=gofmt) TASK [download_tools : Clean bash cache msg=When move from rpm to upstream version, make sure to clean bash cache using `hash -d go`] *** Thursday 22 January 2026 12:06:41 +0000 (0:00:00.667) 0:00:33.414 ****** Thursday 22 January 2026 12:06:41 +0000 (0:00:00.667) 0:00:33.413 ****** ok: [localhost] => msg: When move from rpm to upstream version, make sure to clean bash cache using `hash -d go` PLAY RECAP ********************************************************************* localhost : ok=18 changed=10 unreachable=0 failed=0 skipped=2 rescued=0 ignored=0 Thursday 22 January 2026 12:06:41 +0000 (0:00:00.081) 0:00:33.496 ****** =============================================================================== download_tools : Download and extract golang --------------------------- 13.09s download_tools : Install build dependencies ----------------------------- 5.95s download_tools : Download chainsaw -------------------------------------- 3.23s download_tools : Download kuttl ----------------------------------------- 1.79s download_tools : Download and extract kustomize ------------------------- 1.58s download_tools : Download operator-sdk ---------------------------------- 1.40s download_tools : Download and extract yq -------------------------------- 1.26s download_tools : Deinstall golang --------------------------------------- 1.24s download_tools : Download opm ------------------------------------------- 0.97s download_tools : Delete old go version installed from upstream ---------- 0.97s download_tools : Set alternatives link to installed go version ---------- 0.67s download_tools : Download kubectl --------------------------------------- 0.41s download_tools : Create $HOME/bin dir ----------------------------------- 0.34s download_tools : Link yq_linux_amd64 as yq ------------------------------ 0.22s download_tools : Clean bash cache --------------------------------------- 0.08s download_tools : Set opm download url suffix ---------------------------- 0.08s download_tools : Set opm download url suffix ---------------------------- 0.05s download_tools : Get version from sdk_version --------------------------- 0.05s download_tools : Set operator-sdk file for version >= 1.3.0 ------------- 0.04s download_tools : Set operator-sdk file for version < 1.3.0 -------------- 0.03s Thursday 22 January 2026 12:06:41 +0000 (0:00:00.082) 0:00:33.496 ****** =============================================================================== download_tools 
--------------------------------------------------------- 33.45s ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ total ------------------------------------------------------------------ 33.45s home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_001_fetch_openshift.log0000644000175000017500000000035215134411323027616 0ustar zuulzuulWARNING: Using insecure TLS client config. Setting this option is not supported! Login successful. You have access to 65 projects, the list has been suppressed. You can list all projects with 'oc projects' Using project "default". home/zuul/zuul-output/logs/ci-framework-data/logs/pre_infra_download_needed_tools.log0000644000175000017500000003675015134411321030420 0ustar zuulzuul2026-01-22 12:06:07,767 p=34247 u=zuul n=ansible | [WARNING]: Found variable using reserved name: namespace 2026-01-22 12:06:07,767 p=34247 u=zuul n=ansible | PLAY [Download tools] ********************************************************** 2026-01-22 12:06:07,809 p=34247 u=zuul n=ansible | TASK [download_tools : Install build dependencies name=['jq', 'skopeo', 'sqlite', 'httpd-tools', 'virt-install', 'gcc', 'python3-jinja2', 'xmlstarlet', 'openssl']] *** 2026-01-22 12:06:07,810 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:07 +0000 (0:00:00.046) 0:00:00.046 ****** 2026-01-22 12:06:07,810 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:07 +0000 (0:00:00.045) 0:00:00.045 ****** 2026-01-22 12:06:13,750 p=34247 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:13,763 p=34247 u=zuul n=ansible | TASK [download_tools : Set opm download url suffix opm_url_suffix=latest/download] *** 2026-01-22 12:06:13,763 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:13 +0000 (0:00:05.953) 0:00:06.000 ****** 2026-01-22 12:06:13,763 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:13 +0000 (0:00:05.953) 0:00:05.999 ****** 2026-01-22 12:06:13,797 p=34247 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:13,817 p=34247 u=zuul n=ansible | TASK [download_tools : Set opm download url suffix opm_url_suffix=download/{{ opm_version }}] *** 2026-01-22 12:06:13,817 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:13 +0000 (0:00:00.053) 0:00:06.054 ****** 2026-01-22 12:06:13,817 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:13 +0000 (0:00:00.053) 0:00:06.053 ****** 2026-01-22 12:06:13,853 p=34247 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:13,894 p=34247 u=zuul n=ansible | TASK [download_tools : Create $HOME/bin dir path={{ lookup('env', 'HOME') }}/bin, state=directory, mode=0755] *** 2026-01-22 12:06:13,894 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:13 +0000 (0:00:00.076) 0:00:06.130 ****** 2026-01-22 12:06:13,894 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:13 +0000 (0:00:00.076) 0:00:06.130 ****** 2026-01-22 12:06:14,219 p=34247 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:14,235 p=34247 u=zuul n=ansible | TASK [download_tools : Download opm url=https://github.com/operator-framework/operator-registry/releases/{{ opm_url_suffix }}/linux-amd64-opm, dest={{ lookup('env', 'HOME') }}/bin/opm, mode=0755, timeout=30] *** 2026-01-22 12:06:14,235 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:14 +0000 (0:00:00.341) 0:00:06.472 ****** 2026-01-22 12:06:14,235 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:14 +0000 (0:00:00.341) 0:00:06.471 ****** 2026-01-22 12:06:15,194 p=34247 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:15,206 p=34247 
u=zuul n=ansible | TASK [download_tools : Get version from sdk_version _sdk_version={{ sdk_version | regex_search('v(.*)', '\1') | first }}] *** 2026-01-22 12:06:15,206 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:15 +0000 (0:00:00.970) 0:00:07.442 ****** 2026-01-22 12:06:15,206 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:15 +0000 (0:00:00.970) 0:00:07.442 ****** 2026-01-22 12:06:15,237 p=34247 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:15,253 p=34247 u=zuul n=ansible | TASK [download_tools : Set operator-sdk file for version < 1.3.0 _operator_sdk_file=operator-sdk-{{ sdk_version }}-x86_64-linux-gnu] *** 2026-01-22 12:06:15,254 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:15 +0000 (0:00:00.047) 0:00:07.490 ****** 2026-01-22 12:06:15,254 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:15 +0000 (0:00:00.047) 0:00:07.489 ****** 2026-01-22 12:06:15,273 p=34247 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:15,287 p=34247 u=zuul n=ansible | TASK [download_tools : Set operator-sdk file for version >= 1.3.0 _operator_sdk_file=operator-sdk_linux_amd64] *** 2026-01-22 12:06:15,287 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:15 +0000 (0:00:00.033) 0:00:07.524 ****** 2026-01-22 12:06:15,288 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:15 +0000 (0:00:00.033) 0:00:07.523 ****** 2026-01-22 12:06:15,309 p=34247 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:15,326 p=34247 u=zuul n=ansible | TASK [download_tools : Download operator-sdk url=https://github.com/operator-framework/operator-sdk/releases/download/{{ sdk_version }}/{{ _operator_sdk_file }}, dest={{ lookup('env', 'HOME') }}/bin/operator-sdk, mode=0755, force=True, timeout=30] *** 2026-01-22 12:06:15,326 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:15 +0000 (0:00:00.038) 0:00:07.563 ****** 2026-01-22 12:06:15,327 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:15 +0000 (0:00:00.038) 0:00:07.562 ****** 2026-01-22 12:06:16,715 p=34247 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:16,726 p=34247 u=zuul n=ansible | TASK [download_tools : Download and extract kustomize src=https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2F{{ kustomize_version }}/kustomize_{{ kustomize_version }}_linux_amd64.tar.gz, dest={{ lookup('env', 'HOME') }}/bin/, remote_src=True] *** 2026-01-22 12:06:16,726 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:16 +0000 (0:00:01.399) 0:00:08.963 ****** 2026-01-22 12:06:16,726 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:16 +0000 (0:00:01.399) 0:00:08.962 ****** 2026-01-22 12:06:18,296 p=34247 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:18,306 p=34247 u=zuul n=ansible | TASK [download_tools : Download kubectl url=https://dl.k8s.io/release/{{ kubectl_version }}/bin/linux/amd64/kubectl, dest={{ lookup('env', 'HOME') }}/bin/kubectl, mode=0755, timeout=30] *** 2026-01-22 12:06:18,306 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:18 +0000 (0:00:01.579) 0:00:10.543 ****** 2026-01-22 12:06:18,306 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:18 +0000 (0:00:01.579) 0:00:10.542 ****** 2026-01-22 12:06:18,707 p=34247 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:18,718 p=34247 u=zuul n=ansible | TASK [download_tools : Download kuttl url=https://github.com/kudobuilder/kuttl/releases/download/v{{ kuttl_version }}/kubectl-kuttl_{{ kuttl_version }}_linux_x86_64, dest={{ lookup('env', 'HOME') }}/bin/kubectl-kuttl, mode=0755, 
timeout=30] *** 2026-01-22 12:06:18,718 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:18 +0000 (0:00:00.412) 0:00:10.955 ****** 2026-01-22 12:06:18,718 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:18 +0000 (0:00:00.412) 0:00:10.954 ****** 2026-01-22 12:06:20,491 p=34247 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:20,507 p=34247 u=zuul n=ansible | TASK [download_tools : Download chainsaw src=https://github.com/kyverno/chainsaw/releases/download/v{{ chainsaw_version }}/chainsaw_linux_amd64.tar.gz, dest={{ lookup('env', 'HOME') }}/bin/, remote_src=True, extra_opts=['--exclude', 'README.md', '--exclude', 'LICENSE']] *** 2026-01-22 12:06:20,507 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:20 +0000 (0:00:01.788) 0:00:12.744 ****** 2026-01-22 12:06:20,508 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:20 +0000 (0:00:01.788) 0:00:12.743 ****** 2026-01-22 12:06:23,719 p=34247 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:23,733 p=34247 u=zuul n=ansible | TASK [download_tools : Download and extract yq src=https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64.tar.gz, dest={{ lookup('env', 'HOME') }}/bin/, remote_src=True, mode=0755] *** 2026-01-22 12:06:23,734 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:23 +0000 (0:00:03.226) 0:00:15.970 ****** 2026-01-22 12:06:23,734 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:23 +0000 (0:00:03.226) 0:00:15.970 ****** 2026-01-22 12:06:24,980 p=34247 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:24,991 p=34247 u=zuul n=ansible | TASK [download_tools : Link yq_linux_amd64 as yq src={{ lookup('env', 'HOME') }}/bin/yq_linux_amd64, dest={{ lookup('env', 'HOME') }}/bin/yq, state=link] *** 2026-01-22 12:06:24,991 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:24 +0000 (0:00:01.257) 0:00:17.227 ****** 2026-01-22 12:06:24,991 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:24 +0000 (0:00:01.257) 0:00:17.227 ****** 2026-01-22 12:06:25,200 p=34247 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:25,212 p=34247 u=zuul n=ansible | TASK [download_tools : Deinstall golang state=absent, name=['golang-bin', 'golang-src', 'golang']] *** 2026-01-22 12:06:25,213 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:25 +0000 (0:00:00.221) 0:00:17.449 ****** 2026-01-22 12:06:25,213 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:25 +0000 (0:00:00.221) 0:00:17.448 ****** 2026-01-22 12:06:26,439 p=34247 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:26,452 p=34247 u=zuul n=ansible | TASK [download_tools : Delete old go version installed from upstream path={{ item }}, state=absent] *** 2026-01-22 12:06:26,452 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:26 +0000 (0:00:01.239) 0:00:18.689 ****** 2026-01-22 12:06:26,452 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:26 +0000 (0:00:01.239) 0:00:18.688 ****** 2026-01-22 12:06:26,648 p=34247 u=zuul n=ansible | ok: [localhost] => (item=/usr/local/go) 2026-01-22 12:06:26,844 p=34247 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/bin/go) 2026-01-22 12:06:27,033 p=34247 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/bin/gofmt) 2026-01-22 12:06:27,233 p=34247 u=zuul n=ansible | ok: [localhost] => (item=/usr/local/bin/go) 2026-01-22 12:06:27,406 p=34247 u=zuul n=ansible | ok: [localhost] => (item=/usr/local/bin/gofmt) 2026-01-22 12:06:27,419 p=34247 u=zuul n=ansible | TASK [download_tools : Download and extract golang 
src=https://golang.org/dl/go{{ go_version }}.linux-amd64.tar.gz, dest=/usr/local, remote_src=True, extra_opts=['--exclude', 'go/misc', '--exclude', 'go/pkg/linux_amd64_race', '--exclude', 'go/test']] *** 2026-01-22 12:06:27,419 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:27 +0000 (0:00:00.967) 0:00:19.656 ****** 2026-01-22 12:06:27,419 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:27 +0000 (0:00:00.967) 0:00:19.655 ****** 2026-01-22 12:06:40,494 p=34247 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:40,509 p=34247 u=zuul n=ansible | TASK [download_tools : Set alternatives link to installed go version _raw_params=set -e update-alternatives --install /usr/local/bin/{{ item }} {{ item }} /usr/local/go/bin/{{ item }} 1 ] *** 2026-01-22 12:06:40,509 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:40 +0000 (0:00:13.090) 0:00:32.746 ****** 2026-01-22 12:06:40,509 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:40 +0000 (0:00:13.090) 0:00:32.745 ****** 2026-01-22 12:06:40,981 p=34247 u=zuul n=ansible | changed: [localhost] => (item=go) 2026-01-22 12:06:41,164 p=34247 u=zuul n=ansible | changed: [localhost] => (item=gofmt) 2026-01-22 12:06:41,177 p=34247 u=zuul n=ansible | TASK [download_tools : Clean bash cache msg=When move from rpm to upstream version, make sure to clean bash cache using `hash -d go`] *** 2026-01-22 12:06:41,177 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.667) 0:00:33.414 ****** 2026-01-22 12:06:41,177 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.667) 0:00:33.413 ****** 2026-01-22 12:06:41,191 p=34247 u=zuul n=ansible | ok: [localhost] => msg: When move from rpm to upstream version, make sure to clean bash cache using `hash -d go` 2026-01-22 12:06:41,259 p=34247 u=zuul n=ansible | PLAY RECAP ********************************************************************* 2026-01-22 12:06:41,259 p=34247 u=zuul n=ansible | localhost : ok=18 changed=10 unreachable=0 failed=0 skipped=2 rescued=0 ignored=0 2026-01-22 12:06:41,259 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.081) 0:00:33.496 ****** 2026-01-22 12:06:41,259 p=34247 u=zuul n=ansible | =============================================================================== 2026-01-22 12:06:41,259 p=34247 u=zuul n=ansible | download_tools : Download and extract golang --------------------------- 13.09s 2026-01-22 12:06:41,259 p=34247 u=zuul n=ansible | download_tools : Install build dependencies ----------------------------- 5.95s 2026-01-22 12:06:41,259 p=34247 u=zuul n=ansible | download_tools : Download chainsaw -------------------------------------- 3.23s 2026-01-22 12:06:41,259 p=34247 u=zuul n=ansible | download_tools : Download kuttl ----------------------------------------- 1.79s 2026-01-22 12:06:41,259 p=34247 u=zuul n=ansible | download_tools : Download and extract kustomize ------------------------- 1.58s 2026-01-22 12:06:41,259 p=34247 u=zuul n=ansible | download_tools : Download operator-sdk ---------------------------------- 1.40s 2026-01-22 12:06:41,259 p=34247 u=zuul n=ansible | download_tools : Download and extract yq -------------------------------- 1.26s 2026-01-22 12:06:41,259 p=34247 u=zuul n=ansible | download_tools : Deinstall golang --------------------------------------- 1.24s 2026-01-22 12:06:41,259 p=34247 u=zuul n=ansible | download_tools : Download opm ------------------------------------------- 0.97s 2026-01-22 12:06:41,259 p=34247 u=zuul n=ansible | download_tools 
: Delete old go version installed from upstream ---------- 0.97s 2026-01-22 12:06:41,259 p=34247 u=zuul n=ansible | download_tools : Set alternatives link to installed go version ---------- 0.67s 2026-01-22 12:06:41,259 p=34247 u=zuul n=ansible | download_tools : Download kubectl --------------------------------------- 0.41s 2026-01-22 12:06:41,260 p=34247 u=zuul n=ansible | download_tools : Create $HOME/bin dir ----------------------------------- 0.34s 2026-01-22 12:06:41,260 p=34247 u=zuul n=ansible | download_tools : Link yq_linux_amd64 as yq ------------------------------ 0.22s 2026-01-22 12:06:41,260 p=34247 u=zuul n=ansible | download_tools : Clean bash cache --------------------------------------- 0.08s 2026-01-22 12:06:41,260 p=34247 u=zuul n=ansible | download_tools : Set opm download url suffix ---------------------------- 0.08s 2026-01-22 12:06:41,260 p=34247 u=zuul n=ansible | download_tools : Set opm download url suffix ---------------------------- 0.05s 2026-01-22 12:06:41,260 p=34247 u=zuul n=ansible | download_tools : Get version from sdk_version --------------------------- 0.05s 2026-01-22 12:06:41,260 p=34247 u=zuul n=ansible | download_tools : Set operator-sdk file for version >= 1.3.0 ------------- 0.04s 2026-01-22 12:06:41,260 p=34247 u=zuul n=ansible | download_tools : Set operator-sdk file for version < 1.3.0 -------------- 0.03s 2026-01-22 12:06:41,260 p=34247 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.082) 0:00:33.496 ****** 2026-01-22 12:06:41,260 p=34247 u=zuul n=ansible | =============================================================================== 2026-01-22 12:06:41,260 p=34247 u=zuul n=ansible | download_tools --------------------------------------------------------- 33.45s 2026-01-22 12:06:41,260 p=34247 u=zuul n=ansible | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 2026-01-22 12:06:41,260 p=34247 u=zuul n=ansible | total ------------------------------------------------------------------ 33.45s home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_000_check_for_oc.log0000644000175000017500000000002215134432025027045 0ustar zuulzuul/home/zuul/bin/oc home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_000_run_openstack_must_gather.log0000644000175000017500000036473715134437242031756 0ustar zuulzuul[must-gather ] OUT 2026-01-22T14:28:39.085068006Z Using must-gather plug-in image: quay.io/openstack-k8s-operators/openstack-must-gather:latest When opening a support case, bugzilla, or issue please include the following summary data along with any other requested information: ClusterID: ClientVersion: 4.20.10 ClusterVersion: Stable at "4.18.1" ClusterOperators: clusteroperator/machine-config is degraded because Failed to resync 4.18.1 because: error during syncRequiredMachineConfigPools: [context deadline exceeded, error MachineConfigPool master is not ready, retrying. 
Status: (pool degraded: true total: 1, ready 0, updated: 0, unavailable: 1)] clusteroperator/cloud-credential is missing clusteroperator/cluster-autoscaler is missing clusteroperator/insights is missing clusteroperator/monitoring is missing clusteroperator/storage is missing [must-gather ] OUT 2026-01-22T14:28:39.137139995Z namespace/openshift-must-gather-n4qb8 created [must-gather ] OUT 2026-01-22T14:28:39.144304072Z clusterrolebinding.rbac.authorization.k8s.io/must-gather-bnnhh created [must-gather ] OUT 2026-01-22T14:28:39.313093327Z pod for plug-in image quay.io/openstack-k8s-operators/openstack-must-gather:latest created [must-gather-9wq4j] POD 2026-01-22T14:28:48.182325311Z [disk usage checker] Started [must-gather-9wq4j] POD 2026-01-22T14:28:48.186993903Z [disk usage checker] Volume usage percentage: current = 50 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:28:50.466599438Z Trigger GMR for Service cinder [must-gather-9wq4j] POD 2026-01-22T14:28:50.634112215Z Trigger GMR for Nova services [must-gather-9wq4j] POD 2026-01-22T14:28:51.870824315Z Will retrieve SOS reports from nodes crc [must-gather-9wq4j] POD 2026-01-22T14:28:51.870824315Z Generating SOS Report for crc [must-gather-9wq4j] POD 2026-01-22T14:28:51.870824315Z Journal size limit not set or invalid: ignoring [must-gather-9wq4j] POD 2026-01-22T14:28:52.062261030Z Starting pod/crc-debug-jq7sj ... [must-gather-9wq4j] POD 2026-01-22T14:28:52.062261030Z To use host binaries, run `chroot /host` [must-gather-9wq4j] POD 2026-01-22T14:28:52.145255912Z Generating SOS Report for EDPM standalone.ooo.test [must-gather-9wq4j] POD 2026-01-22T14:28:52.353642058Z Warning: Permanently added '192.168.122.100' (ED25519) to the list of known hosts. [must-gather-9wq4j] POD 2026-01-22T14:28:53.197308978Z [disk usage checker] Volume usage percentage: current = 50 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:28:53.613526303Z +--------------------------------------+----------------+------------------------+----------+---------+-------+----------------------------+ [must-gather-9wq4j] POD 2026-01-22T14:28:53.613526303Z | ID | Binary | Host | Zone | Status | State | Updated At | [must-gather-9wq4j] POD 2026-01-22T14:28:53.613526303Z +--------------------------------------+----------------+------------------------+----------+---------+-------+----------------------------+ [must-gather-9wq4j] POD 2026-01-22T14:28:53.613526303Z | 0b11528d-577e-4274-8ed0-ec1b4916394e | nova-scheduler | nova-scheduler-0 | internal | enabled | up | 2026-01-22T14:28:47.000000 | [must-gather-9wq4j] POD 2026-01-22T14:28:53.613526303Z | 7ae83366-5ec1-4d9f-9d4a-45f1e86110e1 | nova-conductor | nova-cell0-conductor-0 | internal | enabled | up | 2026-01-22T14:28:46.000000 | [must-gather-9wq4j] POD 2026-01-22T14:28:53.613526303Z | f5843a7d-e75b-4579-ae80-4e61cd7e250b | nova-compute | standalone.ooo.test | nova | enabled | up | 2026-01-22T14:28:45.000000 | [must-gather-9wq4j] POD 2026-01-22T14:28:53.613526303Z | c255346f-7ba6-4c18-a00f-19f6831027e7 | nova-conductor | nova-cell1-conductor-0 | internal | enabled | up | 2026-01-22T14:28:46.000000 | [must-gather-9wq4j] POD 2026-01-22T14:28:53.613526303Z +--------------------------------------+----------------+------------------------+----------+---------+-------+----------------------------+ [must-gather-9wq4j] POD 2026-01-22T14:28:53.677021633Z OMC mode: Collecting OLM resources (subscriptions, CSVs, etc.) in OMC format [must-gather-9wq4j] POD 2026-01-22T14:28:54.795254384Z Gathering data for ns/openstack... 
[must-gather-9wq4j] POD 2026-01-22T14:28:55.140745666Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:28:55.620798271Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:28:55.644225155Z Gathering secrets in namespace openstack [must-gather-9wq4j] POD 2026-01-22T14:28:56.238271381Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:28:58.206414428Z [disk usage checker] Volume usage percentage: current = 50 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:28:58.534735813Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:29:03.213842042Z [disk usage checker] Volume usage percentage: current = 50 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:29:08.225101123Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:29:11.480434630Z Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+ [must-gather-9wq4j] POD 2026-01-22T14:29:13.243817696Z [disk usage checker] Volume usage percentage: current = 50 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:29:17.476733268Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:29:18.248345346Z [disk usage checker] Volume usage percentage: current = 50 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:29:19.503143857Z Gathering data for ns/metallb-system... [must-gather-9wq4j] POD 2026-01-22T14:29:21.674457904Z warning: Container container-00 is unable to start due to an error: Back-off pulling image "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296" [must-gather-9wq4j] POD 2026-01-22T14:29:23.287333593Z [disk usage checker] Volume usage percentage: current = 50 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:29:24.942516082Z Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+ [must-gather-9wq4j] POD 2026-01-22T14:29:26.401933232Z Trying to pull registry.redhat.io/rhel9/support-tools:latest... [must-gather-9wq4j] POD 2026-01-22T14:29:27.097084853Z Getting image source signatures [must-gather-9wq4j] POD 2026-01-22T14:29:27.581382669Z Checking if image destination supports signatures [must-gather-9wq4j] POD 2026-01-22T14:29:27.593602535Z Copying blob sha256:b98104ab0e1239a911fc1ca3c8589101c7fa3eb521b2c4b1fb1120038f55fbe9 [must-gather-9wq4j] POD 2026-01-22T14:29:27.593602535Z Copying blob sha256:34b5c851d9cf523f162ceb72c260f1c6d1e556f8f4422e15258572766f2afc28 [must-gather-9wq4j] POD 2026-01-22T14:29:28.043662330Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:29:28.806016556Z [disk usage checker] Volume usage percentage: current = 50 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:29:30.653484514Z Copying config sha256:907c6f8a1bbc29560332663e8e9c85244e317088310a19891a847689ebec5226 [must-gather-9wq4j] POD 2026-01-22T14:29:30.664095624Z Writing manifest to image destination [must-gather-9wq4j] POD 2026-01-22T14:29:30.664095624Z Storing signatures [must-gather-9wq4j] POD 2026-01-22T14:29:30.748047344Z 907c6f8a1bbc29560332663e8e9c85244e317088310a19891a847689ebec5226 [must-gather-9wq4j] POD 2026-01-22T14:29:30.763056339Z .toolboxrc file detected, overriding defaults... [must-gather-9wq4j] POD 2026-01-22T14:29:30.980848521Z Checking if there is a newer version of registry.redhat.io/rhel9/support-tools available... 
[must-gather-9wq4j] POD 2026-01-22T14:29:33.822887676Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:29:33.896459751Z Spawning a container 'toolbox-osp' with image 'registry.redhat.io/rhel9/support-tools'
[must-gather-9wq4j] POD 2026-01-22T14:29:33.896459751Z Detected RUN label in the container image. Using that as the default...
[must-gather-9wq4j] POD 2026-01-22T14:29:35.146105577Z a26558db747de255628287f80b269cdf02961a7834cb7a9aaf3291d9773b088b
[must-gather-9wq4j] POD 2026-01-22T14:29:35.936356563Z toolbox-osp
[must-gather-9wq4j] POD 2026-01-22T14:29:36.068720495Z sos report --batch --all-logs --only-plugins block,cifs,crio,devicemapper,devices,firewall_tables,firewalld,iscsi,lvm2,memory,multipath,nfs,nis,nvme,podman,process,processor,selinux,scsi,udev,logs,crypto --tmp-dir=/var/tmp/sos-osp; exit
[must-gather-9wq4j] POD 2026-01-22T14:29:36.205676425Z [root@crc /]# sos report --batch --all-logs --only-plugins block,cifs,crio,devicemapper,devices,firewall_tables,firewalld,iscsi,lvm2,memory,multipath,nfs,nis,nvme,podman,process,processor,selinux,scsi,udev,logs,crypto --tmp-dir=/var/tmp/sos-osp; exit
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z sos report (version 4.8.2)
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z This command will collect diagnostic and configuration information from
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z this Red Hat Enterprise Linux system and installed applications.
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z An archive containing the collected information will be generated in
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z /var/tmp/sos-osp/sos.9peg032j and may be provided to a Red Hat support
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z representative.
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z Any information provided to Red Hat will be treated in accordance with
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z the published support policies at:
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z Distribution Website : https://www.redhat.com/
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z Commercial Support : https://access.redhat.com/
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z The generated archive may contain data considered sensitive and its
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z content should be reviewed by the originating organization before being
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z passed to any third party.
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z No changes will be made to system configuration.
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z
[must-gather-9wq4j] POD 2026-01-22T14:29:36.232822114Z Setting up archive ...
[must-gather-9wq4j] POD 2026-01-22T14:29:36.238635439Z Setting up plugins ...
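Everything from "Spawning a container" down to "Setting up plugins" is the node-level collection: a support-tools toolbox container is started on the crc node and sos is run inside it. A rough way to reproduce that step interactively (node name, paths and plugin list are taken from the log; the toolbox step assumes the node can pull registry.redhat.io/rhel9/support-tools):

  oc debug node/crc
  chroot /host
  toolbox    # spawns the support-tools container, as the log shows
  sos report --batch --all-logs \
    --only-plugins block,cifs,crio,devicemapper,devices,firewall_tables,firewalld,iscsi,lvm2,memory,multipath,nfs,nis,nvme,podman,process,processor,selinux,scsi,udev,logs,crypto \
    --tmp-dir=/var/tmp/sos-osp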
[must-gather-9wq4j] POD 2026-01-22T14:29:38.831718449Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:29:38.867416851Z Error while masking key: alertmanager.yaml.gz, for file: /must-gather/namespaces/openstack/secrets/metric-storage/alertmanager-metric-storage-generated.yaml
[must-gather-9wq4j] POD 2026-01-22T14:29:38.867416851Z Error while masking key: prometheus.yaml.gz, for file: /must-gather/namespaces/openstack/secrets/metric-storage/prometheus-metric-storage.yaml
[must-gather-9wq4j] POD 2026-01-22T14:29:39.309884241Z Gathering data for ns/openstack-operators...
[must-gather-9wq4j] POD 2026-01-22T14:29:39.754063469Z Gathering secrets in namespace openstack-operators
[must-gather-9wq4j] POD 2026-01-22T14:29:42.116483511Z Wrote inspect data to /must-gather.
[must-gather-9wq4j] POD 2026-01-22T14:29:42.118775356Z Wrote inspect data to /must-gather.
[must-gather-9wq4j] POD 2026-01-22T14:29:42.153602573Z Wrote inspect data to /must-gather.
[must-gather-9wq4j] POD 2026-01-22T14:29:42.883221271Z [plugin:fwupd] skipped command 'fwupdmgr get-approved-firmware': required services missing: fwupd.
[must-gather-9wq4j] POD 2026-01-22T14:29:42.883221271Z [plugin:fwupd] skipped command 'fwupdmgr get-devices --no-unreported-check': required services missing: fwupd.
[must-gather-9wq4j] POD 2026-01-22T14:29:42.883221271Z [plugin:fwupd] skipped command 'fwupdmgr get-history': required services missing: fwupd.
[must-gather-9wq4j] POD 2026-01-22T14:29:42.883221271Z [plugin:fwupd] skipped command 'fwupdmgr get-remotes': required services missing: fwupd.
[must-gather-9wq4j] POD 2026-01-22T14:29:42.883221271Z [plugin:fwupd] skipped command '/usr/libexec/fwupd/fwupdagent get-devices': required services missing: fwupd.
[must-gather-9wq4j] POD 2026-01-22T14:29:42.883221271Z [plugin:fwupd] skipped command '/usr/libexec/fwupd/fwupdagent get-updates': required services missing: fwupd.
[must-gather-9wq4j] POD 2026-01-22T14:29:43.458014500Z [plugin:networking] skipped command 'ip -s macsec show': required kmods missing: macsec. Use '--allow-system-changes' to enable collection.
[must-gather-9wq4j] POD 2026-01-22T14:29:43.838718079Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:29:44.043473582Z Not all environment variables set. Source the environment file for the user intended to connect to the OpenStack environment.
[must-gather-9wq4j] POD 2026-01-22T14:29:44.801742702Z temporary directory /var/tmp/sos-osp does not exist or is not writable
[must-gather-9wq4j] POD 2026-01-22T14:29:44.843751733Z exit
[must-gather-9wq4j] POD 2026-01-22T14:29:44.855483105Z
[must-gather-9wq4j] POD 2026-01-22T14:29:45.572462365Z [plugin:systemd] skipped command 'systemd-resolve --status': required services missing: systemd-resolved.
[must-gather-9wq4j] POD 2026-01-22T14:29:45.572462365Z [plugin:systemd] skipped command 'systemd-resolve --statistics': required services missing: systemd-resolved.
[must-gather-9wq4j] POD 2026-01-22T14:29:46.020321598Z Running plugins. Please wait ...
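Two collection problems surface in this block: a sos run exits early because /var/tmp/sos-osp does not exist or is not writable, and several plugins skip commands because their services or kernel modules are absent. A possible way to address the first one before re-running the gather (the ssh user is an assumption, not taken from the log; the host and path are):

  # Pre-create the temporary directory the sos run expects on the EDPM host
  ssh root@192.168.122.100 'mkdir -p /var/tmp/sos-osp'

The skipped 'ip -s macsec show' command can be collected by adding --allow-system-changes to the sos invocation, as the message itself suggests; the fwupd and systemd-resolved skips are expected on hosts where those services do not run.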
[must-gather-9wq4j] POD 2026-01-22T14:29:46.020321598Z [must-gather-9wq4j] POD 2026-01-22T14:29:46.021270295Z Starting 3/81 auditd [Running: auditd] [must-gather-9wq4j] POD 2026-01-22T14:29:46.021817950Z Starting 4/81 autofs [Running: auditd autofs] [must-gather-9wq4j] POD 2026-01-22T14:29:46.022823229Z Starting 1/81 anacron [Running: auditd autofs anacron] [must-gather-9wq4j] POD 2026-01-22T14:29:46.023628082Z Starting 2/81 ata [Running: auditd autofs anacron ata] [must-gather-9wq4j] POD 2026-01-22T14:29:46.024495326Z Starting 5/81 block [Running: auditd autofs anacron block] [must-gather-9wq4j] POD 2026-01-22T14:29:46.031910976Z tar: Removing leading `/' from member names [must-gather-9wq4j] POD 2026-01-22T14:29:46.031910976Z tar: /var/log/pods/*/*.log.*: Warning: Cannot stat: No such file or directory [must-gather-9wq4j] POD 2026-01-22T14:29:46.037654369Z Starting 6/81 boot [Running: auditd autofs block boot] [must-gather-9wq4j] POD 2026-01-22T14:29:46.155693744Z Starting 7/81 buildah [Running: auditd block boot buildah] [must-gather-9wq4j] POD 2026-01-22T14:29:46.256623825Z Starting 8/81 ceph_common [Running: block boot buildah ceph_common] [must-gather-9wq4j] POD 2026-01-22T14:29:46.404050203Z Starting 9/81 cgroups [Running: block boot buildah cgroups] [must-gather-9wq4j] POD 2026-01-22T14:29:46.912063141Z Starting 10/81 chrony [Running: boot buildah cgroups chrony] [must-gather-9wq4j] POD 2026-01-22T14:29:47.178308485Z Starting 11/81 console [Running: boot buildah cgroups console] [must-gather-9wq4j] POD 2026-01-22T14:29:47.355662552Z Starting 12/81 containers_common [Running: boot buildah cgroups containers_common] [must-gather-9wq4j] POD 2026-01-22T14:29:47.379495117Z Starting 13/81 coredump [Running: boot buildah cgroups coredump] [must-gather-9wq4j] POD 2026-01-22T14:29:47.381969127Z Starting 14/81 cron [Running: boot buildah cgroups cron] [must-gather-9wq4j] POD 2026-01-22T14:29:47.410573058Z Starting 15/81 crypto [Running: boot buildah cgroups crypto] [must-gather-9wq4j] POD 2026-01-22T14:29:47.649899021Z Starting 16/81 dbus [Running: boot buildah cgroups dbus] [must-gather-9wq4j] POD 2026-01-22T14:29:47.702743328Z Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+ [must-gather-9wq4j] POD 2026-01-22T14:29:47.715352325Z Starting 17/81 devicemapper [Running: boot buildah cgroups devicemapper] [must-gather-9wq4j] POD 2026-01-22T14:29:48.031967559Z Starting 18/81 devices [Running: boot buildah cgroups devices] [must-gather-9wq4j] POD 2026-01-22T14:29:48.137546861Z Starting 19/81 dnf [Running: boot buildah cgroups dnf] [must-gather-9wq4j] POD 2026-01-22T14:29:48.854555371Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:29:49.034042648Z Gathering data for ns/openshift-machine-api... 
[must-gather-9wq4j] POD 2026-01-22T14:29:49.396845660Z Gathering secrets in namespace openshift-machine-api
[must-gather-9wq4j] POD 2026-01-22T14:29:49.474041808Z Starting 20/81 filesys [Running: boot buildah dnf filesys]
[must-gather-9wq4j] POD 2026-01-22T14:29:49.501861587Z Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+
[must-gather-9wq4j] POD 2026-01-22T14:29:49.832330382Z Starting 21/81 firewall_tables [Running: boot buildah dnf firewall_tables]
[must-gather-9wq4j] POD 2026-01-22T14:29:49.979659728Z Starting 22/81 fwupd [Running: boot buildah dnf fwupd]
[must-gather-9wq4j] POD 2026-01-22T14:29:50.088605085Z Starting 23/81 hardware [Running: boot buildah dnf hardware]
[must-gather-9wq4j] POD 2026-01-22T14:29:50.302138357Z Wrote inspect data to /must-gather.
[must-gather-9wq4j] POD 2026-01-22T14:29:50.678813791Z Wrote inspect data to /must-gather.
[must-gather-9wq4j] POD 2026-01-22T14:29:51.022097000Z Starting 24/81 host [Running: boot buildah dnf host]
[must-gather-9wq4j] POD 2026-01-22T14:29:51.142275656Z Wrote inspect data to /must-gather.
[must-gather-9wq4j] POD 2026-01-22T14:29:51.246032986Z Starting 25/81 i18n [Running: buildah dnf host i18n]
[must-gather-9wq4j] POD 2026-01-22T14:29:51.265169849Z Starting 26/81 insights [Running: buildah dnf host insights]
[must-gather-9wq4j] POD 2026-01-22T14:29:51.301905610Z Wrote inspect data to /must-gather.
[must-gather-9wq4j] POD 2026-01-22T14:29:51.359864662Z Starting 27/81 iscsi [Running: buildah dnf insights iscsi]
[must-gather-9wq4j] POD 2026-01-22T14:29:51.463152660Z Starting 28/81 kdump [Running: buildah dnf insights kdump]
[must-gather-9wq4j] POD 2026-01-22T14:29:51.478702220Z Wrote inspect data to /must-gather.
[must-gather-9wq4j] POD 2026-01-22T14:29:52.177397352Z Starting 29/81 kernel [Running: buildah dnf insights kernel]
[must-gather-9wq4j] POD 2026-01-22T14:29:53.407933756Z Starting 30/81 keyutils [Running: buildah dnf kernel keyutils]
[must-gather-9wq4j] POD 2026-01-22T14:29:53.435336063Z Starting 31/81 krb5 [Running: buildah dnf kernel krb5]
[must-gather-9wq4j] POD 2026-01-22T14:29:53.491204206Z Starting 32/81 kvm [Running: buildah dnf kernel kvm]
[must-gather-9wq4j] POD 2026-01-22T14:29:53.689414274Z Starting 33/81 ldap [Running: buildah dnf kvm ldap]
[must-gather-9wq4j] POD 2026-01-22T14:29:53.752243954Z Starting 34/81 libraries [Running: buildah dnf kvm libraries]
[must-gather-9wq4j] POD 2026-01-22T14:29:53.754951741Z Starting 35/81 libvirt [Running: buildah dnf kvm libvirt]
[must-gather-9wq4j] POD 2026-01-22T14:29:53.828474095Z Starting 36/81 login [Running: buildah dnf kvm login]
[must-gather-9wq4j] POD 2026-01-22T14:29:53.866999317Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:29:54.048040327Z Starting 37/81 logrotate [Running: buildah dnf kvm logrotate]
[must-gather-9wq4j] POD 2026-01-22T14:29:54.796884689Z Starting 38/81 logs [Running: buildah dnf kvm logs]
[must-gather-9wq4j] POD 2026-01-22T14:29:56.004806303Z Starting 39/81 lvm2 [Running: buildah dnf logs lvm2]
[must-gather-9wq4j] POD 2026-01-22T14:29:56.492830164Z Starting 40/81 md [Running: buildah logs lvm2 md]
[must-gather-9wq4j] POD 2026-01-22T14:29:56.516040322Z Starting 41/81 memory [Running: buildah logs md memory]
[must-gather-9wq4j] POD 2026-01-22T14:29:56.517500533Z Starting 42/81 multipath [Running: buildah logs memory multipath]
[must-gather-9wq4j] POD 2026-01-22T14:29:56.610669223Z Starting 43/81 networking [Running: buildah logs memory networking]
[must-gather-9wq4j] POD 2026-01-22T14:29:56.685354620Z Starting 44/81 networkmanager [Running: buildah logs networking networkmanager]
[must-gather-9wq4j] POD 2026-01-22T14:29:57.947554541Z Starting 45/81 nfs [Running: buildah logs networking nfs]
[must-gather-9wq4j] POD 2026-01-22T14:29:58.317016042Z Starting 46/81 numa [Running: buildah logs networking numa]
[must-gather-9wq4j] POD 2026-01-22T14:29:58.412458537Z Starting 47/81 nvme [Running: buildah logs networking nvme]
[must-gather-9wq4j] POD 2026-01-22T14:29:58.506117781Z Starting 48/81 openhpi [Running: buildah logs networking openhpi]
[must-gather-9wq4j] POD 2026-01-22T14:29:58.514587951Z Starting 49/81 openstack_edpm [Running: buildah logs networking openstack_edpm]
[must-gather-9wq4j] POD 2026-01-22T14:29:58.878501665Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:29:59.162717480Z Starting 50/81 openstack_neutron [Running: buildah networking openstack_edpm openstack_neutron]
[must-gather-9wq4j] POD 2026-01-22T14:29:59.232871148Z Starting 51/81 openstack_nova [Running: buildah networking openstack_edpm openstack_nova]
[must-gather-9wq4j] POD 2026-01-22T14:29:59.290838411Z Starting 52/81 openvswitch [Running: buildah networking openstack_edpm openvswitch]
[must-gather-9wq4j] POD 2026-01-22T14:30:02.802360779Z Starting 53/81 ovn_host [Running: buildah networking openstack_edpm ovn_host]
[must-gather-9wq4j] POD 2026-01-22T14:30:03.053197368Z Starting 54/81 pam [Running: buildah networking openstack_edpm pam]
[must-gather-9wq4j] POD 2026-01-22T14:30:03.311493098Z Starting 55/81 pci [Running: buildah networking openstack_edpm pci]
[must-gather-9wq4j] POD 2026-01-22T14:30:03.441369869Z Starting 56/81 podman [Running: buildah networking openstack_edpm podman]
[must-gather-9wq4j] POD 2026-01-22T14:30:03.890847967Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:30:06.155462317Z Starting 57/81 process [Running: buildah openstack_edpm podman process]
[must-gather-9wq4j] POD 2026-01-22T14:30:08.409343144Z Starting 58/81 processor [Running: buildah podman process processor]
[must-gather-9wq4j] POD 2026-01-22T14:30:08.581091790Z tar: Removing leading `/' from hard link targets
[must-gather-9wq4j] POD 2026-01-22T14:30:08.897877728Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:30:10.081393750Z Starting 59/81 python [Running: buildah podman processor python]
[must-gather-9wq4j] POD 2026-01-22T14:30:10.132202810Z Starting 60/81 release [Running: buildah podman processor release]
[must-gather-9wq4j] POD 2026-01-22T14:30:10.172477232Z Starting 61/81 rpm [Running: buildah podman processor rpm]
[must-gather-9wq4j] POD 2026-01-22T14:30:10.208339978Z
[must-gather-9wq4j] POD 2026-01-22T14:30:10.208339978Z Removing debug pod ...
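The numbered "Starting N/81" lines are the sos plugins selected by the --only-plugins list running on the node. When tuning that list for a local run, the available plugins can be inspected first (standard sos options, shown here only as a pointer; the podman example is arbitrary):

  # List plugins, their options, and which would be enabled on this host
  sos report --list-plugins

  # Plugins can also be excluded individually instead of whitelisting them
  sos report --batch --skip-plugins podman --tmp-dir=/var/tmp/sos-osp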
[must-gather-9wq4j] POD 2026-01-22T14:30:10.816578366Z Starting 62/81 sar [Running: buildah podman processor sar] [must-gather-9wq4j] POD 2026-01-22T14:30:10.820522228Z Starting 63/81 scsi [Running: buildah podman processor scsi] [must-gather-9wq4j] POD 2026-01-22T14:30:11.174984123Z Starting 64/81 selinux [Running: buildah podman processor selinux] [must-gather-9wq4j] POD 2026-01-22T14:30:11.263262365Z Retrieving SOS Report for crc [must-gather-9wq4j] POD 2026-01-22T14:30:13.905380774Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:30:15.432134493Z Starting pod/crc-debug-rmv62 ... [must-gather-9wq4j] POD 2026-01-22T14:30:15.432134493Z To use host binaries, run `chroot /host` [must-gather-9wq4j] POD 2026-01-22T14:30:15.752061679Z Starting 65/81 services [Running: buildah podman processor services] [must-gather-9wq4j] POD 2026-01-22T14:30:15.813829640Z Starting 66/81 snmp [Running: buildah podman processor snmp] [must-gather-9wq4j] POD 2026-01-22T14:30:15.813829640Z Starting 67/81 ssh [Running: buildah podman processor ssh] [must-gather-9wq4j] POD 2026-01-22T14:30:15.949658579Z Starting 68/81 subscription_manager [Running: buildah podman processor subscription_manager] [must-gather-9wq4j] POD 2026-01-22T14:30:16.141429754Z [must-gather-9wq4j] POD 2026-01-22T14:30:16.141429754Z Removing debug pod ... [must-gather-9wq4j] POD 2026-01-22T14:30:18.914804424Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:30:19.271723928Z Starting 69/81 sudo [Running: buildah podman subscription_manager sudo] [must-gather-9wq4j] POD 2026-01-22T14:30:19.275983349Z Starting 70/81 sunrpc [Running: buildah podman subscription_manager sunrpc] [must-gather-9wq4j] POD 2026-01-22T14:30:19.311471515Z Starting 71/81 system [Running: buildah podman subscription_manager system] [must-gather-9wq4j] POD 2026-01-22T14:30:19.652735867Z Starting 72/81 systemd [Running: buildah podman system systemd] [must-gather-9wq4j] POD 2026-01-22T14:30:20.722488924Z Starting 73/81 sysvipc [Running: buildah podman systemd sysvipc] [must-gather-9wq4j] POD 2026-01-22T14:30:20.764063993Z Starting 74/81 tpm2 [Running: buildah podman systemd tpm2] [must-gather-9wq4j] POD 2026-01-22T14:30:20.870225711Z Starting 75/81 tuned [Running: buildah podman systemd tuned] [must-gather-9wq4j] POD 2026-01-22T14:30:21.765119483Z Starting 76/81 udev [Running: buildah podman systemd udev] [must-gather-9wq4j] POD 2026-01-22T14:30:21.799268731Z Starting 77/81 unbound [Running: buildah podman systemd unbound] [must-gather-9wq4j] POD 2026-01-22T14:30:21.799895959Z Starting 78/81 vhostmd [Running: buildah podman systemd vhostmd] [must-gather-9wq4j] POD 2026-01-22T14:30:21.800331941Z Starting 79/81 virsh [Running: buildah podman systemd virsh] [must-gather-9wq4j] POD 2026-01-22T14:30:23.929814941Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:30:24.070129707Z Starting 80/81 xen [Running: buildah podman systemd xen] [must-gather-9wq4j] POD 2026-01-22T14:30:24.070129707Z Finishing plugins [Running: buildah podman systemd] [must-gather-9wq4j] POD 2026-01-22T14:30:24.070129707Z Starting 81/81 xfs [Running: buildah podman systemd xfs] [must-gather-9wq4j] POD 2026-01-22T14:30:24.415076354Z Finishing plugins [Running: buildah podman systemd] [must-gather-9wq4j] POD 2026-01-22T14:30:27.290499323Z Finishing plugins [Running: buildah podman] [must-gather-9wq4j] POD 
2026-01-22T14:30:28.938059446Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:30:33.946493907Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:30:39.122989831Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:30:44.135601362Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:30:49.146143592Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:30:54.160167983Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:30:59.799240627Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:31:04.813698859Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:31:09.868278227Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:31:14.893818604Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:31:19.902062331Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:31:25.309174540Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:31:26.721724353Z Finishing plugins [Running: podman] [must-gather-9wq4j] POD 2026-01-22T14:31:30.318591380Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:31:35.329748948Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:31:40.336400989Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:31:45.348364149Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:31:50.358246512Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:31:55.367402044Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:32:00.375340541Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:32:05.384114932Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:32:10.395946988Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:32:15.406969633Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:32:20.415233339Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:32:25.427161518Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:32:30.437440842Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:32:35.446402738Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:32:40.455247231Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 
[must-gather-9wq4j] POD 2026-01-22T14:32:45.467276263Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:32:50.478940453Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:32:55.490835002Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:33:00.502645249Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:33:06.209368981Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:33:11.219592343Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:33:17.005825018Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:33:22.014452663Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:33:27.036447398Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:33:32.048735909Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:33:37.084733351Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:33:42.097004332Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:33:47.302481847Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:33:52.318227266Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:33:57.334835708Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:34:02.346110749Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:34:07.356346532Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:34:12.377823882Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:34:17.380255434Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:34:22.392630488Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:34:27.404878577Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:34:32.418965998Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:34:37.427009015Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:34:42.436018243Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:34:47.445487835Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:34:52.456922470Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:34:57.466202996Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:35:03.217210202Z [disk usage 
checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:35:03.441638413Z
[must-gather-9wq4j] POD 2026-01-22T14:35:03.441638413Z Plugin podman timed out
[must-gather-9wq4j] POD 2026-01-22T14:35:03.441638413Z
[must-gather-9wq4j] POD 2026-01-22T14:35:03.457827862Z
[must-gather-9wq4j] POD 2026-01-22T14:35:08.226206658Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:35:13.236760809Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:35:18.252515018Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:35:23.266965720Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:35:28.276613586Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:35:33.323671562Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:35:38.330811086Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:35:43.342369967Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:35:48.352529897Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:35:53.363544341Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:35:58.377199551Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:36:03.388364029Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:36:08.398489159Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:36:13.410353848Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:36:18.415159805Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:36:24.103679868Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:36:29.112238572Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:36:34.125017307Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:36:39.132415769Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:36:44.142494177Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:36:49.152107653Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:36:54.602319852Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:36:59.615639641Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:37:02.064170843Z Creating compressed archive...
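The podman plugin hitting its timeout is why the collection idles for several minutes here before the archive is created. If that recurs, the per-plugin time budget can be raised on the sos command line (a hedged suggestion; 900 seconds is an arbitrary example value):

  sos report --batch --plugin-timeout 900 --only-plugins podman --tmp-dir=/var/tmp/sos-osp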
[must-gather-9wq4j] POD 2026-01-22T14:37:02.064170843Z
[must-gather-9wq4j] POD 2026-01-22T14:37:02.064170843Z Your sos report has been generated and saved in:
[must-gather-9wq4j] POD 2026-01-22T14:37:02.064170843Z /var/tmp/sos-osp/sosreport-standalone-2026-01-22-vzfgenb.tar.xz
[must-gather-9wq4j] POD 2026-01-22T14:37:02.064170843Z
[must-gather-9wq4j] POD 2026-01-22T14:37:02.064170843Z Size 54.35MiB
[must-gather-9wq4j] POD 2026-01-22T14:37:02.064170843Z Owner root
[must-gather-9wq4j] POD 2026-01-22T14:37:02.064170843Z sha256 5811ab8efd43b155b09dd014ff81812de267ab7eaa01f0689db96bd256789b74
[must-gather-9wq4j] POD 2026-01-22T14:37:02.064170843Z
[must-gather-9wq4j] POD 2026-01-22T14:37:02.064170843Z Please send this file to your support representative.
[must-gather-9wq4j] POD 2026-01-22T14:37:02.064170843Z
[must-gather-9wq4j] POD 2026-01-22T14:37:02.083652185Z Retrieving SOS Report for standalone.ooo.test
[must-gather-9wq4j] POD 2026-01-22T14:37:02.914652087Z Finished retrieving SOS Report for standalone.ooo.test
[must-gather-9wq4j] POD 2026-01-22T14:37:03.455908526Z Gathering data for ns/cert-manager...
[must-gather-9wq4j] POD 2026-01-22T14:37:03.775271436Z Gathering secrets in namespace cert-manager
[must-gather-9wq4j] POD 2026-01-22T14:37:03.997405542Z Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+
[must-gather-9wq4j] POD 2026-01-22T14:37:04.626583253Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:37:05.105220088Z Wrote inspect data to /must-gather.
[must-gather-9wq4j] POD 2026-01-22T14:37:05.425818624Z Wrote inspect data to /must-gather.
[must-gather-9wq4j] POD 2026-01-22T14:37:05.494313655Z Wrote inspect data to /must-gather.
[must-gather-9wq4j] POD 2026-01-22T14:37:05.702854926Z Wrote inspect data to /must-gather.
[must-gather-9wq4j] POD 2026-01-22T14:37:08.972180990Z Gathering data for ns/openshift-nmstate...
[must-gather-9wq4j] POD 2026-01-22T14:37:09.431892698Z Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+
[must-gather-9wq4j] POD 2026-01-22T14:37:10.007033838Z Gathering secrets in namespace openshift-nmstate
[must-gather-9wq4j] POD 2026-01-22T14:37:10.010730173Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:37:10.475473734Z Wrote inspect data to /must-gather.
[must-gather-9wq4j] POD 2026-01-22T14:37:10.829340182Z Wrote inspect data to /must-gather.
[must-gather-9wq4j] POD 2026-01-22T14:37:11.166564599Z Wrote inspect data to /must-gather.
[must-gather-9wq4j] POD 2026-01-22T14:37:11.598712277Z Wrote inspect data to /must-gather.
[must-gather-9wq4j] POD 2026-01-22T14:37:15.033417218Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80
[must-gather-9wq4j] POD 2026-01-22T14:37:15.396705934Z Gathering data for ns/openshift-operators...
[must-gather-9wq4j] POD 2026-01-22T14:37:15.848942970Z Gathering secrets in namespace openshift-operators
[must-gather-9wq4j] POD 2026-01-22T14:37:16.007642788Z Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+
[must-gather-9wq4j] POD 2026-01-22T14:37:17.280174892Z Wrote inspect data to /must-gather.
[must-gather-9wq4j] POD 2026-01-22T14:37:18.459937117Z Wrote inspect data to /must-gather.
[must-gather-9wq4j] POD 2026-01-22T14:37:18.894831062Z Wrote inspect data to /must-gather.
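Once the archives have been pulled off the nodes, they can be checked against the checksum printed above and unpacked locally. The file name and sha256 below are taken from the log; the oc debug copy is only one common way to fetch such files from a CoreOS node and assumes the archive is still present under /var/tmp/sos-osp:

  # Copy a sosreport off the node via a debug pod (the host filesystem is mounted at /host)
  oc debug node/crc -- bash -c 'cat /host/var/tmp/sos-osp/sosreport-*.tar.xz' > sosreport-node.tar.xz

  # Verify and unpack the EDPM report referenced above
  echo "5811ab8efd43b155b09dd014ff81812de267ab7eaa01f0689db96bd256789b74  sosreport-standalone-2026-01-22-vzfgenb.tar.xz" | sha256sum -c
  tar -xJf sosreport-standalone-2026-01-22-vzfgenb.tar.xz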
[must-gather-9wq4j] POD 2026-01-22T14:37:18.898598289Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:20.041633474Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:37:22.092785364Z Gathering data for ns/metallb-system... [must-gather-9wq4j] POD 2026-01-22T14:37:22.403753217Z Gathering secrets in namespace metallb-system [must-gather-9wq4j] POD 2026-01-22T14:37:24.049194930Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:24.266837518Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:24.421950064Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:25.052215746Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:37:27.263895506Z Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+ [must-gather-9wq4j] POD 2026-01-22T14:37:28.610665035Z Gathering data for ns/openshift-marketplace... [must-gather-9wq4j] POD 2026-01-22T14:37:29.018419660Z Gathering secrets in namespace openshift-marketplace [must-gather-9wq4j] POD 2026-01-22T14:37:29.195251641Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:30.064461236Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:37:30.495314837Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:30.900822939Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:31.009376365Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:33.903458645Z Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+ [must-gather-9wq4j] POD 2026-01-22T14:37:35.061839754Z Gathering data for ns/openshift-operators... [must-gather-9wq4j] POD 2026-01-22T14:37:35.077485658Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:37:35.382778390Z Gathering secrets in namespace openshift-operators [must-gather-9wq4j] POD 2026-01-22T14:37:35.600466227Z Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+ [must-gather-9wq4j] POD 2026-01-22T14:37:37.142693127Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:37.159494973Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:37.562329077Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:37.655147122Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:37.891573879Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:40.092599843Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:37:42.962398953Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:43.318598247Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:43.347256258Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:43.470593246Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:43.663795851Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:44.925998251Z Wrote inspect data to /must-gather. 
[must-gather-9wq4j] POD 2026-01-22T14:37:45.103788210Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:37:45.394774980Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:45.517942944Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:45.540416010Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:45.685215725Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:47.373397383Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:47.508949367Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:47.956664831Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:47.990110227Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:48.056958207Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:49.841570894Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:50.054103705Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:50.609169004Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:37:51.128002509Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:51.138024202Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:51.210595425Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:52.787273430Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:52.998958198Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:53.288148987Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:53.364987851Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:53.586105935Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:55.345700523Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:55.625157667Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:37:56.119884910Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:56.134163034Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:56.136036097Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:56.138044754Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:57.453408487Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:58.669418751Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:37:59.513538617Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:00.326110600Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:00.520972641Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:00.636370075Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:38:00.818947309Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:01.105870124Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:01.410660494Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:02.679568665Z Wrote inspect data to /must-gather. 
[must-gather-9wq4j] POD 2026-01-22T14:38:02.930251706Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:03.545530429Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:04.349007415Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:04.814811419Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:05.650988340Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:38:07.988608139Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:08.153512492Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:08.190160268Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:08.331714122Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:08.559116184Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:09.790247146Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:10.035902724Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:10.037352415Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:10.268607096Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:10.378253457Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:10.685233820Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:38:10.998130770Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:11.013902446Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:11.484126616Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:11.842082580Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:12.246621772Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:12.940731584Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:13.017433184Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:13.074125827Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:13.184032306Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:13.451576583Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:14.140995923Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:15.696145388Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:38:16.136540655Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:16.644448771Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:16.657733876Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:16.690016519Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:16.696403490Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:16.731754750Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:18.785892839Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:18.810697161Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:18.938192527Z Wrote inspect data to /must-gather. 
[must-gather-9wq4j] POD 2026-01-22T14:38:18.977490568Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:19.447405959Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:20.726079996Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:38:21.390076447Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:21.434273827Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:21.600158349Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:21.684574646Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:21.735127576Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:21.875451555Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:22.012584324Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:24.602746694Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:24.650152735Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:24.711375306Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:24.829581990Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:24.841153607Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:25.739074564Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:38:27.418305059Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:27.449150052Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:27.833938675Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:27.870279653Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:27.910498931Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:28.025670678Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:28.082243198Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:29.546612486Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:29.592442943Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:29.681521082Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:29.733886853Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:29.764077237Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:30.770748070Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:38:31.809932513Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:31.847748373Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:31.919318927Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:31.944086137Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:31.986965880Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:34.694105279Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:34.711582423Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:34.735081348Z Wrote inspect data to /must-gather. 
[must-gather-9wq4j] POD 2026-01-22T14:38:34.754341263Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:34.861273087Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:35.383549309Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:35.441564260Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:35.684237004Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:35.745617520Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:35.780988801Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:38:36.293537598Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:37.549829840Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:38.129664241Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:38.181600490Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:38.188498835Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:38.464453560Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:39.956147872Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:40.810788254Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:38:40.816928118Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:41.027109982Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:41.313462802Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:41.513455988Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:41.607768406Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:41.892833759Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:42.499205610Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:42.529691602Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:42.542979698Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:42.554858224Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:42.849687063Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:42.853937873Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:44.849126665Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:44.853867699Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:44.859961091Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:45.035845046Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:45.377992773Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:45.824671977Z [disk usage checker] Volume usage percentage: current = 51 ; allowed = 80 [must-gather-9wq4j] POD 2026-01-22T14:38:46.941712912Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:47.505984051Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:48.054443414Z Wrote inspect data to /must-gather. [must-gather-9wq4j] POD 2026-01-22T14:38:48.158670222Z Wrote inspect data to /must-gather. 
[must-gather-9wq4j] POD 2026-01-22T14:38:48.167241144Z Wrote inspect data to /must-gather.
[... "Wrote inspect data to /must-gather." recurs at sub-second to multi-second intervals (last occurrence 2026-01-22T14:39:59.481563156Z), and "[disk usage checker] Volume usage percentage: current = 51 ; allowed = 80" is logged every ~5 s; the distinct events in this window are the following ...]
[must-gather-9wq4j] POD 2026-01-22T14:39:36.876895064Z Gathering data for ns/openshift-operators...
[must-gather-9wq4j] POD 2026-01-22T14:39:37.044068622Z Gathering data for ns/cert-manager...
[must-gather-9wq4j] POD 2026-01-22T14:39:37.429625267Z Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+
[must-gather-9wq4j] POD 2026-01-22T14:39:37.541572183Z Warning: apps.openshift.io/v1 DeploymentConfig is deprecated in v4.14+, unavailable in v4.10000+
[must-gather-9wq4j] POD 2026-01-22T14:39:37.909570271Z Gathering data for ns/metallb-system...
[must-gather-9wq4j] POD 2026-01-22T14:39:37.994719470Z Gathering data for ns/openstack-operators...
[must-gather-9wq4j] POD 2026-01-22T14:39:46.278273293Z Gathering data for ns/cert-manager...
[must-gather-9wq4j] POD 2026-01-22T14:39:47.056182395Z Gathering data for ns/openshift-nmstate...
[must-gather-9wq4j] POD 2026-01-22T14:39:47.161240237Z Gathering data for ns/openshift-machine-api...
[must-gather-9wq4j] POD 2026-01-22T14:39:48.233380072Z Gathering data for ns/openstack-operators...
[must-gather-9wq4j] POD 2026-01-22T14:39:55.549529621Z Gathering data for ns/openshift-monitoring...
[must-gather-9wq4j] POD 2026-01-22T14:39:56.135695111Z Gathering data for ns/openshift-multus...
[... from 2026-01-22T14:40:01Z onward only the disk usage checker heartbeat appears ...]
[... the disk usage checker heartbeat continues every ~5 s, reporting current = 51 (occasionally 52) against allowed = 80, until 2026-01-22T15:13:35.420283087Z ...]
The must gather command did not finish on time! 2700.0 seconds was not enough to finish the task.
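Note: the timeout message above is printed by the CI wrapper that drives the collection; the wrapper itself is not part of this log, only the pod output is. A minimal sketch of that kind of deadline handling is shown below; the wrapped command, helper name, and error handling are assumptions, only the 2700-second budget and the message text come from the log.

```python
import subprocess

# Taken from the log above; everything else in this sketch is assumed.
GATHER_CMD = ["oc", "adm", "must-gather"]  # assumed collection command
TIMEOUT_SECONDS = 2700.0                   # budget reported in the log


def run_gather_with_deadline(cmd, timeout):
    """Run the collection command and report a timeout the way the log does."""
    try:
        subprocess.run(cmd, timeout=timeout, check=False)
    except subprocess.TimeoutExpired:
        print(f"The must gather command did not finish on time! "
              f"{timeout} seconds was not enough to finish the task.")
        return False
    return True


if __name__ == "__main__":
    run_gather_with_deadline(GATHER_CMD, TIMEOUT_SECONDS)
```

In the job above the budget expired while the pod was still emitting only the disk usage heartbeat, which is why the tail of ansible.log contains nothing but those lines before the abort.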
home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_011_run_standalone.log0000644000175000017500000454426015134437263027515 0ustar zuulzuul~/src/github.com/openstack-k8s-operators/install_yamls/devsetup ~/ci-framework-data/artifacts scripts/standalone.sh 0 libvirt ''[]'' ''[]'' ++ mktemp -d + MY_TMP_DIR=/tmp/tmp.BZ3C5ORKFo + trap 'rv=$?; rm -rf -- "$MY_TMP_DIR"; exit $rv' EXIT + export VIRSH_DEFAULT_CONNECT_URI=qemu:///system + VIRSH_DEFAULT_CONNECT_URI=qemu:///system +++ dirname scripts/standalone.sh ++ cd scripts ++ pwd -P + SCRIPTPATH=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/scripts + EDPM_COMPUTE_SUFFIX=0 + COMPUTE_DRIVER=libvirt + EDPM_COMPUTE_ADDITIONAL_NETWORKS='[]' + EDPM_COMPUTE_ADDITIONAL_HOST_ROUTES='[]' + EDPM_COMPUTE_NAME=edpm-compute-0 + EDPM_COMPUTE_NETWORK=default + STANDALONE_VM=false + [[ false == \t\r\u\e ]] + IP_ADRESS_SUFFIX=100 + IP=192.168.122.100 + OS_NET_CONFIG_IFACE=nic2 + GATEWAY=192.168.122.10 + OUTPUT_DIR=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/scripts/../../out/edpm/ + SSH_KEY_FILE=/home/zuul/.ssh/id_rsa + SSH_OPT='-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i /home/zuul/.ssh/id_rsa' + REPO_SETUP_CMDS=/home/zuul/cdn_subscription_repos.sh + CMDS_FILE=/tmp/tmp.BZ3C5ORKFo/standalone_cmds + SKIP_TRIPLEO_REPOS=false + CLEANUP_DIR_CMD='rm -Rf' + EDPM_COMPUTE_VCPUS=8 + EDPM_COMPUTE_RAM=20 + EDPM_COMPUTE_DISK_SIZE=70 + EDPM_CONFIGURE_HUGEPAGES=false + EDPM_COMPUTE_CEPH_ENABLED=false + EDPM_COMPUTE_CEPH_NOVA=false + EDPM_COMPUTE_SRIOV_ENABLED=true + EDPM_COMPUTE_DHCP_AGENT_ENABLED=true + BARBICAN_ENABLED=false + MANILA_ENABLED=true + SWIFT_REPLICATED=false + TLSE_ENABLED=true + CLOUD_DOMAIN=ooo.test + OCTAVIA_ENABLED=true + DESIGNATE_ENABLED=false + TELEMETRY_ENABLED=true + [[ ! -f /home/zuul/.ssh/id_rsa ]] + source /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/scripts/common.sh ++ OVN_NBCTL=ovn-nbctl ++ OVN_SBCTL=ovn-sbctl ++ OVN_NB_SERVICE=ovsdbserver-nb ++ OVN_SB_SERVICE=ovsdbserver-sb + [[ ! -f /home/zuul/cdn_subscription_repos.sh ]] + [[ -e /run/systemd/resolve/resolv.conf ]] + HOST_PRIMARY_RESOLV_CONF_ENTRY=192.168.122.10 + [[ ! -f /tmp/tmp.BZ3C5ORKFo/standalone_cmds ]] + cat ++ ssh -o BatchMode=yes -o ConnectTimeout=5 -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i /home/zuul/.ssh/id_rsa root@192.168.122.100 echo ok Warning: Permanently added '192.168.122.100' (ECDSA) to the list of known hosts. 
+ [[ ok != \o\k ]] + '[' libvirt == ironic ']' + PRIMARY_RESOLV_CONF_ENTRY=192.168.122.10 ++ mktemp --suffix=.yaml --tmpdir=/tmp/tmp.BZ3C5ORKFo + J2_VARS_FILE=/tmp/tmp.BZ3C5ORKFo/tmp.q3CZDZ8ooF.yaml + cat + jinja2_render standalone/network_data.j2 /tmp/tmp.BZ3C5ORKFo/tmp.q3CZDZ8ooF.yaml + local j2_template_file + local j2_vars + j2_template_file=standalone/network_data.j2 + j2_vars_file=/tmp/tmp.BZ3C5ORKFo/tmp.q3CZDZ8ooF.yaml + /usr/bin/python3 -c ' import jinja2 import os import yaml def to_bool(a): '\'''\'''\'' return a bool for the arg '\'''\'''\'' if a is None or isinstance(a, bool): return a if isinstance(a, str): a = a.lower() if a in ('\''yes'\'', '\''on'\'', '\''1'\'', '\''true'\'', 1): return True return False with open('\''/tmp/tmp.BZ3C5ORKFo/tmp.q3CZDZ8ooF.yaml'\'', '\''r'\'') as f: vars = yaml.safe_load(f.read()) loader = jinja2.FileSystemLoader(os.path.dirname('\''standalone/network_data.j2'\'')) env = jinja2.Environment(autoescape=True, loader=loader) env.filters['\''bool'\''] = to_bool print(env.get_template(os.path.basename('\''standalone/network_data.j2'\'')).render(**vars)) ' + jinja2_render standalone/deployed_network.j2 /tmp/tmp.BZ3C5ORKFo/tmp.q3CZDZ8ooF.yaml + local j2_template_file + local j2_vars + j2_template_file=standalone/deployed_network.j2 + j2_vars_file=/tmp/tmp.BZ3C5ORKFo/tmp.q3CZDZ8ooF.yaml + /usr/bin/python3 -c ' import jinja2 import os import yaml def to_bool(a): '\'''\'''\'' return a bool for the arg '\'''\'''\'' if a is None or isinstance(a, bool): return a if isinstance(a, str): a = a.lower() if a in ('\''yes'\'', '\''on'\'', '\''1'\'', '\''true'\'', 1): return True return False with open('\''/tmp/tmp.BZ3C5ORKFo/tmp.q3CZDZ8ooF.yaml'\'', '\''r'\'') as f: vars = yaml.safe_load(f.read()) loader = jinja2.FileSystemLoader(os.path.dirname('\''standalone/deployed_network.j2'\'')) env = jinja2.Environment(autoescape=True, loader=loader) env.filters['\''bool'\''] = to_bool print(env.get_template(os.path.basename('\''standalone/deployed_network.j2'\'')).render(**vars)) ' + jinja2_render standalone/net_config.j2 /tmp/tmp.BZ3C5ORKFo/tmp.q3CZDZ8ooF.yaml + local j2_template_file + local j2_vars + j2_template_file=standalone/net_config.j2 + j2_vars_file=/tmp/tmp.BZ3C5ORKFo/tmp.q3CZDZ8ooF.yaml + /usr/bin/python3 -c ' import jinja2 import os import yaml def to_bool(a): '\'''\'''\'' return a bool for the arg '\'''\'''\'' if a is None or isinstance(a, bool): return a if isinstance(a, str): a = a.lower() if a in ('\''yes'\'', '\''on'\'', '\''1'\'', '\''true'\'', 1): return True return False with open('\''/tmp/tmp.BZ3C5ORKFo/tmp.q3CZDZ8ooF.yaml'\'', '\''r'\'') as f: vars = yaml.safe_load(f.read()) loader = jinja2.FileSystemLoader(os.path.dirname('\''standalone/net_config.j2'\'')) env = jinja2.Environment(autoescape=True, loader=loader) env.filters['\''bool'\''] = to_bool print(env.get_template(os.path.basename('\''standalone/net_config.j2'\'')).render(**vars)) ' + jinja2_render standalone/role.j2 /tmp/tmp.BZ3C5ORKFo/tmp.q3CZDZ8ooF.yaml + local j2_template_file + local j2_vars + j2_template_file=standalone/role.j2 + j2_vars_file=/tmp/tmp.BZ3C5ORKFo/tmp.q3CZDZ8ooF.yaml + /usr/bin/python3 -c ' import jinja2 import os import yaml def to_bool(a): '\'''\'''\'' return a bool for the arg '\'''\'''\'' if a is None or isinstance(a, bool): return a if isinstance(a, str): a = a.lower() if a in ('\''yes'\'', '\''on'\'', '\''1'\'', '\''true'\'', 1): return True return False with open('\''/tmp/tmp.BZ3C5ORKFo/tmp.q3CZDZ8ooF.yaml'\'', '\''r'\'') as f: vars = yaml.safe_load(f.read()) loader 
= jinja2.FileSystemLoader(os.path.dirname('\''standalone/role.j2'\'')) env = jinja2.Environment(autoescape=True, loader=loader) env.filters['\''bool'\''] = to_bool print(env.get_template(os.path.basename('\''standalone/role.j2'\'')).render(**vars)) ' + scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i /home/zuul/.ssh/id_rsa /home/zuul/cdn_subscription_repos.sh root@192.168.122.100:/tmp/repo-setup.sh Warning: Permanently added '192.168.122.100' (ECDSA) to the list of known hosts. + scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i /home/zuul/.ssh/id_rsa /tmp/tmp.BZ3C5ORKFo/standalone_cmds root@192.168.122.100:/tmp/standalone-deploy.sh Warning: Permanently added '192.168.122.100' (ECDSA) to the list of known hosts. + scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i /home/zuul/.ssh/id_rsa /tmp/tmp.BZ3C5ORKFo/net_config.yaml root@192.168.122.100:/tmp/net_config.yaml Warning: Permanently added '192.168.122.100' (ECDSA) to the list of known hosts. + scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i /home/zuul/.ssh/id_rsa /tmp/tmp.BZ3C5ORKFo/network_data.yaml root@192.168.122.100:/tmp/network_data.yaml Warning: Permanently added '192.168.122.100' (ECDSA) to the list of known hosts. + scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i /home/zuul/.ssh/id_rsa /tmp/tmp.BZ3C5ORKFo/deployed_network.yaml root@192.168.122.100:/tmp/deployed_network.yaml Warning: Permanently added '192.168.122.100' (ECDSA) to the list of known hosts. + scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i /home/zuul/.ssh/id_rsa /tmp/tmp.BZ3C5ORKFo/Standalone.yaml root@192.168.122.100:/tmp/Standalone.yaml Warning: Permanently added '192.168.122.100' (ECDSA) to the list of known hosts. + scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i /home/zuul/.ssh/id_rsa /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/scripts/../standalone/hugepages.yaml root@192.168.122.100:hugepages.yaml Warning: Permanently added '192.168.122.100' (ECDSA) to the list of known hosts. + [[ false == \t\r\u\e ]] + scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i /home/zuul/.ssh/id_rsa standalone/openstack.sh root@192.168.122.100:/tmp/openstack.sh Warning: Permanently added '192.168.122.100' (ECDSA) to the list of known hosts. + scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i /home/zuul/.ssh/id_rsa standalone/post_config/ironic.sh root@192.168.122.100:/tmp/ironic_post.sh Warning: Permanently added '192.168.122.100' (ECDSA) to the list of known hosts. + '[' -f /home/zuul/.ssh/id_ecdsa.pub ']' + ssh-keygen -t ecdsa -f /home/zuul/.ssh/id_ecdsa -q -N '' + scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i /home/zuul/.ssh/id_rsa /home/zuul/.ssh/id_ecdsa.pub root@192.168.122.100:/root/.ssh/id_ecdsa.pub Warning: Permanently added '192.168.122.100' (ECDSA) to the list of known hosts. + [[ -f /home/zuul/containers-prepare-parameters.yaml ]] + [[ -z false ]] + [[ false == \f\a\l\s\e ]] + ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i /home/zuul/.ssh/id_rsa root@192.168.122.100 'bash /tmp/repo-setup.sh' Warning: Permanently added '192.168.122.100' (ECDSA) to the list of known hosts. Repository 'openstack-17.1-for-rhel-9-x86_64-rpms' is enabled for this system. Repository 'rhceph-7-tools-for-rhel-9-x86_64-rpms' is enabled for this system. Repository 'fast-datapath-for-rhel-9-x86_64-rpms' is enabled for this system. 
Repository 'rhel-9-for-x86_64-highavailability-eus-rpms' is enabled for this system. + ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i /home/zuul/.ssh/id_rsa root@192.168.122.100 'rm -f /tmp/repo-setup.sh' Warning: Permanently added '192.168.122.100' (ECDSA) to the list of known hosts. + [[ -n '' ]] + ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i /home/zuul/.ssh/id_rsa root@192.168.122.100 'bash /tmp/standalone-deploy.sh' Warning: Permanently added '192.168.122.100' (ECDSA) to the list of known hosts. + sudo dnf install -y podman python3-tripleoclient util-linux lvm2 cephadm Updating Subscription Management repositories. Red Hat Enterprise Linux 9 for x86_64 - High Av 6.1 MB/s | 2.5 MB 00:00 Package podman-2:4.4.1-22.el9_2.1.x86_64 is already installed. Package util-linux-2.37.4-10.el9.x86_64 is already installed. Dependencies resolved. ======================================================================================================================================== Package Arch Version Repository Size ======================================================================================================================================== Installing: cephadm noarch 2:18.2.1-361.el9cp rhceph-7-tools-for-rhel-9-x86_64-rpms 354 k lvm2 x86_64 9:2.03.17-7.el9_2.2 rhel-9-for-x86_64-baseos-eus-rpms 1.5 M python3-tripleoclient noarch 16.5.1-17.1.20250728123209.f3599d0.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 552 k Upgrading: device-mapper x86_64 9:1.02.187-7.el9_2.2 rhel-9-for-x86_64-baseos-eus-rpms 142 k device-mapper-libs x86_64 9:1.02.187-7.el9_2.2 rhel-9-for-x86_64-baseos-eus-rpms 176 k libblkid x86_64 2.37.4-11.el9_2 rhel-9-for-x86_64-baseos-rpms 109 k libfdisk x86_64 2.37.4-11.el9_2 rhel-9-for-x86_64-baseos-rpms 156 k libmount x86_64 2.37.4-11.el9_2 rhel-9-for-x86_64-baseos-rpms 136 k libsmartcols x86_64 2.37.4-11.el9_2 rhel-9-for-x86_64-baseos-rpms 65 k libuuid x86_64 2.37.4-11.el9_2 rhel-9-for-x86_64-baseos-rpms 30 k util-linux x86_64 2.37.4-11.el9_2 rhel-9-for-x86_64-baseos-rpms 2.3 M util-linux-core x86_64 2.37.4-11.el9_2 rhel-9-for-x86_64-baseos-rpms 464 k Installing dependencies: ansible-collection-ansible-netcommon noarch 2.2.0-1.2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 181 k ansible-collection-ansible-posix noarch 1.2.0-1.3.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 124 k ansible-collection-ansible-utils noarch 2.3.0-2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 91 k ansible-collection-community-general noarch 4.0.0-1.1.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 1.3 M ansible-collection-containers-podman noarch 1.9.4-5.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 243 k ansible-collections-openstack noarch 1.9.1-17.1.20250511000957.0e9a6f2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 146 k ansible-config_template noarch 2.0.1-17.1.20230621083924.7951228.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 29 k ansible-freeipa noarch 1.9.2-3.el9_2 rhel-9-for-x86_64-appstream-rpms 452 k ansible-pacemaker noarch 1.0.4-17.1.20231213170757.7c10fdb.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 21 k ansible-role-atos-hsm noarch 1.0.1-17.1.20230927001631.8618a22.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 17 k ansible-role-chrony noarch 1.3.1-17.1.20230621084226.0111661.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 22 k ansible-role-collectd-config noarch 0.0.3-17.1.20230620165926.1992666.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 53 k ansible-role-container-registry noarch 1.4.1-17.1.20230621045806.a091b9c.el9ost 
openstack-17.1-for-rhel-9-x86_64-rpms 24 k ansible-role-lunasa-hsm noarch 1.1.1-17.1.20250603180833.5b8127c.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 19 k ansible-role-metalsmith-deployment noarch 1.4.4-17.1.20240522060758.5e7461e.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 21 k ansible-role-openstack-operations noarch 0.0.1-17.1.20230620170737.2ab288f.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 30 k ansible-role-qdr-config noarch 0.0.2-17.1.20230620171136.b456651.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 21 k ansible-role-redhat-subscription noarch 1.3.0-17.1.20230621033420.eefe501.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 28 k ansible-role-thales-hsm noarch 3.0.1-17.1.20231007220803.f95c0fc.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 21 k ansible-role-tripleo-modify-image noarch 1.5.1-17.1.20230621064242.b6eedb6.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 27 k ansible-tripleo-ipa noarch 0.3.1-17.1.20230627190951.8d29d9e.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 36 k ansible-tripleo-ipsec noarch 11.0.1-17.1.20230620172008.b5559c8.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 38 k augeas-libs x86_64 1.13.0-3.el9 rhel-9-for-x86_64-appstream-rpms 459 k boost-atomic x86_64 1.75.0-8.el9 rhel-9-for-x86_64-appstream-rpms 19 k boost-chrono x86_64 1.75.0-8.el9 rhel-9-for-x86_64-appstream-rpms 26 k boost-filesystem x86_64 1.75.0-8.el9 rhel-9-for-x86_64-appstream-rpms 59 k boost-locale x86_64 1.75.0-8.el9 rhel-9-for-x86_64-appstream-rpms 216 k boost-log x86_64 1.75.0-8.el9 rhel-9-for-x86_64-appstream-rpms 410 k boost-nowide x86_64 1.75.0-8.el9 rhel-9-for-x86_64-appstream-rpms 17 k boost-program-options x86_64 1.75.0-8.el9 rhel-9-for-x86_64-appstream-rpms 108 k boost-regex x86_64 1.75.0-8.el9 rhel-9-for-x86_64-appstream-rpms 279 k boost-system x86_64 1.75.0-8.el9 rhel-9-for-x86_64-appstream-rpms 15 k boost-thread x86_64 1.75.0-8.el9 rhel-9-for-x86_64-appstream-rpms 57 k buildah x86_64 1:1.29.5-1.el9_2 rhel-9-for-x86_64-appstream-eus-rpms 8.6 M cpp-hocon x86_64 0.3.0-7.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 351 k device-mapper-event x86_64 9:1.02.187-7.el9_2.2 rhel-9-for-x86_64-baseos-eus-rpms 32 k device-mapper-event-libs x86_64 9:1.02.187-7.el9_2.2 rhel-9-for-x86_64-baseos-eus-rpms 31 k device-mapper-persistent-data x86_64 0.9.0-13.el9 rhel-9-for-x86_64-baseos-rpms 786 k dib-utils noarch 0.0.11-17.1.20230620173328.51661c3.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 12 k facter x86_64 1:3.14.19-3.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 518 k glibc-langpack-en x86_64 2.34-60.el9 rhel-9-for-x86_64-baseos-rpms 675 k golang-github-vbatts-tar-split x86_64 0.11.1-9.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 1.0 M heat-cfntools noarch 1.4.2-6.1.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 77 k hiera noarch 3.6.0-6.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 33 k ipa-client-common noarch 4.10.1-12.el9_2.2 rhel-9-for-x86_64-appstream-eus-rpms 44 k ipa-common noarch 4.10.1-12.el9_2.2 rhel-9-for-x86_64-appstream-eus-rpms 656 k ipa-selinux noarch 4.10.1-6.el9 rhel-9-for-x86_64-appstream-rpms 40 k iptables-nft-services noarch 1.8.8-6.el9_1 rhel-9-for-x86_64-appstream-rpms 28 k jq x86_64 1.6-14.el9 rhel-9-for-x86_64-appstream-rpms 190 k krb5-pkinit x86_64 1.20.1-8.el9 rhel-9-for-x86_64-baseos-rpms 62 k krb5-workstation x86_64 1.20.1-8.el9 rhel-9-for-x86_64-baseos-rpms 542 k leatherman x86_64 1.12.6-5.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 405 k libaio x86_64 0.3.111-13.el9 rhel-9-for-x86_64-baseos-rpms 26 k libburn x86_64 1.5.4-4.el9 rhel-9-for-x86_64-appstream-rpms 175 k 
libipa_hbac x86_64 2.8.2-5.el9_2.5 rhel-9-for-x86_64-baseos-eus-rpms 36 k libisoburn x86_64 1.5.4-4.el9 rhel-9-for-x86_64-appstream-rpms 419 k libisofs x86_64 1.5.4-4.el9 rhel-9-for-x86_64-appstream-rpms 224 k libkadm5 x86_64 1.20.1-8.el9 rhel-9-for-x86_64-baseos-rpms 81 k libpkgconf x86_64 1.7.3-10.el9 rhel-9-for-x86_64-baseos-rpms 37 k libselinux-ruby x86_64 3.5-1.el9 rhel-9-for-x86_64-appstream-rpms 59 k libsodium x86_64 1.0.18-7.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 162 k libunwind x86_64 1.6.2-1.el9cp rhceph-7-tools-for-rhel-9-x86_64-rpms 68 k libxslt x86_64 1.1.34-11.el9_2 rhel-9-for-x86_64-appstream-eus-rpms 245 k lvm2-libs x86_64 9:2.03.17-7.el9_2.2 rhel-9-for-x86_64-baseos-eus-rpms 1.0 M oniguruma x86_64 6.9.6-1.el9.5 rhel-9-for-x86_64-appstream-rpms 221 k openpgm x86_64 5.2.122-26.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 178 k openssl-perl x86_64 1:3.0.7-6.el9_2 rhel-9-for-x86_64-appstream-rpms 42 k openstack-ansible-core x86_64 2.14.2-4.7.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 2.3 M openstack-heat-agents noarch 2.2.1-17.1.20230620232028.ed16cc7.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 9.6 k openstack-heat-api noarch 1:16.1.1-17.1.20250703110808.edc6d60.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 16 k openstack-heat-common noarch 1:16.1.1-17.1.20250703110808.edc6d60.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 1.6 M openstack-heat-engine noarch 1:16.1.1-17.1.20250703110808.edc6d60.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 15 k openstack-heat-monolith noarch 1:16.1.1-17.1.20250703110808.edc6d60.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 16 k openstack-selinux noarch 0.8.37-17.1.20231107080825.05dd1b2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 221 k openstack-tripleo-common noarch 15.4.1-17.1.20250908140822.e5b18f2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 48 k openstack-tripleo-common-containers noarch 15.4.1-17.1.20250908140822.e5b18f2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 74 k openstack-tripleo-heat-templates noarch 14.3.1-17.1.20251015110812.e7c7ce3.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 611 k openstack-tripleo-validations noarch 14.3.2-17.1.20250120160809.2b526f8.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 204 k os-apply-config noarch 13.1.1-17.1.20231020001757.3c7f9b9.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 54 k os-collect-config noarch 13.1.1-17.1.20231020001748.76173d8.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 50 k os-refresh-config noarch 13.1.1-17.1.20231020000854.812905b.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 21 k perl-AutoLoader noarch 5.74-480.el9 rhel-9-for-x86_64-appstream-rpms 23 k perl-B x86_64 1.80-480.el9 rhel-9-for-x86_64-appstream-rpms 185 k perl-Carp noarch 1.50-460.el9 rhel-9-for-x86_64-appstream-rpms 31 k perl-Class-Struct noarch 0.66-480.el9 rhel-9-for-x86_64-appstream-rpms 24 k perl-Data-Dumper x86_64 2.174-462.el9 rhel-9-for-x86_64-appstream-rpms 59 k perl-Digest noarch 1.19-4.el9 rhel-9-for-x86_64-appstream-rpms 29 k perl-Digest-MD5 x86_64 2.58-4.el9 rhel-9-for-x86_64-appstream-rpms 39 k perl-Encode x86_64 4:3.08-462.el9 rhel-9-for-x86_64-appstream-rpms 1.7 M perl-Errno x86_64 1.30-480.el9 rhel-9-for-x86_64-appstream-rpms 17 k perl-Exporter noarch 5.74-461.el9 rhel-9-for-x86_64-appstream-rpms 34 k perl-Fcntl x86_64 1.13-480.el9 rhel-9-for-x86_64-appstream-rpms 23 k perl-File-Basename noarch 2.85-480.el9 rhel-9-for-x86_64-appstream-rpms 19 k perl-File-Path noarch 2.18-4.el9 rhel-9-for-x86_64-appstream-rpms 38 k perl-File-Temp noarch 1:0.231.100-4.el9 rhel-9-for-x86_64-appstream-rpms 63 k 
perl-File-stat noarch 1.09-480.el9 rhel-9-for-x86_64-appstream-rpms 19 k perl-FileHandle noarch 2.03-480.el9 rhel-9-for-x86_64-appstream-rpms 17 k perl-Getopt-Long noarch 1:2.52-4.el9 rhel-9-for-x86_64-appstream-rpms 64 k perl-Getopt-Std noarch 1.12-480.el9 rhel-9-for-x86_64-appstream-rpms 17 k perl-HTTP-Tiny noarch 0.076-461.el9_2 rhel-9-for-x86_64-appstream-eus-rpms 57 k perl-IO x86_64 1.43-480.el9 rhel-9-for-x86_64-appstream-rpms 94 k perl-IO-Socket-IP noarch 0.41-5.el9 rhel-9-for-x86_64-appstream-rpms 45 k perl-IO-Socket-SSL noarch 2.073-1.el9 rhel-9-for-x86_64-appstream-rpms 223 k perl-IPC-Open3 noarch 1.21-480.el9 rhel-9-for-x86_64-appstream-rpms 25 k perl-MIME-Base64 x86_64 3.16-4.el9 rhel-9-for-x86_64-appstream-rpms 34 k perl-Mozilla-CA noarch 20200520-6.el9 rhel-9-for-x86_64-appstream-rpms 14 k perl-Net-SSLeay x86_64 1.92-2.el9 rhel-9-for-x86_64-appstream-rpms 392 k perl-POSIX x86_64 1.94-480.el9 rhel-9-for-x86_64-appstream-rpms 99 k perl-PathTools x86_64 3.78-461.el9 rhel-9-for-x86_64-appstream-rpms 92 k perl-Pod-Escapes noarch 1:1.07-460.el9 rhel-9-for-x86_64-appstream-rpms 22 k perl-Pod-Perldoc noarch 3.28.01-461.el9 rhel-9-for-x86_64-appstream-rpms 92 k perl-Pod-Simple noarch 1:3.42-4.el9 rhel-9-for-x86_64-appstream-rpms 229 k perl-Pod-Usage noarch 4:2.01-4.el9 rhel-9-for-x86_64-appstream-rpms 43 k perl-Scalar-List-Utils x86_64 4:1.56-461.el9 rhel-9-for-x86_64-appstream-rpms 77 k perl-SelectSaver noarch 1.02-480.el9 rhel-9-for-x86_64-appstream-rpms 13 k perl-Socket x86_64 4:2.031-4.el9 rhel-9-for-x86_64-appstream-rpms 58 k perl-Storable x86_64 1:3.21-460.el9 rhel-9-for-x86_64-appstream-rpms 98 k perl-Symbol noarch 1.08-480.el9 rhel-9-for-x86_64-appstream-rpms 16 k perl-Term-ANSIColor noarch 5.01-461.el9 rhel-9-for-x86_64-appstream-rpms 51 k perl-Term-Cap noarch 1.17-460.el9 rhel-9-for-x86_64-appstream-rpms 24 k perl-Text-ParseWords noarch 3.30-460.el9 rhel-9-for-x86_64-appstream-rpms 18 k perl-Text-Tabs+Wrap noarch 2013.0523-460.el9 rhel-9-for-x86_64-appstream-rpms 25 k perl-Time-Local noarch 2:1.300-7.el9 rhel-9-for-x86_64-appstream-rpms 37 k perl-URI noarch 5.09-3.el9 rhel-9-for-x86_64-appstream-rpms 125 k perl-base noarch 2.27-480.el9 rhel-9-for-x86_64-appstream-rpms 18 k perl-constant noarch 1.33-461.el9 rhel-9-for-x86_64-appstream-rpms 25 k perl-if noarch 0.60.800-480.el9 rhel-9-for-x86_64-appstream-rpms 16 k perl-interpreter x86_64 4:5.32.1-480.el9 rhel-9-for-x86_64-appstream-rpms 75 k perl-libnet noarch 3.13-4.el9 rhel-9-for-x86_64-appstream-rpms 134 k perl-libs x86_64 4:5.32.1-480.el9 rhel-9-for-x86_64-appstream-rpms 2.2 M perl-mro x86_64 1.23-480.el9 rhel-9-for-x86_64-appstream-rpms 31 k perl-overload noarch 1.31-480.el9 rhel-9-for-x86_64-appstream-rpms 47 k perl-overloading noarch 0.02-480.el9 rhel-9-for-x86_64-appstream-rpms 15 k perl-parent noarch 1:0.238-460.el9 rhel-9-for-x86_64-appstream-rpms 16 k perl-podlators noarch 1:4.14-460.el9 rhel-9-for-x86_64-appstream-rpms 118 k perl-subs noarch 1.03-480.el9 rhel-9-for-x86_64-appstream-rpms 13 k perl-vars noarch 1.05-480.el9 rhel-9-for-x86_64-appstream-rpms 15 k pkgconf x86_64 1.7.3-10.el9 rhel-9-for-x86_64-baseos-rpms 45 k pkgconf-m4 noarch 1.7.3-10.el9 rhel-9-for-x86_64-baseos-rpms 16 k pkgconf-pkg-config x86_64 1.7.3-10.el9 rhel-9-for-x86_64-baseos-rpms 12 k puppet noarch 7.10.0-4.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 11 k puppet-aodh noarch 18.4.2-17.1.20230620151218.3e47b5a.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 62 k puppet-apache noarch 6.5.2-17.1.20250811150836.e4a1532.el9ost 
openstack-17.1-for-rhel-9-x86_64-rpms 253 k puppet-archive noarch 4.6.1-17.1.20230620181607.bc7e4ff.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 44 k puppet-auditd noarch 2.2.1-17.1.20230620192453.189b22b.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 28 k puppet-barbican noarch 18.4.2-17.1.20250625204151.af6c77b.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 57 k puppet-ceilometer noarch 18.4.3-17.1.20230927010818.3838907.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 70 k puppet-certmonger noarch 2.7.1-17.1.20230620184730.3e2e660.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 26 k puppet-cinder noarch 18.5.2-17.1.20230621054224.6aa60e7.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 120 k puppet-collectd noarch 13.0.1-17.1.20230620193147.ad138a7.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 135 k puppet-concat noarch 6.2.1-17.1.20250725131333.dfeabb9.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 44 k puppet-corosync noarch 8.0.1-17.1.20230621104908.6a9da9a.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 68 k puppet-designate noarch 18.6.1-17.1.20230621061456.f4c0b89.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 69 k puppet-dns noarch 8.2.1-17.1.20230621002045.70f5b28.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 36 k puppet-etcd noarch 1.12.3-17.1.20230620193226.e143c2d.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 25 k puppet-fdio noarch 18.2-1.20220727113954.6fd1c8e.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 25 k puppet-firewall noarch 3.4.1-17.1.20250804151611.94f707c.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 73 k puppet-git noarch 0.5.0-17.1.20250725114148.4e4498e.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 26 k puppet-glance noarch 18.6.1-17.1.20250220151014.81b081d.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 87 k puppet-gnocchi noarch 18.4.3-17.1.20230621061020.7584b94.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 58 k puppet-haproxy noarch 4.2.2-17.1.20250725114653.a797b8c.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 55 k puppet-headless noarch 7.10.0-4.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 1.4 M puppet-heat noarch 18.4.1-17.1.20230621111632.3b41bb0.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 70 k puppet-horizon noarch 18.6.1-17.1.20251008130751.8074e69.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 59 k puppet-inifile noarch 4.2.1-17.1.20250804150825.df46d2a.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 46 k puppet-ipaclient noarch 2.5.2-17.1.20250725102608.b086731.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 21 k puppet-ironic noarch 18.7.1-17.1.20240305210817.edf93f9.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 121 k puppet-keepalived noarch 0.0.2-17.1.20250725114922.bbca37a.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 17 k puppet-keystone noarch 18.6.1-17.1.20251008090800.cb0adcb.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 126 k puppet-kmod noarch 2.5.0-17.1.20230620190646.52e31e3.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 21 k puppet-manila noarch 18.5.2-17.1.20231102190827.a72a7d5.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 81 k puppet-memcached noarch 6.0.0-17.1.20230620184630.4c70dbd.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 23 k puppet-module-data noarch 0.5.1-17.1.20250725115430.28dafce.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 16 k puppet-mysql noarch 10.6.1-17.1.20230621022141.937d044.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 99 k puppet-neutron noarch 18.6.1-17.1.20230621053056.c9d467f.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 196 k puppet-nova noarch 18.6.1-17.1.20230621090443.a21eae4.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 200 k puppet-nssdb noarch 
1.0.2-17.1.20230620185645.2ed2a2d.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 12 k puppet-octavia noarch 18.5.1-17.1.20230621062420.842492c.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 76 k puppet-openstack_extras noarch 18.5.1-17.1.20230621042409.504e1a0.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 48 k puppet-openstacklib noarch 18.5.2-17.1.20230621052822.64d8ac6.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 62 k puppet-oslo noarch 18.5.1-17.1.20230621052358.fe2a147.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 48 k puppet-ovn noarch 18.6.1-17.1.20240925094907.7805f7e.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 29 k puppet-pacemaker noarch 1.5.1-17.1.20250214161010.f10ce89.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 158 k puppet-placement noarch 5.4.3-17.1.20230621061610.e7557a5.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 41 k puppet-qdr noarch 7.4.1-17.1.20230620195605.8a575de.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 31 k puppet-rabbitmq noarch 11.0.1-17.1.20230620182519.63fee2c.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 266 k puppet-redis noarch 6.1.1-17.1.20230620191704.547105e.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 51 k puppet-remote noarch 10.0.0-17.1.20250725114017.7420908.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 19 k puppet-rsync noarch 1.1.4-17.1.20250804151510.ea6397e.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 28 k puppet-rsyslog noarch 4.0.1-17.1.20230620200132.2548a0d.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 58 k puppet-snmp noarch 3.9.1-17.1.20250725115759.5d73485.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 38 k puppet-ssh noarch 6.2.1-17.1.20230620190251.6e0f430.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 29 k puppet-stdlib noarch 6.3.1-17.1.20230621000857.7c1ae25.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 160 k puppet-swift noarch 18.6.1-17.1.20230621052139.f105ffc.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 115 k puppet-sysctl noarch 0.0.13-17.1.20250725113328.847ec1c.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 19 k puppet-systemd noarch 2.12.1-17.1.20230620191611.8f68b0d.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 42 k puppet-tripleo noarch 14.2.3-17.1.20250320101351.40278e1.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 239 k puppet-vcsrepo noarch 3.1.1-17.1.20250725113408.a36ee18.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 53 k puppet-vswitch noarch 14.4.3-17.1.20230621043558.51e82ca.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 41 k puppet-xinetd noarch 3.3.1-17.1.20230620185435.8d460c4.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 26 k python-openstackclient-lang noarch 5.5.2-17.1.20250616100909.42d9b6e.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 58 k python-oslo-cache-lang noarch 2.7.1-17.1.20230621012756.d0252f6.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 19 k python-oslo-db-lang noarch 8.5.2-17.1.20240820150750.26fd6fb.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 14 k python-oslo-log-lang noarch 4.4.0-17.1.20230620205407.9b29c90.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 14 k python-oslo-middleware-lang noarch 4.2.1-17.1.20230621011225.b40ca5f.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 13 k python-oslo-policy-lang noarch 3.7.1-17.1.20230621003949.639b471.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 14 k python-oslo-versionedobjects-lang noarch 2.4.1-17.1.20230621010423.89ff171.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 14 k python-pycadf-common noarch 3.1.1-17.1.20230620210757.4179996.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 15 k python3-GitPython noarch 3.1.14-2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 238 k 
python3-PyMySQL noarch 0.10.1-6.el9 rhel-9-for-x86_64-appstream-rpms 112 k python3-alembic noarch 1.7.5-3.el9 rhel-9-for-x86_64-appstream-rpms 900 k python3-amqp noarch 5.0.6-5.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 92 k python3-ansible-runner noarch 2.0.0a1-3.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 122 k python3-aodhclient noarch 2.2.0-17.1.20230620222234.b747ae3.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 56 k python3-appdirs noarch 1.4.4-4.el9 rhel-9-for-x86_64-appstream-rpms 27 k python3-augeas noarch 0.5.0-25.el9 rhel-9-for-x86_64-appstream-rpms 31 k python3-barbicanclient noarch 5.3.0-17.1.20230620213453.ad49c40.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 127 k python3-bcrypt x86_64 3.1.7-7.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 44 k python3-beautifulsoup4 noarch 4.9.3-2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 204 k python3-boto noarch 2.45.0-8.1.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 1.3 M python3-cachetools noarch 4.2.2-3.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 33 k python3-cinderclient noarch 7.4.1-17.1.20230620211836.4f72e6f.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 218 k python3-cliff noarch 3.7.0-17.1.20230620172206.117a100.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 95 k python3-cmd2 noarch 1.4.0-4.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 284 k python3-colorama noarch 0.4.4-4.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 35 k python3-croniter noarch 0.3.35-2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 49 k python3-daemon noarch 2.3.0-1.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 39 k python3-designateclient noarch 4.2.1-17.1.20230621023138.7a8d156.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 92 k python3-dns noarch 2.2.1-2.el9 rhel-9-for-x86_64-baseos-rpms 418 k python3-docutils noarch 0.16-6.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 1.5 M python3-dogpile-cache noarch 1.1.5-5.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 89 k python3-etcd3gw noarch 0.2.6-2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 44 k python3-extras noarch 1.0.0-17.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 21 k python3-fixtures noarch 3.0.0-24.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 93 k python3-futurist noarch 2.3.0-17.1.20230621104020.1a1c6f8.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 64 k python3-gevent x86_64 21.1.2-2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 1.6 M python3-gitdb noarch 4.0.5-2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 107 k python3-glanceclient noarch 1:3.3.0-17.1.20240712170803.f802c71.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 143 k python3-gssapi x86_64 1.6.9-5.el9 rhel-9-for-x86_64-appstream-rpms 489 k python3-heat-agent noarch 2.2.1-17.1.20230620232028.ed16cc7.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 19 k python3-heat-agent-ansible noarch 2.2.1-17.1.20230620232028.ed16cc7.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 11 k python3-heat-agent-apply-config noarch 2.2.1-17.1.20230620232028.ed16cc7.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 9.9 k python3-heat-agent-docker-cmd noarch 2.2.1-17.1.20230620232028.ed16cc7.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 12 k python3-heat-agent-hiera noarch 2.2.1-17.1.20230620232028.ed16cc7.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 10 k python3-heat-agent-json-file noarch 2.2.1-17.1.20230620232028.ed16cc7.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 9.8 k python3-heat-agent-puppet noarch 2.2.1-17.1.20230620232028.ed16cc7.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 11 k python3-heatclient noarch 2.3.1-17.1.20230621012952.d16c245.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 
158 k python3-ipaclient noarch 4.10.1-12.el9_2.2 rhel-9-for-x86_64-appstream-eus-rpms 648 k python3-ipalib noarch 4.10.1-12.el9_2.2 rhel-9-for-x86_64-appstream-eus-rpms 663 k python3-ironic-inspector-client noarch 4.5.0-17.1.20230620205758.3c03e21.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 67 k python3-ironicclient noarch 4.6.4-17.1.20230621041552.09b78fa.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 342 k python3-jeepney noarch 0.6.0-2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 284 k python3-jmespath noarch 0.10.0-1.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 46 k python3-jwcrypto noarch 0.8-4.el9 rhel-9-for-x86_64-appstream-rpms 76 k python3-keyring noarch 21.8.0-4.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 77 k python3-keystoneauth1 noarch 4.4.0-17.1.20240812145234.112bcae.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 409 k python3-keystoneclient noarch 1:4.3.0-17.1.20230621025111.d5cb761.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 239 k python3-keystonemiddleware noarch 9.2.0-17.1.20230620211753.3659bda.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 92 k python3-kombu noarch 1:5.0.2-3.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 313 k python3-ldap x86_64 3.4.3-2.el9 rhel-9-for-x86_64-appstream-rpms 259 k python3-libipa_hbac x86_64 2.8.2-5.el9_2.5 rhel-9-for-x86_64-baseos-eus-rpms 30 k python3-lockfile noarch 1:0.12.2-2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 31 k python3-logutils noarch 0.3.5-15.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 47 k python3-lxml x86_64 4.6.5-3.el9 rhel-9-for-x86_64-appstream-rpms 1.2 M python3-magnumclient noarch 3.4.1-17.1.20230621053508.280acd2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 121 k python3-mako noarch 1.1.4-6.el9 rhel-9-for-x86_64-appstream-rpms 176 k python3-manilaclient noarch 2.6.4-17.1.20240830140806.7f7d7d3.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 528 k python3-memcached noarch 1.59-3.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 40 k python3-metalsmith noarch 1.4.4-17.1.20240522060758.5e7461e.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 65 k python3-migrate noarch 0.13.0-1.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 224 k python3-mimeparse noarch 1.6.0-16.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 16 k python3-mistralclient noarch 4.2.0-17.1.20230620220528.20a10f0.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 166 k python3-msgpack x86_64 1.0.2-4.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 87 k python3-munch noarch 2.5.0-6.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 25 k python3-natsort noarch 7.1.1-2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 59 k python3-neutron-lib noarch 2.10.3-17.1.20231221164814.619c0fe.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 375 k python3-neutronclient noarch 7.3.1-17.1.20230621044049.29a9f5e.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 298 k python3-novaclient noarch 1:17.4.1-17.1.20230621034300.5ee4427.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 201 k python3-octaviaclient noarch 2.3.1-17.1.20231106100828.51347bc.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 91 k python3-openstackclient noarch 5.5.2-17.1.20250616100909.42d9b6e.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 1.1 M python3-openstacksdk noarch 0.55.1-17.1.20250516211008.f09ed4a.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 735 k python3-openvswitch3.3 x86_64 3.3.6-141.el9fdp fast-datapath-for-rhel-9-x86_64-rpms 383 k python3-os-client-config noarch 2.1.0-17.1.20230620203151.bc96c23.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 55 k python3-os-ken noarch 1.4.1-17.1.20241205090937.018d755.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 
2.1 M python3-os-service-types noarch 1.7.0-17.1.20230620201222.0b2f473.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 38 k python3-os-traits noarch 2.5.0-17.1.20230620221402.ac1b39e.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 45 k python3-osc-lib noarch 2.3.1-17.1.20230620203400.2b7a679.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 78 k python3-oslo-cache noarch 2.7.1-17.1.20230621012756.d0252f6.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 56 k python3-oslo-context noarch 3.2.1-17.1.20230620204857.b124eb7.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 27 k python3-oslo-db noarch 8.5.2-17.1.20240820150750.26fd6fb.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 144 k python3-oslo-log noarch 4.4.0-17.1.20230620205407.9b29c90.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 64 k python3-oslo-messaging noarch 12.7.3-17.1.20231219130800.5d6fd1a.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 216 k python3-oslo-middleware noarch 4.2.1-17.1.20230621011225.b40ca5f.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 55 k python3-oslo-policy noarch 3.7.1-17.1.20230621003949.639b471.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 76 k python3-oslo-reports noarch 2.2.0-17.1.20230620210621.bc631ae.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 59 k python3-oslo-rootwrap noarch 6.3.1-17.1.20230621110703.1b1b960.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 46 k python3-oslo-serialization noarch 4.1.1-17.1.20230621011445.bbe5d5a.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 34 k python3-oslo-service noarch 2.5.1-17.1.20240821200745.c1e3398.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 71 k python3-oslo-upgradecheck noarch 1.3.1-17.1.20230621010138.9561ecb.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 29 k python3-oslo-versionedobjects noarch 2.4.1-17.1.20230621010423.89ff171.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 79 k python3-osprofiler noarch 3.4.0-17.1.20230620215259.5d82a02.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 130 k python3-paramiko noarch 2.11.0-2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 299 k python3-passlib noarch 1.7.4-3.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 712 k python3-paste noarch 3.5.0-5.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 776 k python3-paste-deploy noarch 2.1.1-4.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 41 k python3-pecan noarch 1.4.0-2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 273 k python3-psutil x86_64 5.8.0-12.el9 rhel-9-for-x86_64-appstream-rpms 218 k python3-pyOpenSSL noarch 20.0.1-2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 90 k python3-pyasn1 noarch 0.4.8-6.el9 rhel-9-for-x86_64-appstream-rpms 163 k python3-pyasn1-modules noarch 0.4.8-6.el9 rhel-9-for-x86_64-appstream-rpms 283 k python3-pycadf noarch 3.1.1-17.1.20230620210757.4179996.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 53 k python3-pynacl x86_64 1.4.0-1.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 111 k python3-pyperclip noarch 1.8.0-3.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 25 k python3-pystache noarch 0.5.4-15.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 132 k python3-pyusb noarch 1.0.2-13.el9 rhel-9-for-x86_64-appstream-rpms 96 k python3-qrcode-core noarch 6.1-12.el9 rhel-9-for-x86_64-appstream-rpms 61 k python3-repoze-lru noarch 0.7-12.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 32 k python3-requestsexceptions noarch 1.4.0-17.1.20230620164652.d7ac0ff.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 17 k python3-rhosp-openvswitch noarch 3.3-1.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 8.3 k python3-routes noarch 2.4.1-14.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 188 k python3-rsa noarch 4.6-3.1.el9ost 
openstack-17.1-for-rhel-9-x86_64-rpms 57 k python3-saharaclient noarch 3.3.0-17.1.20230620222322.401e663.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 188 k python3-secretstorage noarch 3.3.1-3.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 36 k python3-setproctitle x86_64 1.2.2-1.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 23 k python3-shade noarch 1.33.0-17.1.20230620225148.e7c7f29.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 523 k python3-simplejson x86_64 3.17.5-3.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 266 k python3-smmap noarch 3.0.1-4.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 47 k python3-soupsieve noarch 2.2-1.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 67 k python3-sqlalchemy13 x86_64 1.3.24-3.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 1.7 M python3-sqlparse noarch 0.4.1-2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 83 k python3-sss-murmur x86_64 2.8.2-5.el9_2.5 rhel-9-for-x86_64-baseos-eus-rpms 19 k python3-statsd noarch 3.2.1-20.1.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 35 k python3-swiftclient noarch 3.11.1-17.1.20230620204150.06b36ae.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 136 k python3-tempita noarch 0.5.1-27.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 36 k python3-tenacity noarch 6.3.1-1.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 51 k python3-testtools noarch 2.4.0-10.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 333 k python3-tinyrpc noarch 1.0.3-6.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 50 k python3-tripleo-common noarch 15.4.1-17.1.20250908140822.e5b18f2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 198 k python3-troveclient noarch 7.0.0-17.1.20230620220924.c7319d8.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 344 k python3-urllib-gssapi noarch 1.0.2-4.el9 rhel-9-for-x86_64-appstream-rpms 24 k python3-validations-libs noarch 1.9.1-17.1.20230913020848.8d9e1b5.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 137 k python3-vine noarch 5.0.0-5.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 26 k python3-waitress noarch 2.0.0-4.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 109 k python3-warlock noarch 1.3.3-7.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 21 k python3-wcwidth noarch 0.2.5-8.el9 rhel-9-for-x86_64-appstream-rpms 48 k python3-webob noarch 1.8.7-2.1.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 230 k python3-webtest noarch 2.0.35-6.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 82 k python3-werkzeug noarch 2.0.1-7.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 427 k python3-yappi x86_64 1.3.1-5.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 56 k python3-yaql noarch 1.1.3-13.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 180 k python3-yubico noarch 1.3.3-7.el9 rhel-9-for-x86_64-appstream-rpms 73 k python3-zaqarclient noarch 2.4.0-17.1.20230620214253.e388947.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 129 k python3-zope-event noarch 4.2.0-20.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 205 k python3-zope-interface x86_64 5.4.0-1.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 202 k qemu-img x86_64 17:7.2.0-14.el9_2.18 rhel-9-for-x86_64-appstream-eus-rpms 2.2 M rhel-system-roles noarch 1.21.2-1.el9_2 rhel-9-for-x86_64-appstream-rpms 2.4 M ruby x86_64 3.0.4-161.el9_2.2 rhel-9-for-x86_64-appstream-eus-rpms 41 k ruby-augeas x86_64 0.5.0-29.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 29 k ruby-facter x86_64 1:3.14.19-3.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 9.5 k ruby-libs x86_64 3.0.4-161.el9_2.2 rhel-9-for-x86_64-appstream-eus-rpms 3.3 M rubygem-concurrent-ruby noarch 1.1.5-2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 243 k rubygem-deep_merge noarch 
1.2.1-7.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 14 k rubygem-fast_gettext noarch 1.2.0-9.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 34 k rubygem-hocon noarch 1.3.1-2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 90 k rubygem-json x86_64 2.5.1-161.el9_2.2 rhel-9-for-x86_64-appstream-eus-rpms 59 k rubygem-multi_json noarch 1.15.0-2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 22 k rubygem-psych x86_64 3.3.2-161.el9_2.2 rhel-9-for-x86_64-appstream-eus-rpms 60 k rubygem-puppet-resource_api noarch 1.8.13-1.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 34 k rubygem-rexml noarch 3.2.5-161.el9_2.2 rhel-9-for-x86_64-appstream-eus-rpms 110 k rubygem-ruby-shadow x86_64 2.5.0-15.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 16 k rubygem-semantic_puppet noarch 1.0.4-2.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 21 k rubygems noarch 3.2.33-161.el9_2.2 rhel-9-for-x86_64-appstream-eus-rpms 306 k skopeo x86_64 2:1.11.2-0.1.el9_2.2 rhel-9-for-x86_64-appstream-eus-rpms 7.9 M sshpass x86_64 1.09-4.el9 rhel-9-for-x86_64-appstream-rpms 30 k tripleo-ansible noarch 3.3.1-17.1.20250804050821.8debef3.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 569 k validations-common noarch 1.9.1-17.1.20231006020828.f273ccb.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 111 k xorriso x86_64 1.5.4-4.el9 rhel-9-for-x86_64-appstream-rpms 321 k yaml-cpp x86_64 0.6.3-6.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 123 k zeromq x86_64 4.3.4-1.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 433 k Installing weak dependencies: bash-completion noarch 1:2.11-4.el9 rhel-9-for-x86_64-baseos-rpms 459 k perl-NDBM_File x86_64 1.15-480.el9 rhel-9-for-x86_64-appstream-rpms 25 k python3-zmq x86_64 22.0.3-1.el9ost openstack-17.1-for-rhel-9-x86_64-rpms 396 k ruby-default-gems noarch 3.0.4-161.el9_2.2 rhel-9-for-x86_64-appstream-eus-rpms 46 k rubygem-bigdecimal x86_64 3.0.0-161.el9_2.2 rhel-9-for-x86_64-appstream-eus-rpms 55 k rubygem-bundler noarch 2.2.33-161.el9_2.2 rhel-9-for-x86_64-appstream-eus-rpms 456 k rubygem-io-console x86_64 0.5.7-161.el9_2.2 rhel-9-for-x86_64-appstream-eus-rpms 26 k rubygem-rdoc noarch 6.3.3-161.el9_2.2 rhel-9-for-x86_64-appstream-eus-rpms 441 k Transaction Summary ======================================================================================================================================== Install 390 Packages Upgrade 9 Packages Total download size: 97 M Downloading Packages: (1/399): libaio-0.3.111-13.el9.x86_64.rpm 186 kB/s | 26 kB 00:00 (2/399): device-mapper-persistent-data-0.9.0-13 4.1 MB/s | 786 kB 00:00 (3/399): bash-completion-2.11-4.el9.noarch.rpm 2.2 MB/s | 459 kB 00:00 (4/399): glibc-langpack-en-2.34-60.el9.x86_64.r 4.9 MB/s | 675 kB 00:00 (5/399): krb5-pkinit-1.20.1-8.el9.x86_64.rpm 660 kB/s | 62 kB 00:00 (6/399): krb5-workstation-1.20.1-8.el9.x86_64.r 5.1 MB/s | 542 kB 00:00 (7/399): libkadm5-1.20.1-8.el9.x86_64.rpm 918 kB/s | 81 kB 00:00 (8/399): libpkgconf-1.7.3-10.el9.x86_64.rpm 419 kB/s | 37 kB 00:00 (9/399): pkgconf-1.7.3-10.el9.x86_64.rpm 486 kB/s | 45 kB 00:00 (10/399): pkgconf-m4-1.7.3-10.el9.noarch.rpm 187 kB/s | 16 kB 00:00 (11/399): pkgconf-pkg-config-1.7.3-10.el9.x86_6 147 kB/s | 12 kB 00:00 (12/399): python3-dns-2.2.1-2.el9.noarch.rpm 4.3 MB/s | 418 kB 00:00 (13/399): python3-openvswitch3.3-3.3.6-141.el9f 3.9 MB/s | 383 kB 00:00 (14/399): perl-IO-Socket-SSL-2.073-1.el9.noarch 2.3 MB/s | 223 kB 00:00 (15/399): boost-system-1.75.0-8.el9.x86_64.rpm 168 kB/s | 15 kB 00:00 (16/399): perl-Getopt-Long-2.52-4.el9.noarch.rp 696 kB/s | 64 kB 00:00 (17/399): perl-Storable-3.21-460.el9.x86_64.rpm 
1.0 MB/s | 98 kB 00:00 (18/399): python3-urllib-gssapi-1.0.2-4.el9.noa 259 kB/s | 24 kB 00:00 (19/399): boost-locale-1.75.0-8.el9.x86_64.rpm 2.3 MB/s | 216 kB 00:00 (20/399): boost-nowide-1.75.0-8.el9.x86_64.rpm 186 kB/s | 17 kB 00:00 (21/399): boost-thread-1.75.0-8.el9.x86_64.rpm 615 kB/s | 57 kB 00:00 (22/399): perl-Data-Dumper-2.174-462.el9.x86_64 683 kB/s | 59 kB 00:00 (23/399): perl-Exporter-5.74-461.el9.noarch.rpm 393 kB/s | 34 kB 00:00 (24/399): perl-File-Temp-0.231.100-4.el9.noarch 716 kB/s | 63 kB 00:00 (25/399): perl-Text-Tabs+Wrap-2013.0523-460.el9 292 kB/s | 25 kB 00:00 (26/399): perl-parent-0.238-460.el9.noarch.rpm 184 kB/s | 16 kB 00:00 (27/399): python3-pyasn1-modules-0.4.8-6.el9.no 2.8 MB/s | 283 kB 00:00 (28/399): xorriso-1.5.4-4.el9.x86_64.rpm 3.4 MB/s | 321 kB 00:00 (29/399): libisofs-1.5.4-4.el9.x86_64.rpm 2.3 MB/s | 224 kB 00:00 (30/399): perl-Net-SSLeay-1.92-2.el9.x86_64.rpm 4.1 MB/s | 392 kB 00:00 (31/399): perl-PathTools-3.78-461.el9.x86_64.rp 748 kB/s | 92 kB 00:00 (32/399): perl-IO-Socket-IP-0.41-5.el9.noarch.r 266 kB/s | 45 kB 00:00 (33/399): perl-Pod-Simple-3.42-4.el9.noarch.rpm 2.4 MB/s | 229 kB 00:00 (34/399): perl-Term-ANSIColor-5.01-461.el9.noar 560 kB/s | 51 kB 00:00 (35/399): perl-Text-ParseWords-3.30-460.el9.noa 186 kB/s | 18 kB 00:00 (36/399): perl-URI-5.09-3.el9.noarch.rpm 1.4 MB/s | 125 kB 00:00 (37/399): perl-libnet-3.13-4.el9.noarch.rpm 1.4 MB/s | 134 kB 00:00 (38/399): python3-lxml-4.6.5-3.el9.x86_64.rpm 8.6 MB/s | 1.2 MB 00:00 (39/399): boost-chrono-1.75.0-8.el9.x86_64.rpm 256 kB/s | 26 kB 00:00 (40/399): iptables-nft-services-1.8.8-6.el9_1.n 314 kB/s | 28 kB 00:00 (41/399): perl-Carp-1.50-460.el9.noarch.rpm 374 kB/s | 31 kB 00:00 (42/399): perl-Encode-3.08-462.el9.x86_64.rpm 14 MB/s | 1.7 MB 00:00 (43/399): perl-Pod-Escapes-1.07-460.el9.noarch. 169 kB/s | 22 kB 00:00 (44/399): python3-psutil-5.8.0-12.el9.x86_64.rp 2.3 MB/s | 218 kB 00:00 (45/399): libburn-1.5.4-4.el9.x86_64.rpm 1.8 MB/s | 175 kB 00:00 (46/399): perl-Term-Cap-1.17-460.el9.noarch.rpm 278 kB/s | 24 kB 00:00 (47/399): perl-Time-Local-1.300-7.el9.noarch.rp 438 kB/s | 37 kB 00:00 (48/399): perl-Mozilla-CA-20200520-6.el9.noarch 86 kB/s | 14 kB 00:00 (49/399): python3-jwcrypto-0.8-4.el9.noarch.rpm 809 kB/s | 76 kB 00:00 (50/399): python3-yubico-1.3.3-7.el9.noarch.rpm 861 kB/s | 73 kB 00:00 (51/399): sshpass-1.09-4.el9.x86_64.rpm 361 kB/s | 30 kB 00:00 (52/399): boost-filesystem-1.75.0-8.el9.x86_64. 
691 kB/s | 59 kB 00:00 (53/399): perl-Digest-MD5-2.58-4.el9.x86_64.rpm 446 kB/s | 39 kB 00:00 (54/399): boost-log-1.75.0-8.el9.x86_64.rpm 2.1 MB/s | 410 kB 00:00 (55/399): perl-MIME-Base64-3.16-4.el9.x86_64.rp 388 kB/s | 34 kB 00:00 (56/399): boost-program-options-1.75.0-8.el9.x8 479 kB/s | 108 kB 00:00 (57/399): boost-atomic-1.75.0-8.el9.x86_64.rpm 184 kB/s | 19 kB 00:00 (58/399): libisoburn-1.5.4-4.el9.x86_64.rpm 4.5 MB/s | 419 kB 00:00 (59/399): perl-Digest-1.19-4.el9.noarch.rpm 280 kB/s | 29 kB 00:00 (60/399): perl-File-Path-2.18-4.el9.noarch.rpm 411 kB/s | 38 kB 00:00 (61/399): perl-Pod-Perldoc-3.28.01-461.el9.noar 978 kB/s | 92 kB 00:00 (62/399): perl-Pod-Usage-2.01-4.el9.noarch.rpm 524 kB/s | 43 kB 00:00 (63/399): perl-Scalar-List-Utils-1.56-461.el9.x 853 kB/s | 77 kB 00:00 (64/399): perl-Socket-2.031-4.el9.x86_64.rpm 640 kB/s | 58 kB 00:00 (65/399): perl-constant-1.33-461.el9.noarch.rpm 280 kB/s | 25 kB 00:00 (66/399): perl-podlators-4.14-460.el9.noarch.rp 1.3 MB/s | 118 kB 00:00 (67/399): python3-PyMySQL-0.10.1-6.el9.noarch.r 1.3 MB/s | 112 kB 00:00 (68/399): python3-appdirs-1.4.4-4.el9.noarch.rp 287 kB/s | 27 kB 00:00 (69/399): python3-pyasn1-0.4.8-6.el9.noarch.rpm 1.7 MB/s | 163 kB 00:00 (70/399): python3-pyusb-1.0.2-13.el9.noarch.rpm 1.0 MB/s | 96 kB 00:00 (71/399): python3-qrcode-core-6.1-12.el9.noarch 728 kB/s | 61 kB 00:00 (72/399): python3-wcwidth-0.2.5-8.el9.noarch.rp 521 kB/s | 48 kB 00:00 (73/399): boost-regex-1.75.0-8.el9.x86_64.rpm 3.0 MB/s | 279 kB 00:00 (74/399): oniguruma-6.9.6-1.el9.5.x86_64.rpm 2.4 MB/s | 221 kB 00:00 (75/399): python3-alembic-1.7.5-3.el9.noarch.rp 8.3 MB/s | 900 kB 00:00 (76/399): python3-augeas-0.5.0-25.el9.noarch.rp 330 kB/s | 31 kB 00:00 (77/399): python3-gssapi-1.6.9-5.el9.x86_64.rpm 4.2 MB/s | 489 kB 00:00 (78/399): perl-AutoLoader-5.74-480.el9.noarch.r 248 kB/s | 23 kB 00:00 (79/399): openssl-perl-3.0.7-6.el9_2.x86_64.rpm 338 kB/s | 42 kB 00:00 (80/399): perl-B-1.80-480.el9.x86_64.rpm 2.1 MB/s | 185 kB 00:00 (81/399): perl-Class-Struct-0.66-480.el9.noarch 277 kB/s | 24 kB 00:00 (82/399): perl-Errno-1.30-480.el9.x86_64.rpm 198 kB/s | 17 kB 00:00 (83/399): perl-Fcntl-1.13-480.el9.x86_64.rpm 175 kB/s | 23 kB 00:00 (84/399): perl-File-Basename-2.85-480.el9.noarc 222 kB/s | 19 kB 00:00 (85/399): perl-File-stat-1.09-480.el9.noarch.rp 226 kB/s | 19 kB 00:00 (86/399): perl-FileHandle-2.03-480.el9.noarch.r 189 kB/s | 17 kB 00:00 (87/399): perl-Getopt-Std-1.12-480.el9.noarch.r 208 kB/s | 17 kB 00:00 (88/399): perl-IO-1.43-480.el9.x86_64.rpm 1.0 MB/s | 94 kB 00:00 (89/399): perl-IPC-Open3-1.21-480.el9.noarch.rp 295 kB/s | 25 kB 00:00 (90/399): perl-NDBM_File-1.15-480.el9.x86_64.rp 291 kB/s | 25 kB 00:00 (91/399): perl-POSIX-1.94-480.el9.x86_64.rpm 1.1 MB/s | 99 kB 00:00 (92/399): perl-SelectSaver-1.02-480.el9.noarch. 
156 kB/s | 13 kB 00:00 (93/399): perl-Symbol-1.08-480.el9.noarch.rpm 182 kB/s | 16 kB 00:00 (94/399): perl-base-2.27-480.el9.noarch.rpm 195 kB/s | 18 kB 00:00 (95/399): perl-if-0.60.800-480.el9.noarch.rpm 185 kB/s | 16 kB 00:00 (96/399): perl-interpreter-5.32.1-480.el9.x86_6 844 kB/s | 75 kB 00:00 (97/399): perl-libs-5.32.1-480.el9.x86_64.rpm 18 MB/s | 2.2 MB 00:00 (98/399): perl-mro-1.23-480.el9.x86_64.rpm 369 kB/s | 31 kB 00:00 (99/399): perl-overload-1.31-480.el9.noarch.rpm 547 kB/s | 47 kB 00:00 (100/399): perl-overloading-0.02-480.el9.noarch 167 kB/s | 15 kB 00:00 (101/399): perl-subs-1.03-480.el9.noarch.rpm 161 kB/s | 13 kB 00:00 (102/399): perl-vars-1.05-480.el9.noarch.rpm 178 kB/s | 15 kB 00:00 (103/399): python3-ldap-3.4.3-2.el9.x86_64.rpm 2.8 MB/s | 259 kB 00:00 (104/399): python3-mako-1.1.4-6.el9.noarch.rpm 1.9 MB/s | 176 kB 00:00 (105/399): augeas-libs-1.13.0-3.el9.x86_64.rpm 4.9 MB/s | 459 kB 00:00 (106/399): ipa-selinux-4.10.1-6.el9.noarch.rpm 449 kB/s | 40 kB 00:00 (107/399): jq-1.6-14.el9.x86_64.rpm 2.1 MB/s | 190 kB 00:00 (108/399): libselinux-ruby-3.5-1.el9.x86_64.rpm 637 kB/s | 59 kB 00:00 (109/399): ansible-freeipa-1.9.2-3.el9_2.noarch 2.7 MB/s | 452 kB 00:00 (110/399): ipa-client-common-4.10.1-12.el9_2.2. 319 kB/s | 44 kB 00:00 (111/399): rhel-system-roles-1.21.2-1.el9_2.noa 12 MB/s | 2.4 MB 00:00 (112/399): ipa-common-4.10.1-12.el9_2.2.noarch. 6.7 MB/s | 656 kB 00:00 (113/399): python3-ipaclient-4.10.1-12.el9_2.2. 6.1 MB/s | 648 kB 00:00 (114/399): python3-ipalib-4.10.1-12.el9_2.2.noa 6.1 MB/s | 663 kB 00:00 (115/399): perl-HTTP-Tiny-0.076-461.el9_2.noarc 563 kB/s | 57 kB 00:00 (116/399): ruby-3.0.4-161.el9_2.2.x86_64.rpm 487 kB/s | 41 kB 00:00 (117/399): skopeo-1.11.2-0.1.el9_2.2.x86_64.rpm 38 MB/s | 7.9 MB 00:00 (118/399): ruby-default-gems-3.0.4-161.el9_2.2. 378 kB/s | 46 kB 00:00 (119/399): ruby-libs-3.0.4-161.el9_2.2.x86_64.r 23 MB/s | 3.3 MB 00:00 (120/399): rubygem-bigdecimal-3.0.0-161.el9_2.2 656 kB/s | 55 kB 00:00 (121/399): rubygem-bundler-2.2.33-161.el9_2.2.n 4.6 MB/s | 456 kB 00:00 (122/399): rubygem-io-console-0.5.7-161.el9_2.2 288 kB/s | 26 kB 00:00 (123/399): rubygem-json-2.5.1-161.el9_2.2.x86_6 707 kB/s | 59 kB 00:00 (124/399): rubygem-psych-3.3.2-161.el9_2.2.x86_ 705 kB/s | 60 kB 00:00 (125/399): rubygem-rdoc-6.3.3-161.el9_2.2.noarc 4.5 MB/s | 441 kB 00:00 (126/399): rubygem-rexml-3.2.5-161.el9_2.2.noar 1.3 MB/s | 110 kB 00:00 (127/399): rubygems-3.2.33-161.el9_2.2.noarch.r 3.1 MB/s | 306 kB 00:00 (128/399): libxslt-1.1.34-11.el9_2.x86_64.rpm 2.6 MB/s | 245 kB 00:00 (129/399): qemu-img-7.2.0-14.el9_2.18.x86_64.rp 18 MB/s | 2.2 MB 00:00 (130/399): buildah-1.29.5-1.el9_2.x86_64.rpm 39 MB/s | 8.6 MB 00:00 (131/399): ansible-collection-ansible-netcommon 1.4 MB/s | 181 kB 00:00 (132/399): ansible-collection-ansible-posix-1.2 1.4 MB/s | 124 kB 00:00 (133/399): ansible-collection-ansible-utils-2.3 990 kB/s | 91 kB 00:00 (134/399): ansible-collection-community-general 12 MB/s | 1.3 MB 00:00 (135/399): golang-github-vbatts-tar-split-0.11. 
10 MB/s | 1.0 MB 00:00 (136/399): heat-cfntools-1.4.2-6.1.el9ost.noarc 910 kB/s | 77 kB 00:00 (137/399): libsodium-1.0.18-7.el9ost.x86_64.rpm 1.7 MB/s | 162 kB 00:00 (138/399): openpgm-5.2.122-26.el9ost.x86_64.rpm 2.0 MB/s | 178 kB 00:00 (139/399): puppet-fdio-18.2-1.20220727113954.6f 306 kB/s | 25 kB 00:00 (140/399): python3-ansible-runner-2.0.0a1-3.el9 1.4 MB/s | 122 kB 00:00 (141/399): python3-bcrypt-3.1.7-7.el9ost.x86_64 539 kB/s | 44 kB 00:00 (142/399): python3-beautifulsoup4-4.9.3-2.el9os 1.9 MB/s | 204 kB 00:00 (143/399): python3-croniter-0.3.35-2.el9ost.noa 573 kB/s | 49 kB 00:00 (144/399): python3-daemon-2.3.0-1.el9ost.noarch 451 kB/s | 39 kB 00:00 (145/399): python3-docutils-0.16-6.el9ost.noarc 14 MB/s | 1.5 MB 00:00 (146/399): python3-etcd3gw-0.2.6-2.el9ost.noarc 474 kB/s | 44 kB 00:00 (147/399): python3-gevent-21.1.2-2.el9ost.x86_6 14 MB/s | 1.6 MB 00:00 (148/399): python3-gitdb-4.0.5-2.el9ost.noarch. 1.2 MB/s | 107 kB 00:00 (149/399): python3-jeepney-0.6.0-2.el9ost.noarc 3.0 MB/s | 284 kB 00:00 (150/399): python3-jmespath-0.10.0-1.el9ost.noa 545 kB/s | 46 kB 00:00 (151/399): python3-lockfile-0.12.2-2.el9ost.noa 377 kB/s | 31 kB 00:00 (152/399): python3-logutils-0.3.5-15.el9ost.noa 562 kB/s | 47 kB 00:00 (153/399): python3-memcached-1.59-3.el9ost.noar 489 kB/s | 40 kB 00:00 (154/399): python3-mimeparse-1.6.0-16.el9ost.no 165 kB/s | 16 kB 00:00 (155/399): python3-natsort-7.1.1-2.el9ost.noarc 672 kB/s | 59 kB 00:00 (156/399): python3-migrate-0.13.0-1.el9ost.noar 1.8 MB/s | 224 kB 00:00 (157/399): python3-passlib-1.7.4-3.el9ost.noarc 7.5 MB/s | 712 kB 00:00 (158/399): python3-pecan-1.4.0-2.el9ost.noarch. 2.8 MB/s | 273 kB 00:00 (159/399): python3-pyOpenSSL-20.0.1-2.el9ost.no 883 kB/s | 90 kB 00:00 (160/399): python3-pynacl-1.4.0-1.el9ost.x86_64 1.3 MB/s | 111 kB 00:00 (161/399): python3-pyperclip-1.8.0-3.el9ost.noa 303 kB/s | 25 kB 00:00 (162/399): python3-setproctitle-1.2.2-1.el9ost. 274 kB/s | 23 kB 00:00 (163/399): python3-smmap-3.0.1-4.el9ost.noarch. 574 kB/s | 47 kB 00:00 (164/399): python3-soupsieve-2.2-1.el9ost.noarc 771 kB/s | 67 kB 00:00 (165/399): python3-sqlalchemy13-1.3.24-3.el9ost 16 MB/s | 1.7 MB 00:00 (166/399): python3-tenacity-6.3.1-1.el9ost.noar 585 kB/s | 51 kB 00:00 (167/399): python3-webtest-2.0.35-6.el9ost.noar 888 kB/s | 82 kB 00:00 (168/399): python3-zmq-22.0.3-1.el9ost.x86_64.r 4.2 MB/s | 396 kB 00:00 (169/399): python3-zope-event-4.2.0-20.el9ost.n 2.2 MB/s | 205 kB 00:00 (170/399): python3-zope-interface-5.4.0-1.el9os 2.2 MB/s | 202 kB 00:00 (171/399): rubygem-concurrent-ruby-1.1.5-2.el9o 2.5 MB/s | 243 kB 00:00 (172/399): rubygem-deep_merge-1.2.1-7.el9ost.no 154 kB/s | 14 kB 00:00 (173/399): rubygem-fast_gettext-1.2.0-9.el9ost. 377 kB/s | 34 kB 00:00 (174/399): rubygem-hocon-1.3.1-2.el9ost.noarch. 958 kB/s | 90 kB 00:00 (175/399): rubygem-multi_json-1.15.0-2.el9ost.n 261 kB/s | 22 kB 00:00 (176/399): rubygem-puppet-resource_api-1.8.13-1 409 kB/s | 34 kB 00:00 (177/399): rubygem-ruby-shadow-2.5.0-15.el9ost. 194 kB/s | 16 kB 00:00 (178/399): rubygem-semantic_puppet-1.0.4-2.el9o 220 kB/s | 21 kB 00:00 (179/399): zeromq-4.3.4-1.el9ost.x86_64.rpm 4.6 MB/s | 433 kB 00:00 (180/399): ansible-config_template-2.0.1-17.1.2 336 kB/s | 29 kB 00:00 (181/399): ansible-role-atos-hsm-1.0.1-17.1.202 208 kB/s | 17 kB 00:00 (182/399): ansible-role-chrony-1.3.1-17.1.20230 267 kB/s | 22 kB 00:00 (183/399): ansible-role-collectd-config-0.0.3-1 572 kB/s | 53 kB 00:00 (184/399): ansible-role-container-registry-1.4. 
253 kB/s | 24 kB 00:00 (185/399): ansible-role-openstack-operations-0. 335 kB/s | 30 kB 00:00 (186/399): ansible-role-qdr-config-0.0.2-17.1.2 244 kB/s | 21 kB 00:00 (187/399): ansible-role-redhat-subscription-1.3 338 kB/s | 28 kB 00:00 (188/399): ansible-role-thales-hsm-3.0.1-17.1.2 237 kB/s | 21 kB 00:00 (189/399): ansible-role-tripleo-modify-image-1. 310 kB/s | 27 kB 00:00 (190/399): ansible-tripleo-ipa-0.3.1-17.1.20230 414 kB/s | 36 kB 00:00 (191/399): ansible-tripleo-ipsec-11.0.1-17.1.20 441 kB/s | 38 kB 00:00 (192/399): dib-utils-0.0.11-17.1.20230620173328 146 kB/s | 12 kB 00:00 (193/399): openstack-heat-agents-2.2.1-17.1.202 111 kB/s | 9.6 kB 00:00 (194/399): os-apply-config-13.1.1-17.1.20231020 618 kB/s | 54 kB 00:00 (195/399): os-collect-config-13.1.1-17.1.202310 567 kB/s | 50 kB 00:00 (196/399): os-refresh-config-13.1.1-17.1.202310 251 kB/s | 21 kB 00:00 (197/399): puppet-aodh-18.4.2-17.1.202306201512 717 kB/s | 62 kB 00:00 (198/399): puppet-archive-4.6.1-17.1.2023062018 476 kB/s | 44 kB 00:00 (199/399): puppet-auditd-2.2.1-17.1.20230620192 341 kB/s | 28 kB 00:00 (200/399): puppet-ceilometer-18.4.3-17.1.202309 806 kB/s | 70 kB 00:00 (201/399): puppet-certmonger-2.7.1-17.1.2023062 242 kB/s | 26 kB 00:00 (202/399): puppet-cinder-18.5.2-17.1.2023062105 1.3 MB/s | 120 kB 00:00 (203/399): puppet-collectd-13.0.1-17.1.20230620 920 kB/s | 135 kB 00:00 (204/399): puppet-corosync-8.0.1-17.1.202306211 762 kB/s | 68 kB 00:00 (205/399): puppet-designate-18.6.1-17.1.2023062 772 kB/s | 69 kB 00:00 (206/399): puppet-dns-8.2.1-17.1.20230621002045 427 kB/s | 36 kB 00:00 (207/399): puppet-gnocchi-18.4.3-17.1.202306210 655 kB/s | 58 kB 00:00 (208/399): puppet-heat-18.4.1-17.1.202306211116 819 kB/s | 70 kB 00:00 (209/399): puppet-kmod-2.5.0-17.1.2023062019064 240 kB/s | 21 kB 00:00 (210/399): puppet-etcd-1.12.3-17.1.202306201932 122 kB/s | 25 kB 00:00 (211/399): puppet-manila-18.5.2-17.1.2023110219 946 kB/s | 81 kB 00:00 (212/399): puppet-memcached-6.0.0-17.1.20230620 262 kB/s | 23 kB 00:00 (213/399): puppet-mysql-10.6.1-17.1.20230621022 1.1 MB/s | 99 kB 00:00 (214/399): puppet-neutron-18.6.1-17.1.202306210 2.2 MB/s | 196 kB 00:00 (215/399): puppet-nova-18.6.1-17.1.202306210904 2.1 MB/s | 200 kB 00:00 (216/399): puppet-nssdb-1.0.2-17.1.202306201856 137 kB/s | 12 kB 00:00 (217/399): puppet-octavia-18.5.1-17.1.202306210 888 kB/s | 76 kB 00:00 (218/399): puppet-openstack_extras-18.5.1-17.1. 
535 kB/s | 48 kB 00:00 (219/399): puppet-openstacklib-18.5.2-17.1.2023 711 kB/s | 62 kB 00:00 (220/399): puppet-oslo-18.5.1-17.1.202306210523 563 kB/s | 48 kB 00:00 (221/399): puppet-placement-5.4.3-17.1.20230621 459 kB/s | 41 kB 00:00 (222/399): puppet-qdr-7.4.1-17.1.20230620195605 352 kB/s | 31 kB 00:00 (223/399): puppet-rabbitmq-11.0.1-17.1.20230620 2.9 MB/s | 266 kB 00:00 (224/399): puppet-redis-6.1.1-17.1.202306201917 583 kB/s | 51 kB 00:00 (225/399): puppet-rsyslog-4.0.1-17.1.2023062020 672 kB/s | 58 kB 00:00 (226/399): puppet-ssh-6.2.1-17.1.20230620190251 325 kB/s | 29 kB 00:00 (227/399): puppet-stdlib-6.3.1-17.1.20230621000 1.8 MB/s | 160 kB 00:00 (228/399): puppet-swift-18.6.1-17.1.20230621052 1.2 MB/s | 115 kB 00:00 (229/399): puppet-systemd-2.12.1-17.1.202306201 468 kB/s | 42 kB 00:00 (230/399): puppet-vswitch-14.4.3-17.1.202306210 486 kB/s | 41 kB 00:00 (231/399): puppet-xinetd-3.3.1-17.1.20230620185 294 kB/s | 26 kB 00:00 (232/399): python-oslo-cache-lang-2.7.1-17.1.20 207 kB/s | 19 kB 00:00 (233/399): python-oslo-log-lang-4.4.0-17.1.2023 156 kB/s | 14 kB 00:00 (234/399): python-oslo-middleware-lang-4.2.1-17 132 kB/s | 13 kB 00:00 (235/399): python-oslo-policy-lang-3.7.1-17.1.2 155 kB/s | 14 kB 00:00 (236/399): python-oslo-versionedobjects-lang-2. 136 kB/s | 14 kB 00:00 (237/399): python-pycadf-common-3.1.1-17.1.2023 131 kB/s | 15 kB 00:00 (238/399): python3-GitPython-3.1.14-2.el9ost.no 2.2 MB/s | 238 kB 00:00 (239/399): python3-aodhclient-2.2.0-17.1.202306 480 kB/s | 56 kB 00:00 (240/399): python3-barbicanclient-5.3.0-17.1.20 1.3 MB/s | 127 kB 00:00 (241/399): python3-cinderclient-7.4.1-17.1.2023 2.1 MB/s | 218 kB 00:00 (242/399): python3-cliff-3.7.0-17.1.20230620172 1.0 MB/s | 95 kB 00:00 (243/399): python3-designateclient-4.2.1-17.1.2 935 kB/s | 92 kB 00:00 (244/399): python3-futurist-2.3.0-17.1.20230621 736 kB/s | 64 kB 00:00 (245/399): python3-heat-agent-2.2.1-17.1.202306 205 kB/s | 19 kB 00:00 (246/399): python3-heat-agent-ansible-2.2.1-17. 128 kB/s | 11 kB 00:00 (247/399): python3-heat-agent-apply-config-2.2. 113 kB/s | 9.9 kB 00:00 (248/399): python3-heat-agent-docker-cmd-2.2.1- 141 kB/s | 12 kB 00:00 (249/399): python3-heat-agent-hiera-2.2.1-17.1. 126 kB/s | 10 kB 00:00 (250/399): python3-heat-agent-json-file-2.2.1-1 111 kB/s | 9.8 kB 00:00 (251/399): python3-heat-agent-puppet-2.2.1-17.1 130 kB/s | 11 kB 00:00 (252/399): python3-heatclient-2.3.1-17.1.202306 1.8 MB/s | 158 kB 00:00 (253/399): python3-ironic-inspector-client-4.5. 760 kB/s | 67 kB 00:00 (254/399): python3-ironicclient-4.6.4-17.1.2023 3.5 MB/s | 342 kB 00:00 (255/399): python3-keystoneclient-4.3.0-17.1.20 2.7 MB/s | 239 kB 00:00 (256/399): python3-keystonemiddleware-9.2.0-17. 1.0 MB/s | 92 kB 00:00 (257/399): python3-magnumclient-3.4.1-17.1.2023 1.4 MB/s | 121 kB 00:00 (258/399): python3-mistralclient-4.2.0-17.1.202 1.8 MB/s | 166 kB 00:00 (259/399): python3-novaclient-17.4.1-17.1.20230 2.0 MB/s | 201 kB 00:00 (260/399): python3-os-client-config-2.1.0-17.1. 617 kB/s | 55 kB 00:00 (261/399): python3-neutronclient-7.3.1-17.1.202 1.8 MB/s | 298 kB 00:00 (262/399): python3-os-service-types-1.7.0-17.1. 
415 kB/s | 38 kB 00:00 (263/399): python3-os-traits-2.5.0-17.1.2023062 481 kB/s | 45 kB 00:00 (264/399): python3-osc-lib-2.3.1-17.1.202306202 837 kB/s | 78 kB 00:00 (265/399): python3-oslo-cache-2.7.1-17.1.202306 590 kB/s | 56 kB 00:00 (266/399): python3-oslo-context-3.2.1-17.1.2023 303 kB/s | 27 kB 00:00 (267/399): python3-oslo-log-4.4.0-17.1.20230620 656 kB/s | 64 kB 00:00 (268/399): python3-oslo-middleware-4.2.1-17.1.2 589 kB/s | 55 kB 00:00 (269/399): python3-oslo-reports-2.2.0-17.1.2023 674 kB/s | 59 kB 00:00 (270/399): python3-oslo-policy-3.7.1-17.1.20230 678 kB/s | 76 kB 00:00 (271/399): python3-oslo-rootwrap-6.3.1-17.1.202 488 kB/s | 46 kB 00:00 (272/399): python3-oslo-serialization-4.1.1-17. 369 kB/s | 34 kB 00:00 (273/399): python3-oslo-upgradecheck-1.3.1-17.1 325 kB/s | 29 kB 00:00 (274/399): python3-oslo-versionedobjects-2.4.1- 851 kB/s | 79 kB 00:00 (275/399): python3-osprofiler-3.4.0-17.1.202306 1.4 MB/s | 130 kB 00:00 (276/399): python3-pycadf-3.1.1-17.1.2023062021 571 kB/s | 53 kB 00:00 (277/399): python3-requestsexceptions-1.4.0-17. 151 kB/s | 17 kB 00:00 (278/399): python3-saharaclient-3.3.0-17.1.2023 1.7 MB/s | 188 kB 00:00 (279/399): python3-shade-1.33.0-17.1.2023062022 4.5 MB/s | 523 kB 00:00 (280/399): python3-statsd-3.2.1-20.1.el9ost.noa 363 kB/s | 35 kB 00:00 (281/399): python3-swiftclient-3.11.1-17.1.2023 1.4 MB/s | 136 kB 00:00 (282/399): python3-tinyrpc-1.0.3-6.el9ost.noarc 507 kB/s | 50 kB 00:00 (283/399): python3-troveclient-7.0.0-17.1.20230 3.4 MB/s | 344 kB 00:00 (284/399): python3-validations-libs-1.9.1-17.1. 1.4 MB/s | 137 kB 00:00 (285/399): validations-common-1.9.1-17.1.202310 1.1 MB/s | 111 kB 00:00 (286/399): python3-paramiko-2.11.0-2.el9ost.noa 2.9 MB/s | 299 kB 00:00 (287/399): ansible-pacemaker-1.0.4-17.1.2023121 231 kB/s | 21 kB 00:00 (288/399): python3-werkzeug-2.0.1-7.el9ost.noar 3.8 MB/s | 427 kB 00:00 (289/399): python3-manilaclient-2.6.4-17.1.2024 5.3 MB/s | 528 kB 00:00 (290/399): python3-sqlparse-0.4.1-2.el9ost.noar 884 kB/s | 83 kB 00:00 (291/399): python3-zaqarclient-2.4.0-17.1.20230 296 kB/s | 129 kB 00:00 (292/399): python3-webob-1.8.7-2.1.el9ost.noarc 2.5 MB/s | 230 kB 00:00 (293/399): ansible-role-metalsmith-deployment-1 237 kB/s | 21 kB 00:00 (294/399): openstack-selinux-0.8.37-17.1.202311 2.3 MB/s | 221 kB 00:00 (295/399): python-oslo-db-lang-8.5.2-17.1.20240 163 kB/s | 14 kB 00:00 (296/399): puppet-ironic-18.7.1-17.1.2024030521 1.2 MB/s | 121 kB 00:00 (297/399): python3-glanceclient-3.3.0-17.1.2024 1.2 MB/s | 143 kB 00:00 (298/399): python3-keystoneauth1-4.4.0-17.1.202 4.1 MB/s | 409 kB 00:00 (299/399): python3-metalsmith-1.4.4-17.1.202405 684 kB/s | 65 kB 00:00 (300/399): python3-neutron-lib-2.10.3-17.1.2023 3.9 MB/s | 375 kB 00:00 (301/399): python3-octaviaclient-2.3.1-17.1.202 1.0 MB/s | 91 kB 00:00 (302/399): python3-oslo-db-8.5.2-17.1.202408201 1.4 MB/s | 144 kB 00:00 (303/399): python3-oslo-service-2.5.1-17.1.2024 788 kB/s | 71 kB 00:00 (304/399): python3-oslo-messaging-12.7.3-17.1.2 2.0 MB/s | 216 kB 00:00 (305/399): python3-rhosp-openvswitch-3.3-1.el9o 95 kB/s | 8.3 kB 00:00 (306/399): python3-waitress-2.0.0-4.el9ost.noar 1.1 MB/s | 109 kB 00:00 (307/399): openstack-tripleo-validations-14.3.2 2.1 MB/s | 204 kB 00:00 (308/399): openstack-ansible-core-2.14.2-4.7.el 17 MB/s | 2.3 MB 00:00 (309/399): puppet-glance-18.6.1-17.1.2025022015 991 kB/s | 87 kB 00:00 (310/399): puppet-ovn-18.6.1-17.1.2024092509490 330 kB/s | 29 kB 00:00 (311/399): puppet-pacemaker-1.5.1-17.1.20250214 1.7 MB/s | 158 kB 00:00 (312/399): 
puppet-tripleo-14.2.3-17.1.202503201 2.6 MB/s | 239 kB 00:00 (313/399): python3-os-ken-1.4.1-17.1.2024120509 17 MB/s | 2.1 MB 00:00 (314/399): ansible-collection-containers-podman 2.7 MB/s | 243 kB 00:00 (315/399): ansible-collections-openstack-1.9.1- 1.6 MB/s | 146 kB 00:00 (316/399): cpp-hocon-0.3.0-7.el9ost.x86_64.rpm 3.8 MB/s | 351 kB 00:00 (317/399): facter-3.14.19-3.el9ost.x86_64.rpm 5.2 MB/s | 518 kB 00:00 (318/399): hiera-3.6.0-6.el9ost.noarch.rpm 382 kB/s | 33 kB 00:00 (319/399): leatherman-1.12.6-5.el9ost.x86_64.rp 4.1 MB/s | 405 kB 00:00 (320/399): puppet-7.10.0-4.el9ost.noarch.rpm 118 kB/s | 11 kB 00:00 (321/399): puppet-headless-7.10.0-4.el9ost.noar 13 MB/s | 1.4 MB 00:00 (322/399): python3-amqp-5.0.6-5.el9ost.noarch.r 995 kB/s | 92 kB 00:00 (323/399): python3-boto-2.45.0-8.1.el9ost.noarc 12 MB/s | 1.3 MB 00:00 (324/399): python3-cachetools-4.2.2-3.el9ost.no 396 kB/s | 33 kB 00:00 (325/399): python3-cmd2-1.4.0-4.el9ost.noarch.r 2.3 MB/s | 284 kB 00:00 (326/399): python3-colorama-0.4.4-4.el9ost.noar 389 kB/s | 35 kB 00:00 (327/399): python3-dogpile-cache-1.1.5-5.el9ost 1.0 MB/s | 89 kB 00:00 (328/399): python3-extras-1.0.0-17.el9ost.noarc 231 kB/s | 21 kB 00:00 (329/399): python3-fixtures-3.0.0-24.el9ost.noa 1.0 MB/s | 93 kB 00:00 (330/399): python3-keyring-21.8.0-4.el9ost.noar 878 kB/s | 77 kB 00:00 (331/399): python3-kombu-5.0.2-3.el9ost.noarch. 3.4 MB/s | 313 kB 00:00 (332/399): python3-msgpack-1.0.2-4.el9ost.x86_6 987 kB/s | 87 kB 00:00 (333/399): python3-munch-2.5.0-6.el9ost.noarch. 272 kB/s | 25 kB 00:00 (334/399): python3-openstacksdk-0.55.1-17.1.202 7.0 MB/s | 735 kB 00:00 (335/399): python3-paste-3.5.0-5.el9ost.noarch. 6.9 MB/s | 776 kB 00:00 (336/399): python3-paste-deploy-2.1.1-4.el9ost. 470 kB/s | 41 kB 00:00 (337/399): python3-pystache-0.5.4-15.el9ost.noa 1.4 MB/s | 132 kB 00:00 (338/399): python3-routes-2.4.1-14.el9ost.noarc 2.0 MB/s | 188 kB 00:00 (339/399): python3-rsa-4.6-3.1.el9ost.noarch.rp 632 kB/s | 57 kB 00:00 (340/399): python3-secretstorage-3.3.1-3.el9ost 421 kB/s | 36 kB 00:00 (341/399): python3-repoze-lru-0.7-12.el9ost.noa 153 kB/s | 32 kB 00:00 (342/399): python3-simplejson-3.17.5-3.el9ost.x 2.8 MB/s | 266 kB 00:00 (343/399): python3-tempita-0.5.1-27.el9ost.noar 410 kB/s | 36 kB 00:00 (344/399): python3-testtools-2.4.0-10.el9ost.no 3.4 MB/s | 333 kB 00:00 (345/399): python3-vine-5.0.0-5.el9ost.noarch.r 289 kB/s | 26 kB 00:00 (346/399): python3-warlock-1.3.3-7.el9ost.noarc 241 kB/s | 21 kB 00:00 (347/399): python3-yappi-1.3.1-5.el9ost.x86_64. 661 kB/s | 56 kB 00:00 (348/399): python3-yaql-1.1.3-13.el9ost.noarch. 2.0 MB/s | 180 kB 00:00 (349/399): ruby-augeas-0.5.0-29.el9ost.x86_64.r 326 kB/s | 29 kB 00:00 (350/399): ruby-facter-3.14.19-3.el9ost.x86_64. 111 kB/s | 9.5 kB 00:00 (351/399): yaml-cpp-0.6.3-6.el9ost.x86_64.rpm 1.3 MB/s | 123 kB 00:00 (352/399): ansible-role-lunasa-hsm-1.1.1-17.1.2 219 kB/s | 19 kB 00:00 (353/399): openstack-heat-api-16.1.1-17.1.20250 194 kB/s | 16 kB 00:00 (354/399): openstack-heat-monolith-16.1.1-17.1. 
187 kB/s | 16 kB 00:00 (355/399): openstack-heat-common-16.1.1-17.1.20 12 MB/s | 1.6 MB 00:00 (356/399): openstack-heat-engine-16.1.1-17.1.20 93 kB/s | 15 kB 00:00 (357/399): puppet-barbican-18.4.2-17.1.20250625 640 kB/s | 57 kB 00:00 (358/399): python-openstackclient-lang-5.5.2-17 652 kB/s | 58 kB 00:00 (359/399): python3-openstackclient-5.5.2-17.1.2 10 MB/s | 1.1 MB 00:00 (360/399): openstack-tripleo-common-15.4.1-17.1 552 kB/s | 48 kB 00:00 (361/399): openstack-tripleo-common-containers- 851 kB/s | 74 kB 00:00 (362/399): puppet-apache-6.5.2-17.1.20250811150 2.7 MB/s | 253 kB 00:00 (363/399): puppet-concat-6.2.1-17.1.20250725131 487 kB/s | 44 kB 00:00 (364/399): puppet-firewall-3.4.1-17.1.202508041 836 kB/s | 73 kB 00:00 (365/399): puppet-haproxy-4.2.2-17.1.2025072511 635 kB/s | 55 kB 00:00 (366/399): puppet-inifile-4.2.1-17.1.2025080415 525 kB/s | 46 kB 00:00 (367/399): puppet-git-0.5.0-17.1.20250725114148 176 kB/s | 26 kB 00:00 (368/399): puppet-ipaclient-2.5.2-17.1.20250725 246 kB/s | 21 kB 00:00 (369/399): puppet-module-data-0.5.1-17.1.202507 183 kB/s | 16 kB 00:00 (370/399): puppet-remote-10.0.0-17.1.2025072511 227 kB/s | 19 kB 00:00 (371/399): puppet-keepalived-0.0.2-17.1.2025072 86 kB/s | 17 kB 00:00 (372/399): puppet-rsync-1.1.4-17.1.202508041515 308 kB/s | 28 kB 00:00 (373/399): puppet-snmp-3.9.1-17.1.2025072511575 443 kB/s | 38 kB 00:00 (374/399): puppet-sysctl-0.0.13-17.1.2025072511 205 kB/s | 19 kB 00:00 (375/399): puppet-vcsrepo-3.1.1-17.1.2025072511 604 kB/s | 53 kB 00:00 (376/399): python3-tripleo-common-15.4.1-17.1.2 2.1 MB/s | 198 kB 00:00 (377/399): python3-tripleoclient-16.5.1-17.1.20 5.8 MB/s | 552 kB 00:00 (378/399): tripleo-ansible-3.3.1-17.1.202508040 5.9 MB/s | 569 kB 00:00 (379/399): openstack-tripleo-heat-templates-14. 5.9 MB/s | 611 kB 00:00 (380/399): puppet-horizon-18.6.1-17.1.202510081 629 kB/s | 59 kB 00:00 (381/399): puppet-keystone-18.6.1-17.1.20251008 1.4 MB/s | 126 kB 00:00 (382/399): libipa_hbac-2.8.2-5.el9_2.5.x86_64.r 383 kB/s | 36 kB 00:00 (383/399): python3-libipa_hbac-2.8.2-5.el9_2.5. 342 kB/s | 30 kB 00:00 (384/399): python3-sss-murmur-2.8.2-5.el9_2.5.x 218 kB/s | 19 kB 00:00 (385/399): device-mapper-event-1.02.187-7.el9_2 360 kB/s | 32 kB 00:00 (386/399): device-mapper-event-libs-1.02.187-7. 354 kB/s | 31 kB 00:00 (387/399): lvm2-2.03.17-7.el9_2.2.x86_64.rpm 14 MB/s | 1.5 MB 00:00 (388/399): lvm2-libs-2.03.17-7.el9_2.2.x86_64.r 9.0 MB/s | 1.0 MB 00:00 (389/399): libunwind-1.6.2-1.el9cp.x86_64.rpm 564 kB/s | 68 kB 00:00 (390/399): cephadm-18.2.1-361.el9cp.noarch.rpm 3.4 MB/s | 354 kB 00:00 (391/399): libfdisk-2.37.4-11.el9_2.x86_64.rpm 1.7 MB/s | 156 kB 00:00 (392/399): libmount-2.37.4-11.el9_2.x86_64.rpm 1.5 MB/s | 136 kB 00:00 (393/399): libuuid-2.37.4-11.el9_2.x86_64.rpm 344 kB/s | 30 kB 00:00 (394/399): util-linux-2.37.4-11.el9_2.x86_64.rp 19 MB/s | 2.3 MB 00:00 (395/399): util-linux-core-2.37.4-11.el9_2.x86_ 4.7 MB/s | 464 kB 00:00 (396/399): libblkid-2.37.4-11.el9_2.x86_64.rpm 1.2 MB/s | 109 kB 00:00 (397/399): libsmartcols-2.37.4-11.el9_2.x86_64. 727 kB/s | 65 kB 00:00 (398/399): device-mapper-1.02.187-7.el9_2.2.x86 1.5 MB/s | 142 kB 00:00 (399/399): device-mapper-libs-1.02.187-7.el9_2. 1.8 MB/s | 176 kB 00:00 -------------------------------------------------------------------------------- Total 7.4 MB/s | 97 MB 00:13 Running transaction check Transaction check succeeded. Running transaction test Transaction test succeeded. 
Running transaction Running scriptlet: rhel-system-roles-1.21.2-1.el9_2.noarch 1/1 Preparing : 1/1 Installing : ruby-libs-3.0.4-161.el9_2.2.x86_64 1/408 Installing : python3-webob-1.8.7-2.1.el9ost.noarch 2/408 Installing : rubygem-bigdecimal-3.0.0-161.el9_2.2.x86_64 3/408 Installing : ruby-default-gems-3.0.4-161.el9_2.2.noarch 4/408 Installing : rubygem-bundler-2.2.33-161.el9_2.2.noarch 5/408 Installing : rubygem-io-console-0.5.7-161.el9_2.2.x86_64 6/408 Installing : rubygem-json-2.5.1-161.el9_2.2.x86_64 7/408 Installing : rubygem-psych-3.3.2-161.el9_2.2.x86_64 8/408 Installing : rubygem-rdoc-6.3.3-161.el9_2.2.noarch 9/408 Installing : rubygems-3.2.33-161.el9_2.2.noarch 10/408 Installing : ruby-3.0.4-161.el9_2.2.x86_64 11/408 Installing : python3-simplejson-3.17.5-3.el9ost.x86_64 12/408 Installing : python3-oslo-context-3.2.1-17.1.20230620204857.b 13/408 Upgrading : libuuid-2.37.4-11.el9_2.x86_64 14/408 Upgrading : libblkid-2.37.4-11.el9_2.x86_64 15/408 Running scriptlet: libblkid-2.37.4-11.el9_2.x86_64 15/408 Installing : python3-sqlalchemy13-1.3.24-3.el9ost.x86_64 16/408 Installing : python3-pyasn1-0.4.8-6.el9.noarch 17/408 Installing : python3-psutil-5.8.0-12.el9.x86_64 18/408 Installing : boost-regex-1.75.0-8.el9.x86_64 19/408 Installing : boost-system-1.75.0-8.el9.x86_64 20/408 Installing : boost-filesystem-1.75.0-8.el9.x86_64 21/408 Installing : libaio-0.3.111-13.el9.x86_64 22/408 Installing : boost-thread-1.75.0-8.el9.x86_64 23/408 Installing : augeas-libs-1.13.0-3.el9.x86_64 24/408 Installing : python3-mako-1.1.4-6.el9.noarch 25/408 Installing : python3-dogpile-cache-1.1.5-5.el9ost.noarch 26/408 Installing : boost-chrono-1.75.0-8.el9.x86_64 27/408 Installing : boost-locale-1.75.0-8.el9.x86_64 28/408 Installing : python3-pyasn1-modules-0.4.8-6.el9.noarch 29/408 Upgrading : libmount-2.37.4-11.el9_2.x86_64 30/408 Installing : perl-Digest-1.19-4.el9.noarch 31/408 Installing : perl-Digest-MD5-2.58-4.el9.x86_64 32/408 Installing : perl-B-1.80-480.el9.x86_64 33/408 Installing : perl-FileHandle-2.03-480.el9.noarch 34/408 Installing : perl-Data-Dumper-2.174-462.el9.x86_64 35/408 Installing : perl-libnet-3.13-4.el9.noarch 36/408 Installing : perl-AutoLoader-5.74-480.el9.noarch 37/408 Installing : perl-base-2.27-480.el9.noarch 38/408 Installing : perl-URI-5.09-3.el9.noarch 39/408 Installing : perl-Time-Local-2:1.300-7.el9.noarch 40/408 Installing : perl-Mozilla-CA-20200520-6.el9.noarch 41/408 Installing : perl-if-0.60.800-480.el9.noarch 42/408 Installing : perl-IO-Socket-IP-0.41-5.el9.noarch 43/408 Installing : perl-Text-Tabs+Wrap-2013.0523-460.el9.noarch 44/408 Installing : perl-Net-SSLeay-1.92-2.el9.x86_64 45/408 Installing : perl-Pod-Escapes-1:1.07-460.el9.noarch 46/408 Installing : perl-File-Path-2.18-4.el9.noarch 47/408 Installing : perl-IO-Socket-SSL-2.073-1.el9.noarch 48/408 Installing : perl-Term-ANSIColor-5.01-461.el9.noarch 49/408 Installing : perl-Class-Struct-0.66-480.el9.noarch 50/408 Installing : perl-POSIX-1.94-480.el9.x86_64 51/408 Installing : perl-IPC-Open3-1.21-480.el9.noarch 52/408 Installing : perl-subs-1.03-480.el9.noarch 53/408 Installing : perl-Pod-Simple-1:3.42-4.el9.noarch 54/408 Installing : perl-Term-Cap-1.17-460.el9.noarch 55/408 Installing : perl-File-Temp-1:0.231.100-4.el9.noarch 56/408 Installing : perl-HTTP-Tiny-0.076-461.el9_2.noarch 57/408 Installing : perl-Socket-4:2.031-4.el9.x86_64 58/408 Installing : perl-SelectSaver-1.02-480.el9.noarch 59/408 Installing : perl-Symbol-1.08-480.el9.noarch 60/408 Installing : perl-File-stat-1.09-480.el9.noarch 61/408 
Installing : perl-podlators-1:4.14-460.el9.noarch 62/408 Installing : perl-Pod-Perldoc-3.28.01-461.el9.noarch 63/408 Installing : perl-Text-ParseWords-3.30-460.el9.noarch 64/408 Installing : perl-Fcntl-1.13-480.el9.x86_64 65/408 Installing : perl-mro-1.23-480.el9.x86_64 66/408 Installing : perl-IO-1.43-480.el9.x86_64 67/408 Installing : perl-overloading-0.02-480.el9.noarch 68/408 Installing : perl-Pod-Usage-4:2.01-4.el9.noarch 69/408 Installing : perl-parent-1:0.238-460.el9.noarch 70/408 Installing : perl-MIME-Base64-3.16-4.el9.x86_64 71/408 Installing : perl-Scalar-List-Utils-4:1.56-461.el9.x86_64 72/408 Installing : perl-constant-1.33-461.el9.noarch 73/408 Installing : perl-Errno-1.30-480.el9.x86_64 74/408 Installing : perl-File-Basename-2.85-480.el9.noarch 75/408 Installing : perl-Getopt-Std-1.12-480.el9.noarch 76/408 Installing : perl-Storable-1:3.21-460.el9.x86_64 77/408 Installing : perl-overload-1.31-480.el9.noarch 78/408 Installing : perl-vars-1.05-480.el9.noarch 79/408 Installing : perl-Getopt-Long-1:2.52-4.el9.noarch 80/408 Installing : perl-Exporter-5.74-461.el9.noarch 81/408 Installing : perl-Carp-1.50-460.el9.noarch 82/408 Installing : perl-PathTools-3.78-461.el9.x86_64 83/408 Installing : perl-NDBM_File-1.15-480.el9.x86_64 84/408 Installing : perl-Encode-4:3.08-462.el9.x86_64 85/408 Installing : perl-libs-4:5.32.1-480.el9.x86_64 86/408 Installing : perl-interpreter-4:5.32.1-480.el9.x86_64 87/408 Upgrading : libsmartcols-2.37.4-11.el9_2.x86_64 88/408 Installing : python3-vine-5.0.0-5.el9ost.noarch 89/408 Installing : python3-amqp-5.0.6-5.el9ost.noarch 90/408 Installing : python3-tempita-0.5.1-27.el9ost.noarch 91/408 Installing : python3-munch-2.5.0-6.el9ost.noarch 92/408 Installing : python3-msgpack-1.0.2-4.el9ost.x86_64 93/408 Installing : python3-oslo-serialization-4.1.1-17.1.2023062101 94/408 Installing : python3-requestsexceptions-1.4.0-17.1.2023062016 95/408 Installing : python3-os-service-types-1.7.0-17.1.202306202012 96/408 Installing : python3-keystoneauth1-4.4.0-17.1.20240812145234. 97/408 Installing : python3-novaclient-1:17.4.1-17.1.20230621034300. 98/408 Installing : python3-cinderclient-7.4.1-17.1.20230620211836.4 99/408 Installing : python3-futurist-2.3.0-17.1.20230621104020.1a1c6 100/408 Installing : dib-utils-0.0.11-17.1.20230620173328.51661c3.el9 101/408 Installing : os-refresh-config-13.1.1-17.1.20231020000854.812 102/408 Installing : python3-zope-event-4.2.0-20.el9ost.noarch 103/408 Installing : python3-tenacity-6.3.1-1.el9ost.noarch 104/408 Installing : python3-pyOpenSSL-20.0.1-2.el9ost.noarch 105/408 Installing : python3-paste-3.5.0-5.el9ost.noarch 106/408 Installing : python3-paste-deploy-2.1.1-4.el9ost.noarch 107/408 Installing : python3-passlib-1.7.4-3.el9ost.noarch 108/408 Installing : python3-jmespath-0.10.0-1.el9ost.noarch 109/408 Installing : python3-jeepney-0.6.0-2.el9ost.noarch 110/408 Installing : libsodium-1.0.18-7.el9ost.x86_64 111/408 Installing : python3-gssapi-1.6.9-5.el9.x86_64 112/408 Installing : python3-qrcode-core-6.1-12.el9.noarch 113/408 Installing : python3-pyusb-1.0.2-13.el9.noarch 114/408 Installing : python3-appdirs-1.4.4-4.el9.noarch 115/408 Installing : python3-openstacksdk-0.55.1-17.1.20250516211008. 
116/408 Installing : python3-os-client-config-2.1.0-17.1.202306202031 117/408 Installing : python3-metalsmith-1.4.4-17.1.20240522060758.5e7 118/408 Installing : python3-openvswitch3.3-3.3.6-141.el9fdp.x86_64 119/408 Installing : python3-dns-2.2.1-2.el9.noarch 120/408 Installing : python3-rhosp-openvswitch-3.3-1.el9ost.noarch 121/408 Installing : python3-shade-1.33.0-17.1.20230620225148.e7c7f29 122/408 Installing : python3-yubico-1.3.3-7.el9.noarch 123/408 Installing : python3-urllib-gssapi-1.0.2-4.el9.noarch 124/408 Installing : python3-pynacl-1.4.0-1.el9ost.x86_64 125/408 Installing : python3-secretstorage-3.3.1-3.el9ost.noarch 126/408 Installing : python3-keyring-21.8.0-4.el9ost.noarch 127/408 Installing : python3-keystoneclient-1:4.3.0-17.1.202306210251 128/408 Installing : python3-swiftclient-3.11.1-17.1.20230620204150.0 129/408 Installing : python3-zope-interface-5.4.0-1.el9ost.x86_64 130/408 Installing : python3-gevent-21.1.2-2.el9ost.x86_64 131/408 Installing : python3-etcd3gw-0.2.6-2.el9ost.noarch 132/408 Installing : python3-oslo-reports-2.2.0-17.1.20230620210621.b 133/408 Installing : python3-kombu-1:5.0.2-3.el9ost.noarch 134/408 Upgrading : util-linux-core-2.37.4-11.el9_2.x86_64 135/408 Running scriptlet: util-linux-core-2.37.4-11.el9_2.x86_64 135/408 Installing : openssl-perl-1:3.0.7-6.el9_2.x86_64 136/408 Installing : python3-ldap-3.4.3-2.el9.x86_64 137/408 Installing : python3-alembic-1.7.5-3.el9.noarch 138/408 Installing : python3-augeas-0.5.0-25.el9.noarch 139/408 Installing : ruby-augeas-0.5.0-29.el9ost.x86_64 140/408 Installing : device-mapper-persistent-data-0.9.0-13.el9.x86_6 141/408 Installing : qemu-img-17:7.2.0-14.el9_2.18.x86_64 142/408 Installing : python3-rsa-4.6-3.1.el9ost.noarch 143/408 Installing : python3-boto-2.45.0-8.1.el9ost.noarch 144/408 Installing : heat-cfntools-1.4.2-6.1.el9ost.noarch 145/408 Upgrading : libfdisk-2.37.4-11.el9_2.x86_64 146/408 Upgrading : util-linux-2.37.4-11.el9_2.x86_64 147/408 Upgrading : device-mapper-libs-9:1.02.187-7.el9_2.2.x86_64 148/408 Upgrading : device-mapper-9:1.02.187-7.el9_2.2.x86_64 149/408 Installing : buildah-1:1.29.5-1.el9_2.x86_64 150/408 Installing : device-mapper-event-libs-9:1.02.187-7.el9_2.2.x8 151/408 Installing : device-mapper-event-9:1.02.187-7.el9_2.2.x86_64 152/408 Running scriptlet: device-mapper-event-9:1.02.187-7.el9_2.2.x86_64 152/408 Created symlink /etc/systemd/system/sockets.target.wants/dm-event.socket → /usr/lib/systemd/system/dm-event.socket. Installing : lvm2-libs-9:2.03.17-7.el9_2.2.x86_64 153/408 Installing : lvm2-9:2.03.17-7.el9_2.2.x86_64 154/408 Running scriptlet: lvm2-9:2.03.17-7.el9_2.2.x86_64 154/408 Created symlink /etc/systemd/system/sysinit.target.wants/lvm2-monitor.service → /usr/lib/systemd/system/lvm2-monitor.service. Created symlink /etc/systemd/system/sysinit.target.wants/lvm2-lvmpolld.socket → /usr/lib/systemd/system/lvm2-lvmpolld.socket. 
Installing : skopeo-2:1.11.2-0.1.el9_2.2.x86_64 155/408 Installing : python3-pystache-0.5.4-15.el9ost.noarch 156/408 Installing : os-apply-config-13.1.1-17.1.20231020001757.3c7f9 157/408 Installing : rubygem-fast_gettext-1.2.0-9.el9ost.noarch 158/408 Installing : rubygem-hocon-1.3.1-2.el9ost.noarch 159/408 Installing : hiera-3.6.0-6.el9ost.noarch 160/408 Installing : rubygem-rexml-3.2.5-161.el9_2.2.noarch 161/408 Installing : rubygem-concurrent-ruby-1.1.5-2.el9ost.noarch 162/408 Installing : rubygem-deep_merge-1.2.1-7.el9ost.noarch 163/408 Installing : rubygem-multi_json-1.15.0-2.el9ost.noarch 164/408 Installing : rubygem-ruby-shadow-2.5.0-15.el9ost.x86_64 165/408 Installing : rubygem-semantic_puppet-1.0.4-2.el9ost.noarch 166/408 Installing : libselinux-ruby-3.5-1.el9.x86_64 167/408 Installing : libunwind-1.6.2-1.el9cp.x86_64 168/408 Installing : python3-sss-murmur-2.8.2-5.el9_2.5.x86_64 169/408 Installing : libipa_hbac-2.8.2-5.el9_2.5.x86_64 170/408 Installing : python3-libipa_hbac-2.8.2-5.el9_2.5.x86_64 171/408 Installing : openstack-tripleo-common-containers-15.4.1-17.1. 172/408 Installing : python-openstackclient-lang-5.5.2-17.1.202506161 173/408 Installing : yaml-cpp-0.6.3-6.el9ost.x86_64 174/408 Installing : python3-yaql-1.1.3-13.el9ost.noarch 175/408 Installing : python3-yappi-1.3.1-5.el9ost.x86_64 176/408 Installing : python3-warlock-1.3.3-7.el9ost.noarch 177/408 Installing : python3-glanceclient-1:3.3.0-17.1.20240712170803 178/408 Installing : python3-repoze-lru-0.7-12.el9ost.noarch 179/408 Installing : python3-routes-2.4.1-14.el9ost.noarch 180/408 Installing : python3-extras-1.0.0-17.el9ost.noarch 181/408 Installing : python3-colorama-0.4.4-4.el9ost.noarch 182/408 Installing : python3-cachetools-4.2.2-3.el9ost.noarch 183/408 Installing : python3-waitress-2.0.0-4.el9ost.noarch 184/408 Installing : python-oslo-db-lang-8.5.2-17.1.20240820150750.26 185/408 Installing : openstack-selinux-0.8.37-17.1.20231107080825.05d 186/408 Running scriptlet: openstack-selinux-0.8.37-17.1.20231107080825.05d 186/408 Installing : python3-sqlparse-0.4.1-2.el9ost.noarch 187/408 Installing : python3-migrate-0.13.0-1.el9ost.noarch 188/408 Installing : python3-werkzeug-2.0.1-7.el9ost.noarch 189/408 Installing : python3-statsd-3.2.1-20.1.el9ost.noarch 190/408 Installing : python3-oslo-rootwrap-6.3.1-17.1.20230621110703. 
191/408 Installing : python3-os-traits-2.5.0-17.1.20230620221402.ac1b 192/408 Installing : python-pycadf-common-3.1.1-17.1.20230620210757.4 193/408 Installing : python3-pycadf-3.1.1-17.1.20230620210757.4179996 194/408 Installing : python-oslo-versionedobjects-lang-2.4.1-17.1.202 195/408 Installing : python-oslo-policy-lang-3.7.1-17.1.2023062100394 196/408 Installing : python3-oslo-policy-3.7.1-17.1.20230621003949.63 197/408 Installing : python3-oslo-upgradecheck-1.3.1-17.1.20230621010 198/408 Installing : python-oslo-middleware-lang-4.2.1-17.1.202306210 199/408 Installing : python3-oslo-middleware-4.2.1-17.1.2023062101122 200/408 Installing : python-oslo-log-lang-4.4.0-17.1.20230620205407.9 201/408 Installing : python3-oslo-log-4.4.0-17.1.20230620205407.9b29c 202/408 Installing : python-oslo-cache-lang-2.7.1-17.1.20230621012756 203/408 Installing : python3-soupsieve-2.2-1.el9ost.noarch 204/408 Installing : python3-smmap-3.0.1-4.el9ost.noarch 205/408 Installing : python3-gitdb-4.0.5-2.el9ost.noarch 206/408 Installing : python3-GitPython-3.1.14-2.el9ost.noarch 207/408 Installing : python3-setproctitle-1.2.2-1.el9ost.x86_64 208/408 Installing : python3-pyperclip-1.8.0-3.el9ost.noarch 209/408 Installing : python3-natsort-7.1.1-2.el9ost.noarch 210/408 Installing : python3-croniter-0.3.35-2.el9ost.noarch 211/408 Installing : python3-mimeparse-1.6.0-16.el9ost.noarch 212/408 Installing : python3-fixtures-3.0.0-24.el9ost.noarch 213/408 Installing : python3-testtools-2.4.0-10.el9ost.noarch 214/408 Installing : python3-oslo-service-2.5.1-17.1.20240821200745.c 215/408 Installing : python3-oslo-messaging-12.7.3-17.1.2023121913080 216/408 Installing : python3-osprofiler-3.4.0-17.1.20230620215259.5d8 217/408 Installing : python3-oslo-versionedobjects-2.4.1-17.1.2023062 218/408 Installing : python3-memcached-1.59-3.el9ost.noarch 219/408 Installing : python3-oslo-cache-2.7.1-17.1.20230621012756.d02 220/408 Installing : python3-keystonemiddleware-9.2.0-17.1.2023062021 221/408 Installing : python3-logutils-0.3.5-15.el9ost.noarch 222/408 Installing : python3-lockfile-1:0.12.2-2.el9ost.noarch 223/408 Installing : python3-docutils-0.16-6.el9ost.noarch 224/408 Installing : python3-daemon-2.3.0-1.el9ost.noarch 225/408 Installing : python3-bcrypt-3.1.7-7.el9ost.x86_64 226/408 Installing : python3-paramiko-2.11.0-2.el9ost.noarch 227/408 Installing : openpgm-5.2.122-26.el9ost.x86_64 228/408 Installing : zeromq-4.3.4-1.el9ost.x86_64 229/408 Installing : python3-zmq-22.0.3-1.el9ost.x86_64 230/408 Installing : python3-tinyrpc-1.0.3-6.el9ost.noarch 231/408 Installing : golang-github-vbatts-tar-split-0.11.1-9.el9ost.x 232/408 Installing : libxslt-1.1.34-11.el9_2.x86_64 233/408 Installing : python3-beautifulsoup4-4.9.3-2.el9ost.noarch 234/408 Installing : python3-lxml-4.6.5-3.el9.x86_64 235/408 Installing : python3-os-ken-1.4.1-17.1.20241205090937.018d755 236/408 warning: group neutron does not exist - using root Installing : python3-webtest-2.0.35-6.el9ost.noarch 237/408 Installing : python3-pecan-1.4.0-2.el9ost.noarch 238/408 Installing : ipa-client-common-4.10.1-12.el9_2.2.noarch 239/408 Running scriptlet: ipa-selinux-4.10.1-6.el9.noarch 240/408 Installing : ipa-selinux-4.10.1-6.el9.noarch 240/408 Running scriptlet: ipa-selinux-4.10.1-6.el9.noarch 240/408 Installing : ipa-common-4.10.1-12.el9_2.2.noarch 241/408 Installing : oniguruma-6.9.6-1.el9.5.x86_64 242/408 Installing : jq-1.6-14.el9.x86_64 243/408 Installing : python3-wcwidth-0.2.5-8.el9.noarch 244/408 Installing : python3-cmd2-1.4.0-4.el9ost.noarch 245/408 
Installing : python3-cliff-3.7.0-17.1.20230620172206.117a100. 246/408 Installing : python3-osc-lib-2.3.1-17.1.20230620203400.2b7a67 247/408 Installing : python3-heatclient-2.3.1-17.1.20230621012952.d16 248/408 Installing : python3-neutronclient-7.3.1-17.1.20230621044049. 249/408 Installing : python3-zaqarclient-2.4.0-17.1.20230620214253.e3 250/408 Installing : python3-ironic-inspector-client-4.5.0-17.1.20230 251/408 Installing : python3-mistralclient-4.2.0-17.1.20230620220528. 252/408 Installing : python3-troveclient-7.0.0-17.1.20230620220924.c7 253/408 Installing : os-collect-config-13.1.1-17.1.20231020001748.761 254/408 Running scriptlet: os-collect-config-13.1.1-17.1.20231020001748.761 254/408 Installing : python3-heat-agent-2.2.1-17.1.20230620232028.ed1 255/408 Installing : python3-heat-agent-apply-config-2.2.1-17.1.20230 256/408 Installing : python3-heat-agent-docker-cmd-2.2.1-17.1.2023062 257/408 Installing : python3-heat-agent-hiera-2.2.1-17.1.202306202320 258/408 Installing : python3-heat-agent-json-file-2.2.1-17.1.20230620 259/408 Installing : python3-aodhclient-2.2.0-17.1.20230620222234.b74 260/408 Installing : python3-designateclient-4.2.1-17.1.2023062102313 261/408 Installing : python3-magnumclient-3.4.1-17.1.20230621053508.2 262/408 Installing : python3-manilaclient-2.6.4-17.1.20240830140806.7 263/408 Installing : python3-barbicanclient-5.3.0-17.1.20230620213453 264/408 Installing : python3-PyMySQL-0.10.1-6.el9.noarch 265/408 Installing : python3-oslo-db-8.5.2-17.1.20240820150750.26fd6f 266/408 Installing : python3-neutron-lib-2.10.3-17.1.20231221164814.6 267/408 Installing : boost-atomic-1.75.0-8.el9.x86_64 268/408 Installing : boost-log-1.75.0-8.el9.x86_64 269/408 Installing : boost-program-options-1.75.0-8.el9.x86_64 270/408 Installing : sshpass-1.09-4.el9.x86_64 271/408 Installing : openstack-ansible-core-2.14.2-4.7.el9ost.x86_64 272/408 Installing : python3-ansible-runner-2.0.0a1-3.el9ost.noarch 273/408 Installing : ansible-pacemaker-1.0.4-17.1.20231213170757.7c10 274/408 Installing : python3-validations-libs-1.9.1-17.1.202309130208 275/408 Installing : validations-common-1.9.1-17.1.20231006020828.f27 276/408 Installing : ansible-collection-ansible-posix-1.2.0-1.3.el9os 277/408 Installing : ansible-collection-community-general-4.0.0-1.1.e 278/408 Installing : ansible-role-chrony-1.3.1-17.1.20230621084226.01 279/408 Installing : ansible-role-container-registry-1.4.1-17.1.20230 280/408 Installing : ansible-role-redhat-subscription-1.3.0-17.1.2023 281/408 Installing : ansible-tripleo-ipsec-11.0.1-17.1.20230620172008 282/408 Installing : ansible-collection-containers-podman-1.9.4-5.el9 283/408 Installing : ansible-collections-openstack-1.9.1-17.1.2025051 284/408 Installing : ansible-role-metalsmith-deployment-1.4.4-17.1.20 285/408 Installing : ansible-role-openstack-operations-0.0.1-17.1.202 286/408 Installing : ansible-freeipa-1.9.2-3.el9_2.noarch 287/408 Installing : rhel-system-roles-1.21.2-1.el9_2.noarch 288/408 Installing : ansible-collection-ansible-utils-2.3.0-2.el9ost. 289/408 Installing : ansible-collection-ansible-netcommon-2.2.0-1.2.e 290/408 Installing : ansible-config_template-2.0.1-17.1.2023062108392 291/408 Installing : ansible-role-atos-hsm-1.0.1-17.1.20230927001631. 
292/408 Installing : ansible-role-collectd-config-0.0.3-17.1.20230620 293/408 Installing : ansible-role-qdr-config-0.0.2-17.1.2023062017113 294/408 Installing : ansible-role-thales-hsm-3.0.1-17.1.2023100722080 295/408 Installing : ansible-role-tripleo-modify-image-1.5.1-17.1.202 296/408 Installing : python3-heat-agent-ansible-2.2.1-17.1.2023062023 297/408 Installing : ansible-role-lunasa-hsm-1.1.1-17.1.2025060318083 298/408 Installing : python3-jwcrypto-0.8-4.el9.noarch 299/408 Installing : python3-ipalib-4.10.1-12.el9_2.2.noarch 300/408 Installing : python3-ipaclient-4.10.1-12.el9_2.2.noarch 301/408 Installing : libburn-1.5.4-4.el9.x86_64 302/408 Installing : iptables-nft-services-1.8.8-6.el9_1.noarch 303/408 Running scriptlet: iptables-nft-services-1.8.8-6.el9_1.noarch 303/408 Installing : libisofs-1.5.4-4.el9.x86_64 304/408 Installing : libisoburn-1.5.4-4.el9.x86_64 305/408 Installing : xorriso-1.5.4-4.el9.x86_64 306/408 Running scriptlet: xorriso-1.5.4-4.el9.x86_64 306/408 Installing : boost-nowide-1.75.0-8.el9.x86_64 307/408 Installing : leatherman-1.12.6-5.el9ost.x86_64 308/408 Installing : cpp-hocon-0.3.0-7.el9ost.x86_64 309/408 Installing : facter-1:3.14.19-3.el9ost.x86_64 310/408 Installing : ruby-facter-1:3.14.19-3.el9ost.x86_64 311/408 Installing : rubygem-puppet-resource_api-1.8.13-1.el9ost.noar 312/408 Running scriptlet: puppet-headless-7.10.0-4.el9ost.noarch 313/408 Installing : puppet-headless-7.10.0-4.el9ost.noarch 313/408 Installing : puppet-7.10.0-4.el9ost.noarch 314/408 Running scriptlet: puppet-7.10.0-4.el9ost.noarch 314/408 Installing : puppet-stdlib-6.3.1-17.1.20230621000857.7c1ae25. 315/408 Installing : puppet-inifile-4.2.1-17.1.20250804150825.df46d2a 316/408 Installing : puppet-concat-6.2.1-17.1.20250725131333.dfeabb9. 317/408 Installing : puppet-apache-6.5.2-17.1.20250811150836.e4a1532. 318/408 Installing : puppet-xinetd-3.3.1-17.1.20230620185435.8d460c4. 319/408 Installing : puppet-firewall-3.4.1-17.1.20250804151611.94f707 320/408 Installing : puppet-memcached-6.0.0-17.1.20230620184630.4c70d 321/408 Installing : puppet-sysctl-0.0.13-17.1.20250725113328.847ec1c 322/408 Installing : puppet-rsync-1.1.4-17.1.20250804151510.ea6397e.e 323/408 Installing : puppet-corosync-8.0.1-17.1.20230621104908.6a9da9 324/408 Installing : puppet-mysql-10.6.1-17.1.20230621022141.937d044. 325/408 Installing : puppet-systemd-2.12.1-17.1.20230620191611.8f68b0 326/408 Installing : puppet-redis-6.1.1-17.1.20230620191704.547105e.e 327/408 Installing : puppet-openstack_extras-18.5.1-17.1.202306210424 328/408 Installing : puppet-horizon-18.6.1-17.1.20251008130751.8074e6 329/408 Installing : puppet-pacemaker-1.5.1-17.1.20250214161010.f10ce 330/408 Installing : puppet-auditd-2.2.1-17.1.20230620192453.189b22b. 
331/408 Installing : puppet-collectd-13.0.1-17.1.20230620193147.ad138 332/408 Installing : puppet-dns-8.2.1-17.1.20230621002045.70f5b28.el9 333/408 Installing : puppet-rsyslog-4.0.1-17.1.20230620200132.2548a0d 334/408 Installing : puppet-ssh-6.2.1-17.1.20230620190251.6e0f430.el9 335/408 Installing : puppet-haproxy-4.2.2-17.1.20250725114653.a797b8c 336/408 Installing : puppet-keepalived-0.0.2-17.1.20250725114922.bbca 337/408 Installing : puppet-fdio-18.2-1.20220727113954.6fd1c8e.el9ost 338/408 Installing : puppet-archive-4.6.1-17.1.20230620181607.bc7e4ff 339/408 Installing : puppet-rabbitmq-11.0.1-17.1.20230620182519.63fee 340/408 Installing : puppet-openstacklib-18.5.2-17.1.20230621052822.6 341/408 Installing : puppet-oslo-18.5.1-17.1.20230621052358.fe2a147.e 342/408 Installing : puppet-keystone-18.6.1-17.1.20251008090800.cb0ad 343/408 Installing : puppet-glance-18.6.1-17.1.20250220151014.81b081d 344/408 Installing : puppet-cinder-18.5.2-17.1.20230621054224.6aa60e7 345/408 Installing : puppet-nova-18.6.1-17.1.20230621090443.a21eae4.e 346/408 Installing : puppet-octavia-18.5.1-17.1.20230621062420.842492 347/408 Installing : puppet-manila-18.5.2-17.1.20231102190827.a72a7d5 348/408 Installing : puppet-ceilometer-18.4.3-17.1.20230927010818.383 349/408 Installing : puppet-designate-18.6.1-17.1.20230621061456.f4c0 350/408 Installing : puppet-gnocchi-18.4.3-17.1.20230621061020.7584b9 351/408 Installing : puppet-heat-18.4.1-17.1.20230621111632.3b41bb0.e 352/408 Installing : puppet-placement-5.4.3-17.1.20230621061610.e7557 353/408 Installing : puppet-swift-18.6.1-17.1.20230621052139.f105ffc. 354/408 Installing : puppet-barbican-18.4.2-17.1.20250625204151.af6c7 355/408 Installing : puppet-aodh-18.4.2-17.1.20230620151218.3e47b5a.e 356/408 Installing : puppet-certmonger-2.7.1-17.1.20230620184730.3e2e 357/408 Installing : puppet-etcd-1.12.3-17.1.20230620193226.e143c2d.e 358/408 Installing : puppet-nssdb-1.0.2-17.1.20230620185645.2ed2a2d.e 359/408 Installing : puppet-qdr-7.4.1-17.1.20230620195605.8a575de.el9 360/408 Installing : puppet-ovn-18.6.1-17.1.20240925094907.7805f7e.el 361/408 Installing : puppet-git-0.5.0-17.1.20250725114148.4e4498e.el9 362/408 Installing : puppet-ipaclient-2.5.2-17.1.20250725102608.b0867 363/408 Installing : puppet-snmp-3.9.1-17.1.20250725115759.5d73485.el 364/408 Installing : puppet-kmod-2.5.0-17.1.20230620190646.52e31e3.el 365/408 Installing : puppet-vswitch-14.4.3-17.1.20230621043558.51e82c 366/408 Installing : puppet-neutron-18.6.1-17.1.20230621053056.c9d467 367/408 Installing : python3-heat-agent-puppet-2.2.1-17.1.20230620232 368/408 Installing : openstack-heat-agents-2.2.1-17.1.20230620232028. 369/408 Installing : puppet-module-data-0.5.1-17.1.20250725115430.28d 370/408 Installing : puppet-remote-10.0.0-17.1.20250725114017.7420908 371/408 Installing : puppet-vcsrepo-3.1.1-17.1.20250725113408.a36ee18 372/408 Installing : puppet-ironic-18.7.1-17.1.20240305210817.edf93f9 373/408 Installing : pkgconf-m4-1.7.3-10.el9.noarch 374/408 Installing : libpkgconf-1.7.3-10.el9.x86_64 375/408 Installing : pkgconf-1.7.3-10.el9.x86_64 376/408 Installing : pkgconf-pkg-config-1.7.3-10.el9.x86_64 377/408 Installing : bash-completion-1:2.11-4.el9.noarch 378/408 Installing : python3-openstackclient-5.5.2-17.1.2025061610090 379/408 Running scriptlet: python3-openstackclient-5.5.2-17.1.2025061610090 379/408 The 'openstack bgp speaker show dragents' CLI is deprecated and will be removed in the future. Use 'openstack bgp dragent list' CLI instead. 
Installing : python3-ironicclient-4.6.4-17.1.20230621041552.0 380/408 Installing : python3-tripleo-common-15.4.1-17.1.2025090814082 381/408 Installing : tripleo-ansible-3.3.1-17.1.20250804050821.8debef 382/408 Installing : openstack-tripleo-validations-14.3.2-17.1.202501 383/408 Installing : python3-saharaclient-3.3.0-17.1.20230620222322.4 384/408 Installing : python3-octaviaclient-2.3.1-17.1.20231106100828. 385/408 Running scriptlet: openstack-heat-common-1:16.1.1-17.1.202507031108 386/408 Installing : openstack-heat-common-1:16.1.1-17.1.202507031108 386/408 Installing : openstack-heat-api-1:16.1.1-17.1.20250703110808. 387/408 Running scriptlet: openstack-heat-api-1:16.1.1-17.1.20250703110808. 387/408 Installing : openstack-heat-engine-1:16.1.1-17.1.202507031108 388/408 Running scriptlet: openstack-heat-engine-1:16.1.1-17.1.202507031108 388/408 Installing : openstack-heat-monolith-1:16.1.1-17.1.2025070311 389/408 Running scriptlet: openstack-heat-monolith-1:16.1.1-17.1.2025070311 389/408 Installing : libkadm5-1.20.1-8.el9.x86_64 390/408 Installing : krb5-pkinit-1.20.1-8.el9.x86_64 391/408 Installing : krb5-workstation-1.20.1-8.el9.x86_64 392/408 Installing : ansible-tripleo-ipa-0.3.1-17.1.20230627190951.8d 393/408 Installing : openstack-tripleo-common-15.4.1-17.1.20250908140 394/408 Installing : openstack-tripleo-heat-templates-14.3.1-17.1.202 395/408 Installing : glibc-langpack-en-2.34-60.el9.x86_64 396/408 Installing : puppet-tripleo-14.2.3-17.1.20250320101351.40278e 397/408 Installing : python3-tripleoclient-16.5.1-17.1.20250728123209 398/408 Running scriptlet: cephadm-2:18.2.1-361.el9cp.noarch 399/408 Installing : cephadm-2:18.2.1-361.el9cp.noarch 399/408 Cleanup : device-mapper-9:1.02.187-7.el9.x86_64 400/408 Cleanup : util-linux-2.37.4-10.el9.x86_64 401/408 Cleanup : util-linux-core-2.37.4-10.el9.x86_64 402/408 Cleanup : libfdisk-2.37.4-10.el9.x86_64 403/408 Cleanup : libmount-2.37.4-10.el9.x86_64 404/408 Cleanup : libblkid-2.37.4-10.el9.x86_64 405/408 Cleanup : libuuid-2.37.4-10.el9.x86_64 406/408 Cleanup : libsmartcols-2.37.4-10.el9.x86_64 407/408 Cleanup : device-mapper-libs-9:1.02.187-7.el9.x86_64 408/408 Running scriptlet: ipa-selinux-4.10.1-6.el9.noarch 408/408 Running scriptlet: device-mapper-libs-9:1.02.187-7.el9.x86_64 408/408 Verifying : bash-completion-1:2.11-4.el9.noarch 1/408 Verifying : device-mapper-persistent-data-0.9.0-13.el9.x86_6 2/408 Verifying : libaio-0.3.111-13.el9.x86_64 3/408 Verifying : glibc-langpack-en-2.34-60.el9.x86_64 4/408 Verifying : krb5-pkinit-1.20.1-8.el9.x86_64 5/408 Verifying : krb5-workstation-1.20.1-8.el9.x86_64 6/408 Verifying : libkadm5-1.20.1-8.el9.x86_64 7/408 Verifying : libpkgconf-1.7.3-10.el9.x86_64 8/408 Verifying : pkgconf-1.7.3-10.el9.x86_64 9/408 Verifying : pkgconf-m4-1.7.3-10.el9.noarch 10/408 Verifying : pkgconf-pkg-config-1.7.3-10.el9.x86_64 11/408 Verifying : python3-dns-2.2.1-2.el9.noarch 12/408 Verifying : python3-openvswitch3.3-3.3.6-141.el9fdp.x86_64 13/408 Verifying : perl-IO-Socket-SSL-2.073-1.el9.noarch 14/408 Verifying : boost-system-1.75.0-8.el9.x86_64 15/408 Verifying : perl-Getopt-Long-1:2.52-4.el9.noarch 16/408 Verifying : perl-Storable-1:3.21-460.el9.x86_64 17/408 Verifying : python3-urllib-gssapi-1.0.2-4.el9.noarch 18/408 Verifying : boost-locale-1.75.0-8.el9.x86_64 19/408 Verifying : boost-nowide-1.75.0-8.el9.x86_64 20/408 Verifying : boost-thread-1.75.0-8.el9.x86_64 21/408 Verifying : perl-Data-Dumper-2.174-462.el9.x86_64 22/408 Verifying : perl-Exporter-5.74-461.el9.noarch 23/408 Verifying : 
perl-File-Temp-1:0.231.100-4.el9.noarch 24/408 Verifying : perl-Text-Tabs+Wrap-2013.0523-460.el9.noarch 25/408 Verifying : perl-parent-1:0.238-460.el9.noarch 26/408 Verifying : python3-pyasn1-modules-0.4.8-6.el9.noarch 27/408 Verifying : xorriso-1.5.4-4.el9.x86_64 28/408 Verifying : libisofs-1.5.4-4.el9.x86_64 29/408 Verifying : perl-IO-Socket-IP-0.41-5.el9.noarch 30/408 Verifying : perl-Net-SSLeay-1.92-2.el9.x86_64 31/408 Verifying : perl-PathTools-3.78-461.el9.x86_64 32/408 Verifying : perl-Pod-Simple-1:3.42-4.el9.noarch 33/408 Verifying : perl-Term-ANSIColor-5.01-461.el9.noarch 34/408 Verifying : perl-Text-ParseWords-3.30-460.el9.noarch 35/408 Verifying : perl-URI-5.09-3.el9.noarch 36/408 Verifying : perl-libnet-3.13-4.el9.noarch 37/408 Verifying : python3-lxml-4.6.5-3.el9.x86_64 38/408 Verifying : boost-chrono-1.75.0-8.el9.x86_64 39/408 Verifying : iptables-nft-services-1.8.8-6.el9_1.noarch 40/408 Verifying : perl-Carp-1.50-460.el9.noarch 41/408 Verifying : perl-Encode-4:3.08-462.el9.x86_64 42/408 Verifying : perl-Pod-Escapes-1:1.07-460.el9.noarch 43/408 Verifying : python3-psutil-5.8.0-12.el9.x86_64 44/408 Verifying : libburn-1.5.4-4.el9.x86_64 45/408 Verifying : perl-Mozilla-CA-20200520-6.el9.noarch 46/408 Verifying : perl-Term-Cap-1.17-460.el9.noarch 47/408 Verifying : perl-Time-Local-2:1.300-7.el9.noarch 48/408 Verifying : python3-jwcrypto-0.8-4.el9.noarch 49/408 Verifying : python3-yubico-1.3.3-7.el9.noarch 50/408 Verifying : sshpass-1.09-4.el9.x86_64 51/408 Verifying : boost-filesystem-1.75.0-8.el9.x86_64 52/408 Verifying : boost-log-1.75.0-8.el9.x86_64 53/408 Verifying : boost-program-options-1.75.0-8.el9.x86_64 54/408 Verifying : perl-Digest-MD5-2.58-4.el9.x86_64 55/408 Verifying : perl-MIME-Base64-3.16-4.el9.x86_64 56/408 Verifying : boost-atomic-1.75.0-8.el9.x86_64 57/408 Verifying : libisoburn-1.5.4-4.el9.x86_64 58/408 Verifying : perl-Digest-1.19-4.el9.noarch 59/408 Verifying : perl-File-Path-2.18-4.el9.noarch 60/408 Verifying : perl-Pod-Perldoc-3.28.01-461.el9.noarch 61/408 Verifying : perl-Pod-Usage-4:2.01-4.el9.noarch 62/408 Verifying : perl-Scalar-List-Utils-4:1.56-461.el9.x86_64 63/408 Verifying : perl-Socket-4:2.031-4.el9.x86_64 64/408 Verifying : perl-constant-1.33-461.el9.noarch 65/408 Verifying : perl-podlators-1:4.14-460.el9.noarch 66/408 Verifying : python3-PyMySQL-0.10.1-6.el9.noarch 67/408 Verifying : python3-appdirs-1.4.4-4.el9.noarch 68/408 Verifying : python3-pyasn1-0.4.8-6.el9.noarch 69/408 Verifying : python3-pyusb-1.0.2-13.el9.noarch 70/408 Verifying : python3-qrcode-core-6.1-12.el9.noarch 71/408 Verifying : python3-wcwidth-0.2.5-8.el9.noarch 72/408 Verifying : boost-regex-1.75.0-8.el9.x86_64 73/408 Verifying : oniguruma-6.9.6-1.el9.5.x86_64 74/408 Verifying : python3-alembic-1.7.5-3.el9.noarch 75/408 Verifying : python3-augeas-0.5.0-25.el9.noarch 76/408 Verifying : python3-gssapi-1.6.9-5.el9.x86_64 77/408 Verifying : openssl-perl-1:3.0.7-6.el9_2.x86_64 78/408 Verifying : perl-AutoLoader-5.74-480.el9.noarch 79/408 Verifying : perl-B-1.80-480.el9.x86_64 80/408 Verifying : perl-Class-Struct-0.66-480.el9.noarch 81/408 Verifying : perl-Errno-1.30-480.el9.x86_64 82/408 Verifying : perl-Fcntl-1.13-480.el9.x86_64 83/408 Verifying : perl-File-Basename-2.85-480.el9.noarch 84/408 Verifying : perl-File-stat-1.09-480.el9.noarch 85/408 Verifying : perl-FileHandle-2.03-480.el9.noarch 86/408 Verifying : perl-Getopt-Std-1.12-480.el9.noarch 87/408 Verifying : perl-IO-1.43-480.el9.x86_64 88/408 Verifying : perl-IPC-Open3-1.21-480.el9.noarch 89/408 Verifying : 
perl-NDBM_File-1.15-480.el9.x86_64 90/408 Verifying : perl-POSIX-1.94-480.el9.x86_64 91/408 Verifying : perl-SelectSaver-1.02-480.el9.noarch 92/408 Verifying : perl-Symbol-1.08-480.el9.noarch 93/408 Verifying : perl-base-2.27-480.el9.noarch 94/408 Verifying : perl-if-0.60.800-480.el9.noarch 95/408 Verifying : perl-interpreter-4:5.32.1-480.el9.x86_64 96/408 Verifying : perl-libs-4:5.32.1-480.el9.x86_64 97/408 Verifying : perl-mro-1.23-480.el9.x86_64 98/408 Verifying : perl-overload-1.31-480.el9.noarch 99/408 Verifying : perl-overloading-0.02-480.el9.noarch 100/408 Verifying : perl-subs-1.03-480.el9.noarch 101/408 Verifying : perl-vars-1.05-480.el9.noarch 102/408 Verifying : python3-ldap-3.4.3-2.el9.x86_64 103/408 Verifying : python3-mako-1.1.4-6.el9.noarch 104/408 Verifying : augeas-libs-1.13.0-3.el9.x86_64 105/408 Verifying : ipa-selinux-4.10.1-6.el9.noarch 106/408 Verifying : jq-1.6-14.el9.x86_64 107/408 Verifying : libselinux-ruby-3.5-1.el9.x86_64 108/408 Verifying : ansible-freeipa-1.9.2-3.el9_2.noarch 109/408 Verifying : rhel-system-roles-1.21.2-1.el9_2.noarch 110/408 Verifying : ipa-client-common-4.10.1-12.el9_2.2.noarch 111/408 Verifying : ipa-common-4.10.1-12.el9_2.2.noarch 112/408 Verifying : python3-ipaclient-4.10.1-12.el9_2.2.noarch 113/408 Verifying : python3-ipalib-4.10.1-12.el9_2.2.noarch 114/408 Verifying : perl-HTTP-Tiny-0.076-461.el9_2.noarch 115/408 Verifying : skopeo-2:1.11.2-0.1.el9_2.2.x86_64 116/408 Verifying : ruby-3.0.4-161.el9_2.2.x86_64 117/408 Verifying : ruby-default-gems-3.0.4-161.el9_2.2.noarch 118/408 Verifying : ruby-libs-3.0.4-161.el9_2.2.x86_64 119/408 Verifying : rubygem-bigdecimal-3.0.0-161.el9_2.2.x86_64 120/408 Verifying : rubygem-bundler-2.2.33-161.el9_2.2.noarch 121/408 Verifying : rubygem-io-console-0.5.7-161.el9_2.2.x86_64 122/408 Verifying : rubygem-json-2.5.1-161.el9_2.2.x86_64 123/408 Verifying : rubygem-psych-3.3.2-161.el9_2.2.x86_64 124/408 Verifying : rubygem-rdoc-6.3.3-161.el9_2.2.noarch 125/408 Verifying : rubygem-rexml-3.2.5-161.el9_2.2.noarch 126/408 Verifying : rubygems-3.2.33-161.el9_2.2.noarch 127/408 Verifying : buildah-1:1.29.5-1.el9_2.x86_64 128/408 Verifying : libxslt-1.1.34-11.el9_2.x86_64 129/408 Verifying : qemu-img-17:7.2.0-14.el9_2.18.x86_64 130/408 Verifying : ansible-collection-ansible-netcommon-2.2.0-1.2.e 131/408 Verifying : ansible-collection-ansible-posix-1.2.0-1.3.el9os 132/408 Verifying : ansible-collection-ansible-utils-2.3.0-2.el9ost. 
133/408 Verifying : ansible-collection-community-general-4.0.0-1.1.e 134/408 Verifying : golang-github-vbatts-tar-split-0.11.1-9.el9ost.x 135/408 Verifying : heat-cfntools-1.4.2-6.1.el9ost.noarch 136/408 Verifying : libsodium-1.0.18-7.el9ost.x86_64 137/408 Verifying : openpgm-5.2.122-26.el9ost.x86_64 138/408 Verifying : puppet-fdio-18.2-1.20220727113954.6fd1c8e.el9ost 139/408 Verifying : python3-ansible-runner-2.0.0a1-3.el9ost.noarch 140/408 Verifying : python3-bcrypt-3.1.7-7.el9ost.x86_64 141/408 Verifying : python3-beautifulsoup4-4.9.3-2.el9ost.noarch 142/408 Verifying : python3-croniter-0.3.35-2.el9ost.noarch 143/408 Verifying : python3-daemon-2.3.0-1.el9ost.noarch 144/408 Verifying : python3-docutils-0.16-6.el9ost.noarch 145/408 Verifying : python3-etcd3gw-0.2.6-2.el9ost.noarch 146/408 Verifying : python3-gevent-21.1.2-2.el9ost.x86_64 147/408 Verifying : python3-gitdb-4.0.5-2.el9ost.noarch 148/408 Verifying : python3-jeepney-0.6.0-2.el9ost.noarch 149/408 Verifying : python3-jmespath-0.10.0-1.el9ost.noarch 150/408 Verifying : python3-lockfile-1:0.12.2-2.el9ost.noarch 151/408 Verifying : python3-logutils-0.3.5-15.el9ost.noarch 152/408 Verifying : python3-memcached-1.59-3.el9ost.noarch 153/408 Verifying : python3-migrate-0.13.0-1.el9ost.noarch 154/408 Verifying : python3-mimeparse-1.6.0-16.el9ost.noarch 155/408 Verifying : python3-natsort-7.1.1-2.el9ost.noarch 156/408 Verifying : python3-passlib-1.7.4-3.el9ost.noarch 157/408 Verifying : python3-pecan-1.4.0-2.el9ost.noarch 158/408 Verifying : python3-pyOpenSSL-20.0.1-2.el9ost.noarch 159/408 Verifying : python3-pynacl-1.4.0-1.el9ost.x86_64 160/408 Verifying : python3-pyperclip-1.8.0-3.el9ost.noarch 161/408 Verifying : python3-setproctitle-1.2.2-1.el9ost.x86_64 162/408 Verifying : python3-smmap-3.0.1-4.el9ost.noarch 163/408 Verifying : python3-soupsieve-2.2-1.el9ost.noarch 164/408 Verifying : python3-sqlalchemy13-1.3.24-3.el9ost.x86_64 165/408 Verifying : python3-tenacity-6.3.1-1.el9ost.noarch 166/408 Verifying : python3-webtest-2.0.35-6.el9ost.noarch 167/408 Verifying : python3-zmq-22.0.3-1.el9ost.x86_64 168/408 Verifying : python3-zope-event-4.2.0-20.el9ost.noarch 169/408 Verifying : python3-zope-interface-5.4.0-1.el9ost.x86_64 170/408 Verifying : rubygem-concurrent-ruby-1.1.5-2.el9ost.noarch 171/408 Verifying : rubygem-deep_merge-1.2.1-7.el9ost.noarch 172/408 Verifying : rubygem-fast_gettext-1.2.0-9.el9ost.noarch 173/408 Verifying : rubygem-hocon-1.3.1-2.el9ost.noarch 174/408 Verifying : rubygem-multi_json-1.15.0-2.el9ost.noarch 175/408 Verifying : rubygem-puppet-resource_api-1.8.13-1.el9ost.noar 176/408 Verifying : rubygem-ruby-shadow-2.5.0-15.el9ost.x86_64 177/408 Verifying : rubygem-semantic_puppet-1.0.4-2.el9ost.noarch 178/408 Verifying : zeromq-4.3.4-1.el9ost.x86_64 179/408 Verifying : ansible-config_template-2.0.1-17.1.2023062108392 180/408 Verifying : ansible-role-atos-hsm-1.0.1-17.1.20230927001631. 
181/408 Verifying : ansible-role-chrony-1.3.1-17.1.20230621084226.01 182/408 Verifying : ansible-role-collectd-config-0.0.3-17.1.20230620 183/408 Verifying : ansible-role-container-registry-1.4.1-17.1.20230 184/408 Verifying : ansible-role-openstack-operations-0.0.1-17.1.202 185/408 Verifying : ansible-role-qdr-config-0.0.2-17.1.2023062017113 186/408 Verifying : ansible-role-redhat-subscription-1.3.0-17.1.2023 187/408 Verifying : ansible-role-thales-hsm-3.0.1-17.1.2023100722080 188/408 Verifying : ansible-role-tripleo-modify-image-1.5.1-17.1.202 189/408 Verifying : ansible-tripleo-ipa-0.3.1-17.1.20230627190951.8d 190/408 Verifying : ansible-tripleo-ipsec-11.0.1-17.1.20230620172008 191/408 Verifying : dib-utils-0.0.11-17.1.20230620173328.51661c3.el9 192/408 Verifying : openstack-heat-agents-2.2.1-17.1.20230620232028. 193/408 Verifying : os-apply-config-13.1.1-17.1.20231020001757.3c7f9 194/408 Verifying : os-collect-config-13.1.1-17.1.20231020001748.761 195/408 Verifying : os-refresh-config-13.1.1-17.1.20231020000854.812 196/408 Verifying : puppet-aodh-18.4.2-17.1.20230620151218.3e47b5a.e 197/408 Verifying : puppet-archive-4.6.1-17.1.20230620181607.bc7e4ff 198/408 Verifying : puppet-auditd-2.2.1-17.1.20230620192453.189b22b. 199/408 Verifying : puppet-ceilometer-18.4.3-17.1.20230927010818.383 200/408 Verifying : puppet-certmonger-2.7.1-17.1.20230620184730.3e2e 201/408 Verifying : puppet-cinder-18.5.2-17.1.20230621054224.6aa60e7 202/408 Verifying : puppet-collectd-13.0.1-17.1.20230620193147.ad138 203/408 Verifying : puppet-corosync-8.0.1-17.1.20230621104908.6a9da9 204/408 Verifying : puppet-designate-18.6.1-17.1.20230621061456.f4c0 205/408 Verifying : puppet-dns-8.2.1-17.1.20230621002045.70f5b28.el9 206/408 Verifying : puppet-etcd-1.12.3-17.1.20230620193226.e143c2d.e 207/408 Verifying : puppet-gnocchi-18.4.3-17.1.20230621061020.7584b9 208/408 Verifying : puppet-heat-18.4.1-17.1.20230621111632.3b41bb0.e 209/408 Verifying : puppet-kmod-2.5.0-17.1.20230620190646.52e31e3.el 210/408 Verifying : puppet-manila-18.5.2-17.1.20231102190827.a72a7d5 211/408 Verifying : puppet-memcached-6.0.0-17.1.20230620184630.4c70d 212/408 Verifying : puppet-mysql-10.6.1-17.1.20230621022141.937d044. 213/408 Verifying : puppet-neutron-18.6.1-17.1.20230621053056.c9d467 214/408 Verifying : puppet-nova-18.6.1-17.1.20230621090443.a21eae4.e 215/408 Verifying : puppet-nssdb-1.0.2-17.1.20230620185645.2ed2a2d.e 216/408 Verifying : puppet-octavia-18.5.1-17.1.20230621062420.842492 217/408 Verifying : puppet-openstack_extras-18.5.1-17.1.202306210424 218/408 Verifying : puppet-openstacklib-18.5.2-17.1.20230621052822.6 219/408 Verifying : puppet-oslo-18.5.1-17.1.20230621052358.fe2a147.e 220/408 Verifying : puppet-placement-5.4.3-17.1.20230621061610.e7557 221/408 Verifying : puppet-qdr-7.4.1-17.1.20230620195605.8a575de.el9 222/408 Verifying : puppet-rabbitmq-11.0.1-17.1.20230620182519.63fee 223/408 Verifying : puppet-redis-6.1.1-17.1.20230620191704.547105e.e 224/408 Verifying : puppet-rsyslog-4.0.1-17.1.20230620200132.2548a0d 225/408 Verifying : puppet-ssh-6.2.1-17.1.20230620190251.6e0f430.el9 226/408 Verifying : puppet-stdlib-6.3.1-17.1.20230621000857.7c1ae25. 227/408 Verifying : puppet-swift-18.6.1-17.1.20230621052139.f105ffc. 228/408 Verifying : puppet-systemd-2.12.1-17.1.20230620191611.8f68b0 229/408 Verifying : puppet-vswitch-14.4.3-17.1.20230621043558.51e82c 230/408 Verifying : puppet-xinetd-3.3.1-17.1.20230620185435.8d460c4. 
231/408 Verifying : python-oslo-cache-lang-2.7.1-17.1.20230621012756 232/408 Verifying : python-oslo-log-lang-4.4.0-17.1.20230620205407.9 233/408 Verifying : python-oslo-middleware-lang-4.2.1-17.1.202306210 234/408 Verifying : python-oslo-policy-lang-3.7.1-17.1.2023062100394 235/408 Verifying : python-oslo-versionedobjects-lang-2.4.1-17.1.202 236/408 Verifying : python-pycadf-common-3.1.1-17.1.20230620210757.4 237/408 Verifying : python3-GitPython-3.1.14-2.el9ost.noarch 238/408 Verifying : python3-aodhclient-2.2.0-17.1.20230620222234.b74 239/408 Verifying : python3-barbicanclient-5.3.0-17.1.20230620213453 240/408 Verifying : python3-cinderclient-7.4.1-17.1.20230620211836.4 241/408 Verifying : python3-cliff-3.7.0-17.1.20230620172206.117a100. 242/408 Verifying : python3-designateclient-4.2.1-17.1.2023062102313 243/408 Verifying : python3-futurist-2.3.0-17.1.20230621104020.1a1c6 244/408 Verifying : python3-heat-agent-2.2.1-17.1.20230620232028.ed1 245/408 Verifying : python3-heat-agent-ansible-2.2.1-17.1.2023062023 246/408 Verifying : python3-heat-agent-apply-config-2.2.1-17.1.20230 247/408 Verifying : python3-heat-agent-docker-cmd-2.2.1-17.1.2023062 248/408 Verifying : python3-heat-agent-hiera-2.2.1-17.1.202306202320 249/408 Verifying : python3-heat-agent-json-file-2.2.1-17.1.20230620 250/408 Verifying : python3-heat-agent-puppet-2.2.1-17.1.20230620232 251/408 Verifying : python3-heatclient-2.3.1-17.1.20230621012952.d16 252/408 Verifying : python3-ironic-inspector-client-4.5.0-17.1.20230 253/408 Verifying : python3-ironicclient-4.6.4-17.1.20230621041552.0 254/408 Verifying : python3-keystoneclient-1:4.3.0-17.1.202306210251 255/408 Verifying : python3-keystonemiddleware-9.2.0-17.1.2023062021 256/408 Verifying : python3-magnumclient-3.4.1-17.1.20230621053508.2 257/408 Verifying : python3-mistralclient-4.2.0-17.1.20230620220528. 258/408 Verifying : python3-neutronclient-7.3.1-17.1.20230621044049. 259/408 Verifying : python3-novaclient-1:17.4.1-17.1.20230621034300. 260/408 Verifying : python3-os-client-config-2.1.0-17.1.202306202031 261/408 Verifying : python3-os-service-types-1.7.0-17.1.202306202012 262/408 Verifying : python3-os-traits-2.5.0-17.1.20230620221402.ac1b 263/408 Verifying : python3-osc-lib-2.3.1-17.1.20230620203400.2b7a67 264/408 Verifying : python3-oslo-cache-2.7.1-17.1.20230621012756.d02 265/408 Verifying : python3-oslo-context-3.2.1-17.1.20230620204857.b 266/408 Verifying : python3-oslo-log-4.4.0-17.1.20230620205407.9b29c 267/408 Verifying : python3-oslo-middleware-4.2.1-17.1.2023062101122 268/408 Verifying : python3-oslo-policy-3.7.1-17.1.20230621003949.63 269/408 Verifying : python3-oslo-reports-2.2.0-17.1.20230620210621.b 270/408 Verifying : python3-oslo-rootwrap-6.3.1-17.1.20230621110703. 
271/408 Verifying : python3-oslo-serialization-4.1.1-17.1.2023062101 272/408 Verifying : python3-oslo-upgradecheck-1.3.1-17.1.20230621010 273/408 Verifying : python3-oslo-versionedobjects-2.4.1-17.1.2023062 274/408 Verifying : python3-osprofiler-3.4.0-17.1.20230620215259.5d8 275/408 Verifying : python3-pycadf-3.1.1-17.1.20230620210757.4179996 276/408 Verifying : python3-requestsexceptions-1.4.0-17.1.2023062016 277/408 Verifying : python3-saharaclient-3.3.0-17.1.20230620222322.4 278/408 Verifying : python3-shade-1.33.0-17.1.20230620225148.e7c7f29 279/408 Verifying : python3-statsd-3.2.1-20.1.el9ost.noarch 280/408 Verifying : python3-swiftclient-3.11.1-17.1.20230620204150.0 281/408 Verifying : python3-tinyrpc-1.0.3-6.el9ost.noarch 282/408 Verifying : python3-troveclient-7.0.0-17.1.20230620220924.c7 283/408 Verifying : python3-validations-libs-1.9.1-17.1.202309130208 284/408 Verifying : python3-zaqarclient-2.4.0-17.1.20230620214253.e3 285/408 Verifying : validations-common-1.9.1-17.1.20231006020828.f27 286/408 Verifying : python3-paramiko-2.11.0-2.el9ost.noarch 287/408 Verifying : python3-werkzeug-2.0.1-7.el9ost.noarch 288/408 Verifying : ansible-pacemaker-1.0.4-17.1.20231213170757.7c10 289/408 Verifying : python3-manilaclient-2.6.4-17.1.20240830140806.7 290/408 Verifying : python3-sqlparse-0.4.1-2.el9ost.noarch 291/408 Verifying : python3-webob-1.8.7-2.1.el9ost.noarch 292/408 Verifying : ansible-role-metalsmith-deployment-1.4.4-17.1.20 293/408 Verifying : openstack-selinux-0.8.37-17.1.20231107080825.05d 294/408 Verifying : puppet-ironic-18.7.1-17.1.20240305210817.edf93f9 295/408 Verifying : python-oslo-db-lang-8.5.2-17.1.20240820150750.26 296/408 Verifying : python3-glanceclient-1:3.3.0-17.1.20240712170803 297/408 Verifying : python3-keystoneauth1-4.4.0-17.1.20240812145234. 298/408 Verifying : python3-metalsmith-1.4.4-17.1.20240522060758.5e7 299/408 Verifying : python3-neutron-lib-2.10.3-17.1.20231221164814.6 300/408 Verifying : python3-octaviaclient-2.3.1-17.1.20231106100828. 
301/408 Verifying : python3-oslo-db-8.5.2-17.1.20240820150750.26fd6f 302/408 Verifying : python3-oslo-messaging-12.7.3-17.1.2023121913080 303/408 Verifying : python3-oslo-service-2.5.1-17.1.20240821200745.c 304/408 Verifying : python3-rhosp-openvswitch-3.3-1.el9ost.noarch 305/408 Verifying : python3-waitress-2.0.0-4.el9ost.noarch 306/408 Verifying : openstack-ansible-core-2.14.2-4.7.el9ost.x86_64 307/408 Verifying : openstack-tripleo-validations-14.3.2-17.1.202501 308/408 Verifying : puppet-glance-18.6.1-17.1.20250220151014.81b081d 309/408 Verifying : puppet-ovn-18.6.1-17.1.20240925094907.7805f7e.el 310/408 Verifying : puppet-pacemaker-1.5.1-17.1.20250214161010.f10ce 311/408 Verifying : python3-os-ken-1.4.1-17.1.20241205090937.018d755 312/408 Verifying : puppet-tripleo-14.2.3-17.1.20250320101351.40278e 313/408 Verifying : ansible-collection-containers-podman-1.9.4-5.el9 314/408 Verifying : ansible-collections-openstack-1.9.1-17.1.2025051 315/408 Verifying : cpp-hocon-0.3.0-7.el9ost.x86_64 316/408 Verifying : facter-1:3.14.19-3.el9ost.x86_64 317/408 Verifying : hiera-3.6.0-6.el9ost.noarch 318/408 Verifying : leatherman-1.12.6-5.el9ost.x86_64 319/408 Verifying : puppet-7.10.0-4.el9ost.noarch 320/408 Verifying : puppet-headless-7.10.0-4.el9ost.noarch 321/408 Verifying : python3-amqp-5.0.6-5.el9ost.noarch 322/408 Verifying : python3-boto-2.45.0-8.1.el9ost.noarch 323/408 Verifying : python3-cachetools-4.2.2-3.el9ost.noarch 324/408 Verifying : python3-cmd2-1.4.0-4.el9ost.noarch 325/408 Verifying : python3-colorama-0.4.4-4.el9ost.noarch 326/408 Verifying : python3-dogpile-cache-1.1.5-5.el9ost.noarch 327/408 Verifying : python3-extras-1.0.0-17.el9ost.noarch 328/408 Verifying : python3-fixtures-3.0.0-24.el9ost.noarch 329/408 Verifying : python3-keyring-21.8.0-4.el9ost.noarch 330/408 Verifying : python3-kombu-1:5.0.2-3.el9ost.noarch 331/408 Verifying : python3-msgpack-1.0.2-4.el9ost.x86_64 332/408 Verifying : python3-munch-2.5.0-6.el9ost.noarch 333/408 Verifying : python3-openstacksdk-0.55.1-17.1.20250516211008. 334/408 Verifying : python3-paste-3.5.0-5.el9ost.noarch 335/408 Verifying : python3-paste-deploy-2.1.1-4.el9ost.noarch 336/408 Verifying : python3-pystache-0.5.4-15.el9ost.noarch 337/408 Verifying : python3-repoze-lru-0.7-12.el9ost.noarch 338/408 Verifying : python3-routes-2.4.1-14.el9ost.noarch 339/408 Verifying : python3-rsa-4.6-3.1.el9ost.noarch 340/408 Verifying : python3-secretstorage-3.3.1-3.el9ost.noarch 341/408 Verifying : python3-simplejson-3.17.5-3.el9ost.x86_64 342/408 Verifying : python3-tempita-0.5.1-27.el9ost.noarch 343/408 Verifying : python3-testtools-2.4.0-10.el9ost.noarch 344/408 Verifying : python3-vine-5.0.0-5.el9ost.noarch 345/408 Verifying : python3-warlock-1.3.3-7.el9ost.noarch 346/408 Verifying : python3-yappi-1.3.1-5.el9ost.x86_64 347/408 Verifying : python3-yaql-1.1.3-13.el9ost.noarch 348/408 Verifying : ruby-augeas-0.5.0-29.el9ost.x86_64 349/408 Verifying : ruby-facter-1:3.14.19-3.el9ost.x86_64 350/408 Verifying : yaml-cpp-0.6.3-6.el9ost.x86_64 351/408 Verifying : ansible-role-lunasa-hsm-1.1.1-17.1.2025060318083 352/408 Verifying : openstack-heat-api-1:16.1.1-17.1.20250703110808. 
353/408 Verifying : openstack-heat-common-1:16.1.1-17.1.202507031108 354/408 Verifying : openstack-heat-engine-1:16.1.1-17.1.202507031108 355/408 Verifying : openstack-heat-monolith-1:16.1.1-17.1.2025070311 356/408 Verifying : puppet-barbican-18.4.2-17.1.20250625204151.af6c7 357/408 Verifying : python-openstackclient-lang-5.5.2-17.1.202506161 358/408 Verifying : python3-openstackclient-5.5.2-17.1.2025061610090 359/408 Verifying : openstack-tripleo-common-15.4.1-17.1.20250908140 360/408 Verifying : openstack-tripleo-common-containers-15.4.1-17.1. 361/408 Verifying : puppet-apache-6.5.2-17.1.20250811150836.e4a1532. 362/408 Verifying : puppet-concat-6.2.1-17.1.20250725131333.dfeabb9. 363/408 Verifying : puppet-firewall-3.4.1-17.1.20250804151611.94f707 364/408 Verifying : puppet-git-0.5.0-17.1.20250725114148.4e4498e.el9 365/408 Verifying : puppet-haproxy-4.2.2-17.1.20250725114653.a797b8c 366/408 Verifying : puppet-inifile-4.2.1-17.1.20250804150825.df46d2a 367/408 Verifying : puppet-ipaclient-2.5.2-17.1.20250725102608.b0867 368/408 Verifying : puppet-keepalived-0.0.2-17.1.20250725114922.bbca 369/408 Verifying : puppet-module-data-0.5.1-17.1.20250725115430.28d 370/408 Verifying : puppet-remote-10.0.0-17.1.20250725114017.7420908 371/408 Verifying : puppet-rsync-1.1.4-17.1.20250804151510.ea6397e.e 372/408 Verifying : puppet-snmp-3.9.1-17.1.20250725115759.5d73485.el 373/408 Verifying : puppet-sysctl-0.0.13-17.1.20250725113328.847ec1c 374/408 Verifying : puppet-vcsrepo-3.1.1-17.1.20250725113408.a36ee18 375/408 Verifying : python3-tripleo-common-15.4.1-17.1.2025090814082 376/408 Verifying : python3-tripleoclient-16.5.1-17.1.20250728123209 377/408 Verifying : tripleo-ansible-3.3.1-17.1.20250804050821.8debef 378/408 Verifying : openstack-tripleo-heat-templates-14.3.1-17.1.202 379/408 Verifying : puppet-horizon-18.6.1-17.1.20251008130751.8074e6 380/408 Verifying : puppet-keystone-18.6.1-17.1.20251008090800.cb0ad 381/408 Verifying : libipa_hbac-2.8.2-5.el9_2.5.x86_64 382/408 Verifying : python3-libipa_hbac-2.8.2-5.el9_2.5.x86_64 383/408 Verifying : python3-sss-murmur-2.8.2-5.el9_2.5.x86_64 384/408 Verifying : device-mapper-event-9:1.02.187-7.el9_2.2.x86_64 385/408 Verifying : device-mapper-event-libs-9:1.02.187-7.el9_2.2.x8 386/408 Verifying : lvm2-9:2.03.17-7.el9_2.2.x86_64 387/408 Verifying : lvm2-libs-9:2.03.17-7.el9_2.2.x86_64 388/408 Verifying : libunwind-1.6.2-1.el9cp.x86_64 389/408 Verifying : cephadm-2:18.2.1-361.el9cp.noarch 390/408 Verifying : libfdisk-2.37.4-11.el9_2.x86_64 391/408 Verifying : libfdisk-2.37.4-10.el9.x86_64 392/408 Verifying : libmount-2.37.4-11.el9_2.x86_64 393/408 Verifying : libmount-2.37.4-10.el9.x86_64 394/408 Verifying : libuuid-2.37.4-11.el9_2.x86_64 395/408 Verifying : libuuid-2.37.4-10.el9.x86_64 396/408 Verifying : util-linux-2.37.4-11.el9_2.x86_64 397/408 Verifying : util-linux-2.37.4-10.el9.x86_64 398/408 Verifying : util-linux-core-2.37.4-11.el9_2.x86_64 399/408 Verifying : util-linux-core-2.37.4-10.el9.x86_64 400/408 Verifying : libblkid-2.37.4-11.el9_2.x86_64 401/408 Verifying : libblkid-2.37.4-10.el9.x86_64 402/408 Verifying : libsmartcols-2.37.4-11.el9_2.x86_64 403/408 Verifying : libsmartcols-2.37.4-10.el9.x86_64 404/408 Verifying : device-mapper-9:1.02.187-7.el9_2.2.x86_64 405/408 Verifying : device-mapper-9:1.02.187-7.el9.x86_64 406/408 Verifying : device-mapper-libs-9:1.02.187-7.el9_2.2.x86_64 407/408 Verifying : device-mapper-libs-9:1.02.187-7.el9.x86_64 408/408 Installed products updated. 
Upgraded: device-mapper-9:1.02.187-7.el9_2.2.x86_64 device-mapper-libs-9:1.02.187-7.el9_2.2.x86_64 libblkid-2.37.4-11.el9_2.x86_64 libfdisk-2.37.4-11.el9_2.x86_64 libmount-2.37.4-11.el9_2.x86_64 libsmartcols-2.37.4-11.el9_2.x86_64 libuuid-2.37.4-11.el9_2.x86_64 util-linux-2.37.4-11.el9_2.x86_64 util-linux-core-2.37.4-11.el9_2.x86_64 Installed: ansible-collection-ansible-netcommon-2.2.0-1.2.el9ost.noarch ansible-collection-ansible-posix-1.2.0-1.3.el9ost.noarch ansible-collection-ansible-utils-2.3.0-2.el9ost.noarch ansible-collection-community-general-4.0.0-1.1.el9ost.noarch ansible-collection-containers-podman-1.9.4-5.el9ost.noarch ansible-collections-openstack-1.9.1-17.1.20250511000957.0e9a6f2.el9ost.noarch ansible-config_template-2.0.1-17.1.20230621083924.7951228.el9ost.noarch ansible-freeipa-1.9.2-3.el9_2.noarch ansible-pacemaker-1.0.4-17.1.20231213170757.7c10fdb.el9ost.noarch ansible-role-atos-hsm-1.0.1-17.1.20230927001631.8618a22.el9ost.noarch ansible-role-chrony-1.3.1-17.1.20230621084226.0111661.el9ost.noarch ansible-role-collectd-config-0.0.3-17.1.20230620165926.1992666.el9ost.noarch ansible-role-container-registry-1.4.1-17.1.20230621045806.a091b9c.el9ost.noarch ansible-role-lunasa-hsm-1.1.1-17.1.20250603180833.5b8127c.el9ost.noarch ansible-role-metalsmith-deployment-1.4.4-17.1.20240522060758.5e7461e.el9ost.noarch ansible-role-openstack-operations-0.0.1-17.1.20230620170737.2ab288f.el9ost.noarch ansible-role-qdr-config-0.0.2-17.1.20230620171136.b456651.el9ost.noarch ansible-role-redhat-subscription-1.3.0-17.1.20230621033420.eefe501.el9ost.noarch ansible-role-thales-hsm-3.0.1-17.1.20231007220803.f95c0fc.el9ost.noarch ansible-role-tripleo-modify-image-1.5.1-17.1.20230621064242.b6eedb6.el9ost.noarch ansible-tripleo-ipa-0.3.1-17.1.20230627190951.8d29d9e.el9ost.noarch ansible-tripleo-ipsec-11.0.1-17.1.20230620172008.b5559c8.el9ost.noarch augeas-libs-1.13.0-3.el9.x86_64 bash-completion-1:2.11-4.el9.noarch boost-atomic-1.75.0-8.el9.x86_64 boost-chrono-1.75.0-8.el9.x86_64 boost-filesystem-1.75.0-8.el9.x86_64 boost-locale-1.75.0-8.el9.x86_64 boost-log-1.75.0-8.el9.x86_64 boost-nowide-1.75.0-8.el9.x86_64 boost-program-options-1.75.0-8.el9.x86_64 boost-regex-1.75.0-8.el9.x86_64 boost-system-1.75.0-8.el9.x86_64 boost-thread-1.75.0-8.el9.x86_64 buildah-1:1.29.5-1.el9_2.x86_64 cephadm-2:18.2.1-361.el9cp.noarch cpp-hocon-0.3.0-7.el9ost.x86_64 device-mapper-event-9:1.02.187-7.el9_2.2.x86_64 device-mapper-event-libs-9:1.02.187-7.el9_2.2.x86_64 device-mapper-persistent-data-0.9.0-13.el9.x86_64 dib-utils-0.0.11-17.1.20230620173328.51661c3.el9ost.noarch facter-1:3.14.19-3.el9ost.x86_64 glibc-langpack-en-2.34-60.el9.x86_64 golang-github-vbatts-tar-split-0.11.1-9.el9ost.x86_64 heat-cfntools-1.4.2-6.1.el9ost.noarch hiera-3.6.0-6.el9ost.noarch ipa-client-common-4.10.1-12.el9_2.2.noarch ipa-common-4.10.1-12.el9_2.2.noarch ipa-selinux-4.10.1-6.el9.noarch iptables-nft-services-1.8.8-6.el9_1.noarch jq-1.6-14.el9.x86_64 krb5-pkinit-1.20.1-8.el9.x86_64 krb5-workstation-1.20.1-8.el9.x86_64 leatherman-1.12.6-5.el9ost.x86_64 libaio-0.3.111-13.el9.x86_64 libburn-1.5.4-4.el9.x86_64 libipa_hbac-2.8.2-5.el9_2.5.x86_64 libisoburn-1.5.4-4.el9.x86_64 libisofs-1.5.4-4.el9.x86_64 libkadm5-1.20.1-8.el9.x86_64 libpkgconf-1.7.3-10.el9.x86_64 libselinux-ruby-3.5-1.el9.x86_64 libsodium-1.0.18-7.el9ost.x86_64 libunwind-1.6.2-1.el9cp.x86_64 libxslt-1.1.34-11.el9_2.x86_64 lvm2-9:2.03.17-7.el9_2.2.x86_64 lvm2-libs-9:2.03.17-7.el9_2.2.x86_64 oniguruma-6.9.6-1.el9.5.x86_64 openpgm-5.2.122-26.el9ost.x86_64 
openssl-perl-1:3.0.7-6.el9_2.x86_64 openstack-ansible-core-2.14.2-4.7.el9ost.x86_64 openstack-heat-agents-2.2.1-17.1.20230620232028.ed16cc7.el9ost.noarch openstack-heat-api-1:16.1.1-17.1.20250703110808.edc6d60.el9ost.noarch openstack-heat-common-1:16.1.1-17.1.20250703110808.edc6d60.el9ost.noarch openstack-heat-engine-1:16.1.1-17.1.20250703110808.edc6d60.el9ost.noarch openstack-heat-monolith-1:16.1.1-17.1.20250703110808.edc6d60.el9ost.noarch openstack-selinux-0.8.37-17.1.20231107080825.05dd1b2.el9ost.noarch openstack-tripleo-common-15.4.1-17.1.20250908140822.e5b18f2.el9ost.noarch openstack-tripleo-common-containers-15.4.1-17.1.20250908140822.e5b18f2.el9ost.noarch openstack-tripleo-heat-templates-14.3.1-17.1.20251015110812.e7c7ce3.el9ost.noarch openstack-tripleo-validations-14.3.2-17.1.20250120160809.2b526f8.el9ost.noarch os-apply-config-13.1.1-17.1.20231020001757.3c7f9b9.el9ost.noarch os-collect-config-13.1.1-17.1.20231020001748.76173d8.el9ost.noarch os-refresh-config-13.1.1-17.1.20231020000854.812905b.el9ost.noarch perl-AutoLoader-5.74-480.el9.noarch perl-B-1.80-480.el9.x86_64 perl-Carp-1.50-460.el9.noarch perl-Class-Struct-0.66-480.el9.noarch perl-Data-Dumper-2.174-462.el9.x86_64 perl-Digest-1.19-4.el9.noarch perl-Digest-MD5-2.58-4.el9.x86_64 perl-Encode-4:3.08-462.el9.x86_64 perl-Errno-1.30-480.el9.x86_64 perl-Exporter-5.74-461.el9.noarch perl-Fcntl-1.13-480.el9.x86_64 perl-File-Basename-2.85-480.el9.noarch perl-File-Path-2.18-4.el9.noarch perl-File-Temp-1:0.231.100-4.el9.noarch perl-File-stat-1.09-480.el9.noarch perl-FileHandle-2.03-480.el9.noarch perl-Getopt-Long-1:2.52-4.el9.noarch perl-Getopt-Std-1.12-480.el9.noarch perl-HTTP-Tiny-0.076-461.el9_2.noarch perl-IO-1.43-480.el9.x86_64 perl-IO-Socket-IP-0.41-5.el9.noarch perl-IO-Socket-SSL-2.073-1.el9.noarch perl-IPC-Open3-1.21-480.el9.noarch perl-MIME-Base64-3.16-4.el9.x86_64 perl-Mozilla-CA-20200520-6.el9.noarch perl-NDBM_File-1.15-480.el9.x86_64 perl-Net-SSLeay-1.92-2.el9.x86_64 perl-POSIX-1.94-480.el9.x86_64 perl-PathTools-3.78-461.el9.x86_64 perl-Pod-Escapes-1:1.07-460.el9.noarch perl-Pod-Perldoc-3.28.01-461.el9.noarch perl-Pod-Simple-1:3.42-4.el9.noarch perl-Pod-Usage-4:2.01-4.el9.noarch perl-Scalar-List-Utils-4:1.56-461.el9.x86_64 perl-SelectSaver-1.02-480.el9.noarch perl-Socket-4:2.031-4.el9.x86_64 perl-Storable-1:3.21-460.el9.x86_64 perl-Symbol-1.08-480.el9.noarch perl-Term-ANSIColor-5.01-461.el9.noarch perl-Term-Cap-1.17-460.el9.noarch perl-Text-ParseWords-3.30-460.el9.noarch perl-Text-Tabs+Wrap-2013.0523-460.el9.noarch perl-Time-Local-2:1.300-7.el9.noarch perl-URI-5.09-3.el9.noarch perl-base-2.27-480.el9.noarch perl-constant-1.33-461.el9.noarch perl-if-0.60.800-480.el9.noarch perl-interpreter-4:5.32.1-480.el9.x86_64 perl-libnet-3.13-4.el9.noarch perl-libs-4:5.32.1-480.el9.x86_64 perl-mro-1.23-480.el9.x86_64 perl-overload-1.31-480.el9.noarch perl-overloading-0.02-480.el9.noarch perl-parent-1:0.238-460.el9.noarch perl-podlators-1:4.14-460.el9.noarch perl-subs-1.03-480.el9.noarch perl-vars-1.05-480.el9.noarch pkgconf-1.7.3-10.el9.x86_64 pkgconf-m4-1.7.3-10.el9.noarch pkgconf-pkg-config-1.7.3-10.el9.x86_64 puppet-7.10.0-4.el9ost.noarch puppet-aodh-18.4.2-17.1.20230620151218.3e47b5a.el9ost.noarch puppet-apache-6.5.2-17.1.20250811150836.e4a1532.el9ost.noarch puppet-archive-4.6.1-17.1.20230620181607.bc7e4ff.el9ost.noarch puppet-auditd-2.2.1-17.1.20230620192453.189b22b.el9ost.noarch puppet-barbican-18.4.2-17.1.20250625204151.af6c77b.el9ost.noarch puppet-ceilometer-18.4.3-17.1.20230927010818.3838907.el9ost.noarch 
puppet-certmonger-2.7.1-17.1.20230620184730.3e2e660.el9ost.noarch puppet-cinder-18.5.2-17.1.20230621054224.6aa60e7.el9ost.noarch puppet-collectd-13.0.1-17.1.20230620193147.ad138a7.el9ost.noarch puppet-concat-6.2.1-17.1.20250725131333.dfeabb9.el9ost.noarch puppet-corosync-8.0.1-17.1.20230621104908.6a9da9a.el9ost.noarch puppet-designate-18.6.1-17.1.20230621061456.f4c0b89.el9ost.noarch puppet-dns-8.2.1-17.1.20230621002045.70f5b28.el9ost.noarch puppet-etcd-1.12.3-17.1.20230620193226.e143c2d.el9ost.noarch puppet-fdio-18.2-1.20220727113954.6fd1c8e.el9ost.noarch puppet-firewall-3.4.1-17.1.20250804151611.94f707c.el9ost.noarch puppet-git-0.5.0-17.1.20250725114148.4e4498e.el9ost.noarch puppet-glance-18.6.1-17.1.20250220151014.81b081d.el9ost.noarch puppet-gnocchi-18.4.3-17.1.20230621061020.7584b94.el9ost.noarch puppet-haproxy-4.2.2-17.1.20250725114653.a797b8c.el9ost.noarch puppet-headless-7.10.0-4.el9ost.noarch puppet-heat-18.4.1-17.1.20230621111632.3b41bb0.el9ost.noarch puppet-horizon-18.6.1-17.1.20251008130751.8074e69.el9ost.noarch puppet-inifile-4.2.1-17.1.20250804150825.df46d2a.el9ost.noarch puppet-ipaclient-2.5.2-17.1.20250725102608.b086731.el9ost.noarch puppet-ironic-18.7.1-17.1.20240305210817.edf93f9.el9ost.noarch puppet-keepalived-0.0.2-17.1.20250725114922.bbca37a.el9ost.noarch puppet-keystone-18.6.1-17.1.20251008090800.cb0adcb.el9ost.noarch puppet-kmod-2.5.0-17.1.20230620190646.52e31e3.el9ost.noarch puppet-manila-18.5.2-17.1.20231102190827.a72a7d5.el9ost.noarch puppet-memcached-6.0.0-17.1.20230620184630.4c70dbd.el9ost.noarch puppet-module-data-0.5.1-17.1.20250725115430.28dafce.el9ost.noarch puppet-mysql-10.6.1-17.1.20230621022141.937d044.el9ost.noarch puppet-neutron-18.6.1-17.1.20230621053056.c9d467f.el9ost.noarch puppet-nova-18.6.1-17.1.20230621090443.a21eae4.el9ost.noarch puppet-nssdb-1.0.2-17.1.20230620185645.2ed2a2d.el9ost.noarch puppet-octavia-18.5.1-17.1.20230621062420.842492c.el9ost.noarch puppet-openstack_extras-18.5.1-17.1.20230621042409.504e1a0.el9ost.noarch puppet-openstacklib-18.5.2-17.1.20230621052822.64d8ac6.el9ost.noarch puppet-oslo-18.5.1-17.1.20230621052358.fe2a147.el9ost.noarch puppet-ovn-18.6.1-17.1.20240925094907.7805f7e.el9ost.noarch puppet-pacemaker-1.5.1-17.1.20250214161010.f10ce89.el9ost.noarch puppet-placement-5.4.3-17.1.20230621061610.e7557a5.el9ost.noarch puppet-qdr-7.4.1-17.1.20230620195605.8a575de.el9ost.noarch puppet-rabbitmq-11.0.1-17.1.20230620182519.63fee2c.el9ost.noarch puppet-redis-6.1.1-17.1.20230620191704.547105e.el9ost.noarch puppet-remote-10.0.0-17.1.20250725114017.7420908.el9ost.noarch puppet-rsync-1.1.4-17.1.20250804151510.ea6397e.el9ost.noarch puppet-rsyslog-4.0.1-17.1.20230620200132.2548a0d.el9ost.noarch puppet-snmp-3.9.1-17.1.20250725115759.5d73485.el9ost.noarch puppet-ssh-6.2.1-17.1.20230620190251.6e0f430.el9ost.noarch puppet-stdlib-6.3.1-17.1.20230621000857.7c1ae25.el9ost.noarch puppet-swift-18.6.1-17.1.20230621052139.f105ffc.el9ost.noarch puppet-sysctl-0.0.13-17.1.20250725113328.847ec1c.el9ost.noarch puppet-systemd-2.12.1-17.1.20230620191611.8f68b0d.el9ost.noarch puppet-tripleo-14.2.3-17.1.20250320101351.40278e1.el9ost.noarch puppet-vcsrepo-3.1.1-17.1.20250725113408.a36ee18.el9ost.noarch puppet-vswitch-14.4.3-17.1.20230621043558.51e82ca.el9ost.noarch puppet-xinetd-3.3.1-17.1.20230620185435.8d460c4.el9ost.noarch python-openstackclient-lang-5.5.2-17.1.20250616100909.42d9b6e.el9ost.noarch python-oslo-cache-lang-2.7.1-17.1.20230621012756.d0252f6.el9ost.noarch python-oslo-db-lang-8.5.2-17.1.20240820150750.26fd6fb.el9ost.noarch 
python-oslo-log-lang-4.4.0-17.1.20230620205407.9b29c90.el9ost.noarch python-oslo-middleware-lang-4.2.1-17.1.20230621011225.b40ca5f.el9ost.noarch python-oslo-policy-lang-3.7.1-17.1.20230621003949.639b471.el9ost.noarch python-oslo-versionedobjects-lang-2.4.1-17.1.20230621010423.89ff171.el9ost.noarch python-pycadf-common-3.1.1-17.1.20230620210757.4179996.el9ost.noarch python3-GitPython-3.1.14-2.el9ost.noarch python3-PyMySQL-0.10.1-6.el9.noarch python3-alembic-1.7.5-3.el9.noarch python3-amqp-5.0.6-5.el9ost.noarch python3-ansible-runner-2.0.0a1-3.el9ost.noarch python3-aodhclient-2.2.0-17.1.20230620222234.b747ae3.el9ost.noarch python3-appdirs-1.4.4-4.el9.noarch python3-augeas-0.5.0-25.el9.noarch python3-barbicanclient-5.3.0-17.1.20230620213453.ad49c40.el9ost.noarch python3-bcrypt-3.1.7-7.el9ost.x86_64 python3-beautifulsoup4-4.9.3-2.el9ost.noarch python3-boto-2.45.0-8.1.el9ost.noarch python3-cachetools-4.2.2-3.el9ost.noarch python3-cinderclient-7.4.1-17.1.20230620211836.4f72e6f.el9ost.noarch python3-cliff-3.7.0-17.1.20230620172206.117a100.el9ost.noarch python3-cmd2-1.4.0-4.el9ost.noarch python3-colorama-0.4.4-4.el9ost.noarch python3-croniter-0.3.35-2.el9ost.noarch python3-daemon-2.3.0-1.el9ost.noarch python3-designateclient-4.2.1-17.1.20230621023138.7a8d156.el9ost.noarch python3-dns-2.2.1-2.el9.noarch python3-docutils-0.16-6.el9ost.noarch python3-dogpile-cache-1.1.5-5.el9ost.noarch python3-etcd3gw-0.2.6-2.el9ost.noarch python3-extras-1.0.0-17.el9ost.noarch python3-fixtures-3.0.0-24.el9ost.noarch python3-futurist-2.3.0-17.1.20230621104020.1a1c6f8.el9ost.noarch python3-gevent-21.1.2-2.el9ost.x86_64 python3-gitdb-4.0.5-2.el9ost.noarch python3-glanceclient-1:3.3.0-17.1.20240712170803.f802c71.el9ost.noarch python3-gssapi-1.6.9-5.el9.x86_64 python3-heat-agent-2.2.1-17.1.20230620232028.ed16cc7.el9ost.noarch python3-heat-agent-ansible-2.2.1-17.1.20230620232028.ed16cc7.el9ost.noarch python3-heat-agent-apply-config-2.2.1-17.1.20230620232028.ed16cc7.el9ost.noarch python3-heat-agent-docker-cmd-2.2.1-17.1.20230620232028.ed16cc7.el9ost.noarch python3-heat-agent-hiera-2.2.1-17.1.20230620232028.ed16cc7.el9ost.noarch python3-heat-agent-json-file-2.2.1-17.1.20230620232028.ed16cc7.el9ost.noarch python3-heat-agent-puppet-2.2.1-17.1.20230620232028.ed16cc7.el9ost.noarch python3-heatclient-2.3.1-17.1.20230621012952.d16c245.el9ost.noarch python3-ipaclient-4.10.1-12.el9_2.2.noarch python3-ipalib-4.10.1-12.el9_2.2.noarch python3-ironic-inspector-client-4.5.0-17.1.20230620205758.3c03e21.el9ost.noarch python3-ironicclient-4.6.4-17.1.20230621041552.09b78fa.el9ost.noarch python3-jeepney-0.6.0-2.el9ost.noarch python3-jmespath-0.10.0-1.el9ost.noarch python3-jwcrypto-0.8-4.el9.noarch python3-keyring-21.8.0-4.el9ost.noarch python3-keystoneauth1-4.4.0-17.1.20240812145234.112bcae.el9ost.noarch python3-keystoneclient-1:4.3.0-17.1.20230621025111.d5cb761.el9ost.noarch python3-keystonemiddleware-9.2.0-17.1.20230620211753.3659bda.el9ost.noarch python3-kombu-1:5.0.2-3.el9ost.noarch python3-ldap-3.4.3-2.el9.x86_64 python3-libipa_hbac-2.8.2-5.el9_2.5.x86_64 python3-lockfile-1:0.12.2-2.el9ost.noarch python3-logutils-0.3.5-15.el9ost.noarch python3-lxml-4.6.5-3.el9.x86_64 python3-magnumclient-3.4.1-17.1.20230621053508.280acd2.el9ost.noarch python3-mako-1.1.4-6.el9.noarch python3-manilaclient-2.6.4-17.1.20240830140806.7f7d7d3.el9ost.noarch python3-memcached-1.59-3.el9ost.noarch python3-metalsmith-1.4.4-17.1.20240522060758.5e7461e.el9ost.noarch python3-migrate-0.13.0-1.el9ost.noarch python3-mimeparse-1.6.0-16.el9ost.noarch 
python3-mistralclient-4.2.0-17.1.20230620220528.20a10f0.el9ost.noarch python3-msgpack-1.0.2-4.el9ost.x86_64 python3-munch-2.5.0-6.el9ost.noarch python3-natsort-7.1.1-2.el9ost.noarch python3-neutron-lib-2.10.3-17.1.20231221164814.619c0fe.el9ost.noarch python3-neutronclient-7.3.1-17.1.20230621044049.29a9f5e.el9ost.noarch python3-novaclient-1:17.4.1-17.1.20230621034300.5ee4427.el9ost.noarch python3-octaviaclient-2.3.1-17.1.20231106100828.51347bc.el9ost.noarch python3-openstackclient-5.5.2-17.1.20250616100909.42d9b6e.el9ost.noarch python3-openstacksdk-0.55.1-17.1.20250516211008.f09ed4a.el9ost.noarch python3-openvswitch3.3-3.3.6-141.el9fdp.x86_64 python3-os-client-config-2.1.0-17.1.20230620203151.bc96c23.el9ost.noarch python3-os-ken-1.4.1-17.1.20241205090937.018d755.el9ost.noarch python3-os-service-types-1.7.0-17.1.20230620201222.0b2f473.el9ost.noarch python3-os-traits-2.5.0-17.1.20230620221402.ac1b39e.el9ost.noarch python3-osc-lib-2.3.1-17.1.20230620203400.2b7a679.el9ost.noarch python3-oslo-cache-2.7.1-17.1.20230621012756.d0252f6.el9ost.noarch python3-oslo-context-3.2.1-17.1.20230620204857.b124eb7.el9ost.noarch python3-oslo-db-8.5.2-17.1.20240820150750.26fd6fb.el9ost.noarch python3-oslo-log-4.4.0-17.1.20230620205407.9b29c90.el9ost.noarch python3-oslo-messaging-12.7.3-17.1.20231219130800.5d6fd1a.el9ost.noarch python3-oslo-middleware-4.2.1-17.1.20230621011225.b40ca5f.el9ost.noarch python3-oslo-policy-3.7.1-17.1.20230621003949.639b471.el9ost.noarch python3-oslo-reports-2.2.0-17.1.20230620210621.bc631ae.el9ost.noarch python3-oslo-rootwrap-6.3.1-17.1.20230621110703.1b1b960.el9ost.noarch python3-oslo-serialization-4.1.1-17.1.20230621011445.bbe5d5a.el9ost.noarch python3-oslo-service-2.5.1-17.1.20240821200745.c1e3398.el9ost.noarch python3-oslo-upgradecheck-1.3.1-17.1.20230621010138.9561ecb.el9ost.noarch python3-oslo-versionedobjects-2.4.1-17.1.20230621010423.89ff171.el9ost.noarch python3-osprofiler-3.4.0-17.1.20230620215259.5d82a02.el9ost.noarch python3-paramiko-2.11.0-2.el9ost.noarch python3-passlib-1.7.4-3.el9ost.noarch python3-paste-3.5.0-5.el9ost.noarch python3-paste-deploy-2.1.1-4.el9ost.noarch python3-pecan-1.4.0-2.el9ost.noarch python3-psutil-5.8.0-12.el9.x86_64 python3-pyOpenSSL-20.0.1-2.el9ost.noarch python3-pyasn1-0.4.8-6.el9.noarch python3-pyasn1-modules-0.4.8-6.el9.noarch python3-pycadf-3.1.1-17.1.20230620210757.4179996.el9ost.noarch python3-pynacl-1.4.0-1.el9ost.x86_64 python3-pyperclip-1.8.0-3.el9ost.noarch python3-pystache-0.5.4-15.el9ost.noarch python3-pyusb-1.0.2-13.el9.noarch python3-qrcode-core-6.1-12.el9.noarch python3-repoze-lru-0.7-12.el9ost.noarch python3-requestsexceptions-1.4.0-17.1.20230620164652.d7ac0ff.el9ost.noarch python3-rhosp-openvswitch-3.3-1.el9ost.noarch python3-routes-2.4.1-14.el9ost.noarch python3-rsa-4.6-3.1.el9ost.noarch python3-saharaclient-3.3.0-17.1.20230620222322.401e663.el9ost.noarch python3-secretstorage-3.3.1-3.el9ost.noarch python3-setproctitle-1.2.2-1.el9ost.x86_64 python3-shade-1.33.0-17.1.20230620225148.e7c7f29.el9ost.noarch python3-simplejson-3.17.5-3.el9ost.x86_64 python3-smmap-3.0.1-4.el9ost.noarch python3-soupsieve-2.2-1.el9ost.noarch python3-sqlalchemy13-1.3.24-3.el9ost.x86_64 python3-sqlparse-0.4.1-2.el9ost.noarch python3-sss-murmur-2.8.2-5.el9_2.5.x86_64 python3-statsd-3.2.1-20.1.el9ost.noarch python3-swiftclient-3.11.1-17.1.20230620204150.06b36ae.el9ost.noarch python3-tempita-0.5.1-27.el9ost.noarch python3-tenacity-6.3.1-1.el9ost.noarch python3-testtools-2.4.0-10.el9ost.noarch python3-tinyrpc-1.0.3-6.el9ost.noarch 
python3-tripleo-common-15.4.1-17.1.20250908140822.e5b18f2.el9ost.noarch python3-tripleoclient-16.5.1-17.1.20250728123209.f3599d0.el9ost.noarch python3-troveclient-7.0.0-17.1.20230620220924.c7319d8.el9ost.noarch python3-urllib-gssapi-1.0.2-4.el9.noarch python3-validations-libs-1.9.1-17.1.20230913020848.8d9e1b5.el9ost.noarch python3-vine-5.0.0-5.el9ost.noarch python3-waitress-2.0.0-4.el9ost.noarch python3-warlock-1.3.3-7.el9ost.noarch python3-wcwidth-0.2.5-8.el9.noarch python3-webob-1.8.7-2.1.el9ost.noarch python3-webtest-2.0.35-6.el9ost.noarch python3-werkzeug-2.0.1-7.el9ost.noarch python3-yappi-1.3.1-5.el9ost.x86_64 python3-yaql-1.1.3-13.el9ost.noarch python3-yubico-1.3.3-7.el9.noarch python3-zaqarclient-2.4.0-17.1.20230620214253.e388947.el9ost.noarch python3-zmq-22.0.3-1.el9ost.x86_64 python3-zope-event-4.2.0-20.el9ost.noarch python3-zope-interface-5.4.0-1.el9ost.x86_64 qemu-img-17:7.2.0-14.el9_2.18.x86_64 rhel-system-roles-1.21.2-1.el9_2.noarch ruby-3.0.4-161.el9_2.2.x86_64 ruby-augeas-0.5.0-29.el9ost.x86_64 ruby-default-gems-3.0.4-161.el9_2.2.noarch ruby-facter-1:3.14.19-3.el9ost.x86_64 ruby-libs-3.0.4-161.el9_2.2.x86_64 rubygem-bigdecimal-3.0.0-161.el9_2.2.x86_64 rubygem-bundler-2.2.33-161.el9_2.2.noarch rubygem-concurrent-ruby-1.1.5-2.el9ost.noarch rubygem-deep_merge-1.2.1-7.el9ost.noarch rubygem-fast_gettext-1.2.0-9.el9ost.noarch rubygem-hocon-1.3.1-2.el9ost.noarch rubygem-io-console-0.5.7-161.el9_2.2.x86_64 rubygem-json-2.5.1-161.el9_2.2.x86_64 rubygem-multi_json-1.15.0-2.el9ost.noarch rubygem-psych-3.3.2-161.el9_2.2.x86_64 rubygem-puppet-resource_api-1.8.13-1.el9ost.noarch rubygem-rdoc-6.3.3-161.el9_2.2.noarch rubygem-rexml-3.2.5-161.el9_2.2.noarch rubygem-ruby-shadow-2.5.0-15.el9ost.x86_64 rubygem-semantic_puppet-1.0.4-2.el9ost.noarch rubygems-3.2.33-161.el9_2.2.noarch skopeo-2:1.11.2-0.1.el9_2.2.x86_64 sshpass-1.09-4.el9.x86_64 tripleo-ansible-3.3.1-17.1.20250804050821.8debef3.el9ost.noarch validations-common-1.9.1-17.1.20231006020828.f273ccb.el9ost.noarch xorriso-1.5.4-4.el9.x86_64 yaml-cpp-0.6.3-6.el9ost.x86_64 zeromq-4.3.4-1.el9ost.x86_64 Complete! + sudo hostnamectl set-hostname standalone.ooo.test + sudo hostnamectl set-hostname standalone.ooo.test --transient Hint: static hostname is already set, so the specified transient hostname will not be used. 
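The two hostnamectl calls above first set the static hostname and then request the same name as a transient one, which is why only a hint is printed. A minimal check of the result (standard hostnamectl usage and file locations, not output captured by this job):

    # The static name takes precedence; the transient request above is ignored.
    hostnamectl status
    cat /etc/hostname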
+ cat + cat + cat + export HOST_PRIMARY_RESOLV_CONF_ENTRY=192.168.122.10 + HOST_PRIMARY_RESOLV_CONF_ENTRY=192.168.122.10 + export INTERFACE_MTU=1500 + INTERFACE_MTU=1500 + export NTP_SERVER=pool.ntp.org + NTP_SERVER=pool.ntp.org + export EDPM_CONFIGURE_HUGEPAGES=false + EDPM_CONFIGURE_HUGEPAGES=false + export EDPM_COMPUTE_CEPH_ENABLED=false + EDPM_COMPUTE_CEPH_ENABLED=false + export EDPM_COMPUTE_CEPH_NOVA=false + EDPM_COMPUTE_CEPH_NOVA=false + export 'CEPH_ARGS=-e /root/deployed_ceph.yaml -e /usr/share/openstack-tripleo-heat-templates/environments/cephadm/cephadm-rbd-only.yaml' + CEPH_ARGS='-e /root/deployed_ceph.yaml -e /usr/share/openstack-tripleo-heat-templates/environments/cephadm/cephadm-rbd-only.yaml' + [[ false == \f\a\l\s\e ]] + export 'CEPH_ARGS=-e /root/deployed_ceph.yaml -e /usr/share/openstack-tripleo-heat-templates/environments/cephadm/cephadm-rbd-only.yaml -e /root/nova_noceph.yaml' + CEPH_ARGS='-e /root/deployed_ceph.yaml -e /usr/share/openstack-tripleo-heat-templates/environments/cephadm/cephadm-rbd-only.yaml -e /root/nova_noceph.yaml' + export COMPUTE_DRIVER=libvirt + COMPUTE_DRIVER=libvirt + export IP=192.168.122.100 + IP=192.168.122.100 + export GATEWAY=192.168.122.10 + GATEWAY=192.168.122.10 + export STANDALONE_VM=false + STANDALONE_VM=false + export BARBICAN_ENABLED=false + BARBICAN_ENABLED=false + export MANILA_ENABLED=true + MANILA_ENABLED=true + export SWIFT_REPLICATED=false + SWIFT_REPLICATED=false + export TLSE_ENABLED=true + TLSE_ENABLED=true + export CLOUD_DOMAIN=ooo.test + CLOUD_DOMAIN=ooo.test + export OCTAVIA_ENABLED=true + OCTAVIA_ENABLED=true + export DESIGNATE_ENABLED=false + DESIGNATE_ENABLED=false + export HEAT_ENABLED=true + HEAT_ENABLED=true + export TELEMETRY_ENABLED=true + TELEMETRY_ENABLED=true + [[ -f /root/containers-prepare-parameters.yaml ]] + openstack tripleo container image prepare default --output-env-file /root/containers-prepare-parameters.yaml # Generated with the following on 2026-01-22T12:24:42.385440 # # openstack tripleo container image prepare default --output-env-file /root/containers-prepare-parameters.yaml # parameter_defaults: ContainerImagePrepare: - set: ceph_alertmanager_image: ose-prometheus-alertmanager ceph_alertmanager_namespace: registry.redhat.io/openshift4 ceph_alertmanager_tag: v4.12 ceph_grafana_image: rhceph-6-dashboard-rhel9 ceph_grafana_namespace: registry.redhat.io/rhceph ceph_grafana_tag: latest ceph_image: rhceph-6-rhel9 ceph_namespace: registry.redhat.io/rhceph ceph_node_exporter_image: ose-prometheus-node-exporter ceph_node_exporter_namespace: registry.redhat.io/openshift4 ceph_node_exporter_tag: v4.12 ceph_prometheus_image: ose-prometheus ceph_prometheus_namespace: registry.redhat.io/openshift4 ceph_prometheus_tag: v4.12 ceph_tag: latest name_prefix: openstack- name_suffix: '' namespace: registry.redhat.io/rhosp-rhel9 neutron_driver: ovn rhel_containers: false tag: '17.1' tag_from_label: '{version}-{release}' + sed -i 's|quay.io/tripleowallaby$|quay.io/tripleowallabycentos9|' /root/containers-prepare-parameters.yaml + sed -i 's|rhceph-6-rhel9|rhceph-7-rhel9|' /root/containers-prepare-parameters.yaml + sudo mkdir -p /etc/os-net-config + cat + sudo tee /etc/cloud/cloud.cfg.d/99-edpm-disable-network-config.cfg network: config: disabled + sudo systemctl enable network network.service is not a native service, redirecting to systemd-sysv-install. 
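For readability, the container image prepare step above amounts to the following. The commented YAML mirrors the parameter_defaults printed in the log, re-indented; the placement of tag_from_label as a sibling of set follows the usual ContainerImagePrepare layout and is an assumption, not something explicit in this flattened output.

    # Generate the default container image parameters, then adjust two values,
    # exactly as the job does above.
    openstack tripleo container image prepare default \
      --output-env-file /root/containers-prepare-parameters.yaml
    # Resulting file, re-indented (values as printed in the log):
    #   parameter_defaults:
    #     ContainerImagePrepare:
    #     - set:
    #         ceph_namespace: registry.redhat.io/rhceph
    #         ceph_image: rhceph-6-rhel9
    #         ceph_tag: latest
    #         ceph_grafana_namespace: registry.redhat.io/rhceph
    #         ceph_grafana_image: rhceph-6-dashboard-rhel9
    #         ceph_grafana_tag: latest
    #         ceph_alertmanager_namespace: registry.redhat.io/openshift4
    #         ceph_alertmanager_image: ose-prometheus-alertmanager
    #         ceph_alertmanager_tag: v4.12
    #         ceph_node_exporter_namespace: registry.redhat.io/openshift4
    #         ceph_node_exporter_image: ose-prometheus-node-exporter
    #         ceph_node_exporter_tag: v4.12
    #         ceph_prometheus_namespace: registry.redhat.io/openshift4
    #         ceph_prometheus_image: ose-prometheus
    #         ceph_prometheus_tag: v4.12
    #         namespace: registry.redhat.io/rhosp-rhel9
    #         name_prefix: openstack-
    #         name_suffix: ''
    #         neutron_driver: ovn
    #         rhel_containers: false
    #         tag: '17.1'
    #       tag_from_label: '{version}-{release}'
    # The job then rewrites two values in place before deploying.
    sed -i 's|quay.io/tripleowallaby$|quay.io/tripleowallabycentos9|' /root/containers-prepare-parameters.yaml
    sed -i 's|rhceph-6-rhel9|rhceph-7-rhel9|' /root/containers-prepare-parameters.yaml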
Executing: /usr/lib/systemd/systemd-sysv-install enable network + sudo cp /tmp/net_config.yaml /etc/os-net-config/config.yaml + sudo os-net-config -c /etc/os-net-config/config.yaml + sudo cp /tmp/net_config.yaml /root/standalone_net_config.j2 + sudo cp /tmp/network_data.yaml /root/network_data.yaml + sudo cp /tmp/deployed_network.yaml /root/deployed_network.yaml + sudo cp /tmp/Standalone.yaml /root/Standalone.yaml + [[ false == \t\r\u\e ]] + /tmp/openstack.sh + EDPM_CONFIGURE_HUGEPAGES=false + EDPM_COMPUTE_CEPH_ENABLED=false + EDPM_COMPUTE_SRIOV_ENABLED=true + EDPM_COMPUTE_DHCP_AGENT_ENABLED=true + COMPUTE_DRIVER=libvirt + INTERFACE_MTU=1500 + BARBICAN_ENABLED=false + MANILA_ENABLED=true + SWIFT_REPLICATED=false + TLSE_ENABLED=true + CLOUD_DOMAIN=ooo.test + TELEMETRY_ENABLED=true + OCTAVIA_ENABLED=true + DESIGNATE_ENABLED=false + IPA_IMAGE=quay.io/freeipa/freeipa-server:fedora-43 + export NEUTRON_INTERFACE=eth0 + NEUTRON_INTERFACE=eth0 + export CTLPLANE_IP=192.168.122.100 + CTLPLANE_IP=192.168.122.100 + export CTLPLANE_VIP=192.168.122.99 + CTLPLANE_VIP=192.168.122.99 + export DESIGNATE_BIND_IP=192.168.122.155 + DESIGNATE_BIND_IP=192.168.122.155 + export CIDR=24 + CIDR=24 + export GATEWAY=192.168.122.10 + GATEWAY=192.168.122.10 + export BRIDGE=br-ctlplane + BRIDGE=br-ctlplane + '[' libvirt = ironic ']' + BRIDGE_MAPPINGS=datacentre:br-ctlplane + NEUTRON_FLAT_NETWORKS=datacentre + cat + CMD='openstack tripleo deploy' + CMD_ARGS+=' --templates /usr/share/openstack-tripleo-heat-templates' + CMD_ARGS+=' --local-ip=192.168.122.100/24' + CMD_ARGS+=' --control-virtual-ip=192.168.122.99' + CMD_ARGS+=' --output-dir /root' + CMD_ARGS+=' --standalone-role Standalone' + CMD_ARGS+=' -r /root/Standalone.yaml' + CMD_ARGS+=' -n /root/network_data.yaml' + ENV_ARGS+=' -e /usr/share/openstack-tripleo-heat-templates/environments/standalone/standalone-tripleo.yaml' + ENV_ARGS+=' -e /usr/share/openstack-tripleo-heat-templates/environments/low-memory-usage.yaml' + ENV_ARGS+=' -e /usr/share/openstack-tripleo-heat-templates/environments/deployed-network-environment.yaml' + ENV_ARGS+=' -e /usr/share/openstack-tripleo-heat-templates/environments/cinder-backup.yaml' + '[' libvirt = ironic ']' + '[' true = true ']' + cat + ENV_ARGS+=' -e /root/enable_heat.yaml' + '[' false = true ']' + '[' true = true ']' + ENV_ARGS+=' -e /usr/share/openstack-tripleo-heat-templates/environments/manila-cephfsnative-config.yaml' + '[' true = true ']' + ENV_ARGS+=' -e /usr/share/openstack-tripleo-heat-templates/environments/services/octavia.yaml' + '[' false = true ']' + '[' true = true ']' + ENV_ARGS+=' -e /usr/share/openstack-tripleo-heat-templates/environments/enable-legacy-telemetry.yaml' + '[' true = true ']' + ENV_ARGS+=' -e /usr/share/openstack-tripleo-heat-templates/environments/ssl/tls-everywhere-endpoints-dns.yaml' + ENV_ARGS+=' -e /usr/share/openstack-tripleo-heat-templates/environments/services/haproxy-public-tls-certmonger.yaml' + ENV_ARGS+=' -e /usr/share/openstack-tripleo-heat-templates/environments/ssl/enable-internal-tls.yaml' + ENV_ARGS+=' -e /usr/share/openstack-tripleo-heat-templates/environments/ssl/enable-memcached-tls.yaml' + ENV_ARGS+=' -e /usr/share/openstack-tripleo-heat-templates/ci/environments/standalone-ipa.yaml' + export IPA_ADMIN_USER=admin + IPA_ADMIN_USER=admin + export IPA_PRINCIPAL=admin + IPA_PRINCIPAL=admin + export IPA_ADMIN_PASSWORD=fce95318204114530f31f885c9df588f + IPA_ADMIN_PASSWORD=fce95318204114530f31f885c9df588f + export IPA_PASSWORD=fce95318204114530f31f885c9df588f + 
IPA_PASSWORD=fce95318204114530f31f885c9df588f + export UNDERCLOUD_FQDN=standalone.ooo.test + UNDERCLOUD_FQDN=standalone.ooo.test + export IPA_DOMAIN=ooo.test + IPA_DOMAIN=ooo.test ++ echo ooo.test ++ awk '{print toupper($0)}' + export IPA_REALM=OOO.TEST + IPA_REALM=OOO.TEST + export IPA_HOST=ipa.ooo.test + IPA_HOST=ipa.ooo.test + export IPA_SERVER_HOSTNAME=ipa.ooo.test + IPA_SERVER_HOSTNAME=ipa.ooo.test + mkdir /tmp/ipa-data + podman run -d --name freeipa-server-container -p 389:389 -p 636:636 --sysctl net.ipv6.conf.lo.disable_ipv6=0 --security-opt seccomp=unconfined --ip 10.88.0.2 -e IPA_SERVER_IP=10.88.0.2 -e PASSWORD=fc**********8f -h ipa.ooo.test --read-only --tmpfs /run --tmpfs /tmp -v /sys/fs/cgroup:/sys/fs/cgroup:ro -v /tmp/ipa-data:/data:Z quay.io/freeipa/freeipa-server:fedora-43 no-exit -U -r OOO.TEST --setup-dns --no-reverse --no-ntp --no-dnssec-validation --auto-forwarders Trying to pull quay.io/freeipa/freeipa-server:fedora-43... Getting image source signatures Copying blob sha256:cbe91ab70922c70fa9cda44ab0185024ff1b3cca9a097c2752f607181e954727 Copying blob sha256:d5e160db48e487347d0ea3aeb0d86356c478decda4c112b7c456d3d4b2befce0 Copying blob sha256:7c49113545ea06a936d3670a2ef8d450b37893b1378cea9ea4322d5a389d5079 Copying blob sha256:88a928f60608a53c81c8da7fbfc4972f9519351a88da0f688290cfe93730dc24 Copying blob sha256:439defec01bf44c8c5559004eb6414838dc17d3619c07baca6fc0f4f8940f574 Copying blob sha256:e89a79721d1130ef45e2d26df882ea998ef63d37e842ff701e0d9feb607ea0c4 Copying blob sha256:9195d6362392200fbf5dcf1eacd6041ae748448e8b9d2cbccac9bd27a314f398 Copying blob sha256:e2a2d502ede874fb25ed60deb93d5977258963a5c825eddfd6a5ebe7beeb3f84 Copying blob sha256:9eeb743b4c023b53b05c5524b368df34fd4129a5a9593299064a91ba6b999744 Copying blob sha256:888704d261cd4d4b0f9a9fb63f6f0afb38aa491747baac5dfbbbc4dc68019b28 Copying blob sha256:27c570e9989b5b78f783d302f491f83de2f8c818351eefe8097ec3ae11df4b96 Copying blob sha256:1c68a451571a36372a6bcdb4149b4c112aae7f19a88038318ce2bd242bf28795 Copying blob sha256:bb1de090caf7ed7df1f8b0aa6585a12a7db53f6e74d50500bf745fe594292698 Copying blob sha256:b7c07b64979201e60d2dc1860032222202f92d02fbc1752d5f6db1b39ee81ede Copying blob sha256:643506b84169f526e021e3f9de6d9d95ef50ee82e2d3de9f1a7bb24880276976 Copying blob sha256:ebd2857e5468297698e5a5ef98cc55688f5075a007eb5bb5258017b4166e622b Copying blob sha256:d1c37dbe12f4c769d86ea794f544027ae2d76492a25de1b94b122318917fab94 Copying blob sha256:6d274aad0fe170d899b37edf483372327140cf85a9e28048d123d60a6a6c1a01 Copying blob sha256:5c831cb9432dd0f3e9182fef0cb69f35b2c3f561b19b447402b7844a475a9a98 Copying blob sha256:f779ce97b32eaace38c2b751c116167bc977f84790c7ec26ad2718dcd7ec2c1a Copying blob sha256:5d4a6a0798f52ac53e25c3fec97fe3be89f074b3e92d34c4ecf52146dad1e512 Copying blob sha256:393e1b40b5926ce9b2e86c6263bc760e3561d9b720e8cddb1ee18dcd513a48ee Copying blob sha256:b83c6c6644a89c5b364a8b5c2e82c5690ef3fb63981affcf063f3ffae5aa47f2 Copying blob sha256:b6161e4517a0397f190778cad078a9faa2bdc6d88fff60506cdf332948f62321 Copying blob sha256:465f1b3de1e9ce21d8e6e2dc672a361818ad50103a3860d498814d13045a3621 Copying blob sha256:9e9ba8fd33ec3a10cf28e1f2e3ec3637cfd956c2d03ad63d6057d327cda774b3 Copying blob sha256:f407281b2df04296d5d6708644a7668b1c9e947e09b85fdebca6688d59b70394 Copying blob sha256:46c5770a9d18ce6929d27ff7ff0bddf210e3d169bfea347f735bf008f112d98e Copying blob sha256:ff2a29abe27befd65e6fa7f9b12c9a4fa92fa669b1f818190c7c8243e3e2531c Copying blob sha256:bd9ddc54bea929a22b334e73e026d4136e5b73f5cc29942896c72e4ece69b13d Copying blob 
sha256:bf200c272c1544b09b4a22742604e381264e90cbe2d4948b59c8f6363352b0bb Copying blob sha256:82da5600c3718769a71ce3968a7ff70ed639f7dc18bc503e6fbc5b70790bca71 Copying blob sha256:93c45fad43e59f7be4bd189d921b63b630f247d97cd70e7e9580c18574eb3468 Copying blob sha256:1f0d08f5836e3e627f58bfa63c56fbf244cf34d19ced7a5b2307c01faac967f7 Copying blob sha256:96aa7757d82180c0aff67c0edeec59e6f170d3f43964ccd136f57ad0b871d851 Copying blob sha256:41fd7cad95dc677b295fa8060cdc6afe90dc3c0b0d37f29e99e2623d1deab8d6 Copying blob sha256:ded4b489adf1496a9846f3ab83254eb1bab8a3a326c2ecba4bbe0c8cc7cf3901 Copying blob sha256:e4ee8a952bc599eeeac9585756d9930ebf7026cfe2ebfde25f1c6eb199751e17 Copying blob sha256:5705d18eef2f0b3b0c44a60257634183b75f732d84d33711ebf2d28a8a0839ba Copying blob sha256:7144f38e8fcfec2552b981029f7fb5e1ecd70aa054ee1e33c073c76ba9f4de08 Copying blob sha256:9bd855bcce51af050996b7e155098952541d79b9bcda11a2b181f4991343765c Copying blob sha256:571afcddd89b47572c64482ff0c9230f6bd96d6f81ad27eb9ac45a63b637c1af Copying blob sha256:eb7cb0f63bffc4fce091095542fe211583468dd51aa8385a802e14df7f4205b5 Copying blob sha256:1aee718e888dba5ac32ce9176eb6311d7f9cf68f6ae2b446d95aa1f85336dc59 Copying blob sha256:8b53f9e8b88655113aa150a46e0f8b68c1e46fde27f5d4bf7e47309dc9e5e5e8 Copying blob sha256:1556de32325a40f1ee853ca0d583607df37c1e9742f88157a0da3472be9476f5 Copying blob sha256:5c9d1c0514ce04b43ba29f633374ac3d55c14394e67da481d5e268be507eee78 Copying blob sha256:303583835711d639925128d1f516fe0ef2e2e95a11447508b6036da2856d7795 Copying blob sha256:a8a01ab64fa96f9d3e06e40bd6a6169b80fa2983c9044b84e62df2b96fc820fa Copying blob sha256:7b02583f49404127e1c8cd33228ace5aba85e6033907ead98e7f7b7872cbb84a Copying blob sha256:ef07d1111db544b6d0ae46e627f405ad49eb28b52c71190f82bdd195f8228ef2 Copying blob sha256:534e80d519bab90997604a1c67c6b01a8c36fa92a18d8800f019f6692eb16fea Copying config sha256:c95b079ed915edeae55c3c242a9279b86e31f7dd16ed1126654c2cf4d4b5de18 Writing manifest to image destination Storing signatures e8799a907778e324f927f9400bf414eb5a9ccee57dc5700adb988e17d5ed99fe + timeout 900s grep -qEi '(INFO The ipa-server-install command was successful|ERROR The ipa-server-install command failed)' /dev/fd/63 ++ tail -F /tmp/ipa-data/var/log/ipaserver-install.log tail: cannot open '/tmp/ipa-data/var/log/ipaserver-install.log' for reading: No such file or directory tail: '/tmp/ipa-data/var/log/ipaserver-install.log' has appeared; following new file + cat + cat /root/.ssh/id_rsa.pub + ansible-playbook /usr/share/ansible/tripleo-playbooks/undercloud-ipa-install.yaml [WARNING]: provided hosts list is empty, only localhost is available. 
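The readiness wait above uses a process substitution (the /dev/fd/63 in the trace). Expanded for readability, it follows the installer log inside the bind-mounted /tmp/ipa-data volume for up to 900 seconds until ipa-server-install reports success or failure:

    # Same command as in the trace above, re-wrapped; grep -q exits on the
    # first matching line, and timeout aborts the wait after 15 minutes.
    timeout 900s grep -qEi \
      '(INFO The ipa-server-install command was successful|ERROR The ipa-server-install command failed)' \
      <(tail -F /tmp/ipa-data/var/log/ipaserver-install.log)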
Note that the implicit localhost does not match 'all' PLAY [Playbook to register the undercloud host with an IPA server] ************* TASK [Gathering Facts] ********************************************************* ok: [localhost] TASK [Ensure definitions] ****************************************************** skipping: [localhost] => (item={'name': 'ipa_domain', 'ansible_var': '', 'env_var': 'ooo.test'}) skipping: [localhost] => (item={'name': 'ipa_realm', 'ansible_var': '', 'env_var': 'OOO.TEST'}) skipping: [localhost] => (item={'name': 'ipa_server_user', 'ansible_var': '', 'env_var': 'admin'}) skipping: [localhost] => (item={'name': 'ipa_server_password', 'ansible_var': '', 'env_var': 'fce95318204114530f31f885c9df588f'}) skipping: [localhost] => (item={'name': 'ipa_server_hostname', 'ansible_var': '', 'env_var': 'ipa.ooo.test'}) skipping: [localhost] => (item={'name': 'undercloud_fqdn', 'ansible_var': '', 'env_var': 'standalone.ooo.test'}) skipping: [localhost] => (item={'name': 'ansible_user', 'ansible_var': '', 'env_var': 'root'}) skipping: [localhost] => (item={'name': 'cloud_domain', 'ansible_var': '', 'env_var': 'ooo.test'}) skipping: [localhost] TASK [Set facts needed for configuration] ************************************** ok: [localhost] TASK [Add host to ipaclients group] ******************************************** changed: [localhost] TASK [Add host to ipaservers group] ******************************************** changed: [localhost] PLAY [ipaclients] ************************************************************** TASK [Gathering Facts] ********************************************************* ok: [standalone.ooo.test] TASK [Include ipaclient role] ************************************************** TASK [ipaclient : Import variables specific to distribution] ******************* ok: [standalone.ooo.test] => (item=/usr/share/ansible/roles/ipaclient/vars/default.yml) TASK [ipaclient : Install IPA client] ****************************************** included: /usr/share/ansible/roles/ipaclient/tasks/install.yml for standalone.ooo.test TASK [ipaclient : Install - Ensure that IPA client packages are installed] ***** changed: [standalone.ooo.test] TASK [ipaclient : Install - Set ipaclient_servers] ***************************** ok: [standalone.ooo.test] TASK [ipaclient : Install - Set ipaclient_servers from cluster inventory] ****** skipping: [standalone.ooo.test] TASK [ipaclient : Install - Check that either password or keytab is set] ******* skipping: [standalone.ooo.test] TASK [ipaclient : Install - Set default principal if no keytab is given] ******* skipping: [standalone.ooo.test] TASK [ipaclient : Install - Fail on missing ipaclient_domain and ipaserver_domain] *** skipping: [standalone.ooo.test] TASK [ipaclient : Install - Fail on missing ipaclient_servers] ***************** skipping: [standalone.ooo.test] TASK [ipaclient : Install - Configure DNS resolver] **************************** skipping: [standalone.ooo.test] TASK [ipaclient : Install - IPA client test] *********************************** ok: [standalone.ooo.test] TASK [ipaclient : Install - Cleanup leftover ccache] *************************** ok: [standalone.ooo.test] TASK [ipaclient : Install - Configure NTP] ************************************* changed: [standalone.ooo.test] TASK [ipaclient : Install - Make sure One-Time Password is enabled if it's already defined] *** skipping: [standalone.ooo.test] TASK [ipaclient : Install - Disable One-Time Password for on_master] *********** skipping: [standalone.ooo.test] 
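The ipaclient role tasks that follow enroll the standalone host against the containerised IPA server. A rough manual equivalent using the values exported earlier in this script is sketched below; the role drives the same steps through its own modules, so the exact flags are an assumption rather than something printed in this log:

    # Illustrative manual enrollment with ipa-client-install (not the role's
    # actual implementation).
    ipa-client-install --unattended \
      --domain ooo.test --realm OOO.TEST --server ipa.ooo.test \
      --hostname standalone.ooo.test \
      --principal admin --password "$IPA_ADMIN_PASSWORD"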
TASK [ipaclient : Install - Test if IPA client has working krb5.keytab] ******** ok: [standalone.ooo.test] TASK [ipaclient : Install - Disable One-Time Password for client with working krb5.keytab] *** skipping: [standalone.ooo.test] TASK [ipaclient : Install - Keytab or password is required for getting otp] **** skipping: [standalone.ooo.test] TASK [ipaclient : Install - Create temporary file for keytab] ****************** skipping: [standalone.ooo.test] TASK [ipaclient : Install - Copy keytab to server temporary file] ************** skipping: [standalone.ooo.test] TASK [ipaclient : Install - Get One-Time Password for client enrollment] ******* skipping: [standalone.ooo.test] TASK [ipaclient : Install - Report error for OTP generation] ******************* skipping: [standalone.ooo.test] TASK [ipaclient : Install - Store the previously obtained OTP] ***************** skipping: [standalone.ooo.test] TASK [ipaclient : Install - Remove keytab temporary file] ********************** skipping: [standalone.ooo.test] TASK [ipaclient : Store predefined OTP in admin_password] ********************** skipping: [standalone.ooo.test] TASK [ipaclient : Install - Check if principal and keytab are set] ************* skipping: [standalone.ooo.test] TASK [ipaclient : Install - Check if one of password or keytabs are set] ******* skipping: [standalone.ooo.test] TASK [ipaclient : Install - From host keytab, purge OOO.TEST] ****************** skipping: [standalone.ooo.test] TASK [ipaclient : Install - Backup and set hostname] *************************** changed: [standalone.ooo.test] TASK [ipaclient : Install - Create temporary krb5 configuration] *************** ok: [standalone.ooo.test] TASK [ipaclient : Install - Join IPA] ****************************************** changed: [standalone.ooo.test] TASK [ipaclient : The krb5 configuration is not correct] *********************** skipping: [standalone.ooo.test] TASK [ipaclient : IPA test failed] ********************************************* skipping: [standalone.ooo.test] TASK [ipaclient : Fail due to missing ca.crt file] ***************************** skipping: [standalone.ooo.test] TASK [ipaclient : Install - Configure IPA default.conf] ************************ changed: [standalone.ooo.test] TASK [ipaclient : Install - Configure SSSD] ************************************ changed: [standalone.ooo.test] TASK [ipaclient : Install - IPA API calls for remaining enrollment parts] ****** changed: [standalone.ooo.test] TASK [ipaclient : Install - Fix IPA ca] **************************************** skipping: [standalone.ooo.test] TASK [ipaclient : Install - Create IPA NSS database] *************************** changed: [standalone.ooo.test] TASK [ipaclient : Install - Configure SSH and SSHD] **************************** changed: [standalone.ooo.test] TASK [ipaclient : Install - Configure automount] ******************************* changed: [standalone.ooo.test] TASK [ipaclient : Install - Configure firefox] ********************************* skipping: [standalone.ooo.test] TASK [ipaclient : Install - Configure NIS] ************************************* changed: [standalone.ooo.test] TASK [ipaclient : Remove temporary krb5.conf] ********************************** changed: [standalone.ooo.test] TASK [ipaclient : Install - Configure krb5 for IPA realm] ********************** changed: [standalone.ooo.test] TASK [ipaclient : Install - Configure certmonger] ****************************** changed: [standalone.ooo.test] TASK [ipaclient : Install - Restore original admin password 
if overwritten by OTP] *** skipping: [standalone.ooo.test] TASK [ipaclient : Cleanup leftover ccache] ************************************* ok: [standalone.ooo.test] TASK [ipaclient : Remove temporary krb5.conf] ********************************** ok: [standalone.ooo.test] TASK [ipaclient : Remove temporary krb5.conf backup] *************************** changed: [standalone.ooo.test] TASK [ipaclient : Uninstall IPA client] **************************************** skipping: [standalone.ooo.test] PLAY [localhost] *************************************************************** TASK [Gathering Facts] ********************************************************* ok: [localhost] TASK [Include create admin roles] ********************************************** TASK [/usr/share/ansible/roles/tripleo_create_admin : create user tripleo-admin] *** changed: [localhost] TASK [/usr/share/ansible/roles/tripleo_create_admin : grant admin rights to user tripleo-admin] *** changed: [localhost] TASK [/usr/share/ansible/roles/tripleo_create_admin : ensure home dir has the right owner/group for user tripleo-admin] *** ok: [localhost] TASK [/usr/share/ansible/roles/tripleo_create_admin : ensure .ssh dir exists for user tripleo-admin] *** changed: [localhost] TASK [/usr/share/ansible/roles/tripleo_create_admin : ensure authorized_keys file exists for user tripleo-admin] *** changed: [localhost] TASK [/usr/share/ansible/roles/tripleo_create_admin : get remote tripleo-admin public key] *** skipping: [localhost] TASK [/usr/share/ansible/roles/tripleo_create_admin : append tripleo-admin public key to authorized_keys] *** skipping: [localhost] TASK [/usr/share/ansible/roles/tripleo_create_admin : authorize TripleO key for user tripleo-admin] *** skipping: [localhost] TASK [/usr/share/ansible/roles/tripleo_create_admin : Install private key on nodes for user tripleo-admin] *** skipping: [localhost] TASK [/usr/share/ansible/roles/tripleo_create_admin : Install public key on nodes for user tripleo-admin] *** skipping: [localhost] PLAY [localhost] *************************************************************** TASK [Gathering Facts] ********************************************************* ok: [localhost] TASK [kinit to get admin credentials] ****************************************** changed: [localhost] TASK [setup the undercloud and get keytab] ************************************* [DEPRECATION WARNING]: "include" is deprecated, use include_tasks/import_tasks instead. See https://docs.ansible.com/ansible- core/2.14/user_guide/playbooks_reuse_includes.html for details. This feature will be removed in version 2.16. Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg. 
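The tripleo_create_admin tasks above reduce to creating a passwordless-sudo deployment user with an SSH authorized_keys file. A rough shell equivalent, illustrative only and not the role's actual implementation:

    # Create the user, grant NOPASSWD sudo, and prepare ~/.ssh/authorized_keys
    # with typical ownership and modes.
    useradd --create-home tripleo-admin
    echo 'tripleo-admin ALL=(ALL) NOPASSWD:ALL' > /etc/sudoers.d/tripleo-admin
    chmod 0440 /etc/sudoers.d/tripleo-admin
    install -d -m 0700 -o tripleo-admin -g tripleo-admin /home/tripleo-admin/.ssh
    install -m 0600 -o tripleo-admin -g tripleo-admin /dev/null /home/tripleo-admin/.ssh/authorized_keys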
TASK [tripleo_ipa_setup : set keytab permissions facts] ************************ ok: [localhost] TASK [tripleo_ipa_setup : add nova host management permissions] **************** changed: [localhost] => (item={'name': 'Modify host password', 'right': 'write', 'type': 'host', 'attrs': ['userpassword']}) changed: [localhost] => (item={'name': 'Write host certificate', 'right': 'write', 'type': 'host', 'attrs': ['usercertificate']}) changed: [localhost] => (item={'name': 'Modify host userclass', 'right': 'write', 'type': 'host', 'attrs': ['userclass']}) changed: [localhost] => (item={'name': 'Modify service managedBy attribute', 'right': 'write', 'type': 'service', 'attrs': ['managedby']}) TASK [tripleo_ipa_setup : add Nova Host privilege] ***************************** changed: [localhost] TASK [tripleo_ipa_setup : add permissions to the Nova Host privilege] ********** changed: [localhost] => (item=System: add hosts) changed: [localhost] => (item=System: remove hosts) changed: [localhost] => (item=Modify host password) changed: [localhost] => (item=Modify host userclass) changed: [localhost] => (item=System: Modify hosts) changed: [localhost] => (item=Modify service managedBy attribute) changed: [localhost] => (item=System: Add krbPrincipalName to a Host) changed: [localhost] => (item=System: Add Services) changed: [localhost] => (item=System: Remove Services) changed: [localhost] => (item=Revoke certificate) changed: [localhost] => (item=System: manage host keytab) changed: [localhost] => (item=System: Manage host certificates) changed: [localhost] => (item=System: modify services) changed: [localhost] => (item=System: manage service keytab) changed: [localhost] => (item=System: read dns entries) changed: [localhost] => (item=System: remove dns entries) changed: [localhost] => (item=System: add dns entries) changed: [localhost] => (item=System: update dns entries) changed: [localhost] => (item=System: Modify Realm Domains) changed: [localhost] => (item=Retrieve Certificates from the CA) TASK [tripleo_ipa_setup : add Nova Host Manager role] ************************** changed: [localhost] TASK [tripleo_ipa_setup : set nova service user facts] ************************* ok: [localhost] TASK [tripleo_ipa_setup : add nova service] ************************************ changed: [localhost] TASK [tripleo_ipa_setup : add Nova Host Manager role] ************************** ok: [localhost] TASK [tripleo_ipa_setup : add service to the Nova Host Manager role] *********** changed: [localhost] TASK [tripleo_ipa_setup : set keytab permissions facts] ************************ ok: [localhost] TASK [tripleo_ipa_setup : add directory for keytab] **************************** changed: [localhost] TASK [tripleo_ipa_setup : get a keytab for the novajoin service] *************** changed: [localhost] TASK [tripleo_ipa_setup : chgrp and chmod the keytab] ************************** changed: [localhost] PLAY RECAP ********************************************************************* localhost : ok=25 changed=16 unreachable=0 failed=0 skipped=6 rescued=0 ignored=0 standalone.ooo.test : ok=25 changed=15 unreachable=0 failed=0 skipped=27 rescued=0 ignored=0 + ENV_ARGS+=' -e /root/standalone_parameters.yaml' + '[' false = true ']' + ENV_ARGS+=' -e /root/containers-prepare-parameters.yaml' + ENV_ARGS+=' -e /root/deployed_network.yaml' + '[' true = true ']' + ENV_ARGS+=' -e /usr/share/openstack-tripleo-heat-templates/environments/services/neutron-ovn-sriov.yaml' + ENV_ARGS+=' -e /root/sriov_template.yaml' + '[' false = true ']' + 
'[' false = true ']' + '[' true = true ']' + ENV_ARGS+=' -e /root/dhcp_agent_template.yaml' + sudo openstack tripleo deploy --templates /usr/share/openstack-tripleo-heat-templates --local-ip=192.168.122.100/24 --control-virtual-ip=192.168.122.99 --output-dir /root --standalone-role Standalone -r /root/Standalone.yaml -n /root/network_data.yaml -e /usr/share/openstack-tripleo-heat-templates/environments/standalone/standalone-tripleo.yaml -e /usr/share/openstack-tripleo-heat-templates/environments/low-memory-usage.yaml -e /usr/share/openstack-tripleo-heat-templates/environments/deployed-network-environment.yaml -e /usr/share/openstack-tripleo-heat-templates/environments/cinder-backup.yaml -e /root/enable_heat.yaml -e /usr/share/openstack-tripleo-heat-templates/environments/manila-cephfsnative-config.yaml -e /usr/share/openstack-tripleo-heat-templates/environments/services/octavia.yaml -e /usr/share/openstack-tripleo-heat-templates/environments/enable-legacy-telemetry.yaml -e /usr/share/openstack-tripleo-heat-templates/environments/ssl/tls-everywhere-endpoints-dns.yaml -e /usr/share/openstack-tripleo-heat-templates/environments/services/haproxy-public-tls-certmonger.yaml -e /usr/share/openstack-tripleo-heat-templates/environments/ssl/enable-internal-tls.yaml -e /usr/share/openstack-tripleo-heat-templates/environments/ssl/enable-memcached-tls.yaml -e /usr/share/openstack-tripleo-heat-templates/ci/environments/standalone-ipa.yaml -e /root/standalone_parameters.yaml -e /root/containers-prepare-parameters.yaml -e /root/deployed_network.yaml -e /usr/share/openstack-tripleo-heat-templates/environments/services/neutron-ovn-sriov.yaml -e /root/sriov_template.yaml -e /root/dhcp_agent_template.yaml [WARNING] Deployment user is set to 'root'. This may cause some deployment files to be located in /root. Please use --deployment-user to specify the user you are deploying with. The heat stack standalone action is CREATE 2026-01-22 12:31:29.042 44751 INFO migrate.versioning.api [-] 72 -> 73...  2026-01-22 12:31:29.065 44751 INFO migrate.versioning.api [-] done 2026-01-22 12:31:29.065 44751 INFO migrate.versioning.api [-] 73 -> 74...  2026-01-22 12:31:29.067 44751 INFO migrate.versioning.api [-] done 2026-01-22 12:31:29.067 44751 INFO migrate.versioning.api [-] 74 -> 75...  2026-01-22 12:31:29.069 44751 INFO migrate.versioning.api [-] done 2026-01-22 12:31:29.069 44751 INFO migrate.versioning.api [-] 75 -> 76...  2026-01-22 12:31:29.071 44751 INFO migrate.versioning.api [-] done 2026-01-22 12:31:29.071 44751 INFO migrate.versioning.api [-] 76 -> 77...  2026-01-22 12:31:29.072 44751 INFO migrate.versioning.api [-] done 2026-01-22 12:31:29.072 44751 INFO migrate.versioning.api [-] 77 -> 78...  2026-01-22 12:31:29.074 44751 INFO migrate.versioning.api [-] done 2026-01-22 12:31:29.074 44751 INFO migrate.versioning.api [-] 78 -> 79...  2026-01-22 12:31:29.097 44751 INFO migrate.versioning.api [-] done 2026-01-22 12:31:29.097 44751 INFO migrate.versioning.api [-] 79 -> 80...  2026-01-22 12:31:29.114 44751 INFO migrate.versioning.api [-] done 2026-01-22 12:31:29.114 44751 INFO migrate.versioning.api [-] 80 -> 81...  2026-01-22 12:31:29.116 44751 INFO migrate.versioning.api [-] done 2026-01-22 12:31:29.116 44751 INFO migrate.versioning.api [-] 81 -> 82...  2026-01-22 12:31:29.117 44751 INFO migrate.versioning.api [-] done 2026-01-22 12:31:29.118 44751 INFO migrate.versioning.api [-] 82 -> 83...  
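For reference, the single-line openstack tripleo deploy invocation above, re-wrapped with line continuations; the arguments and their order are unchanged from the trace:

    sudo openstack tripleo deploy \
      --templates /usr/share/openstack-tripleo-heat-templates \
      --local-ip=192.168.122.100/24 \
      --control-virtual-ip=192.168.122.99 \
      --output-dir /root \
      --standalone-role Standalone \
      -r /root/Standalone.yaml \
      -n /root/network_data.yaml \
      -e /usr/share/openstack-tripleo-heat-templates/environments/standalone/standalone-tripleo.yaml \
      -e /usr/share/openstack-tripleo-heat-templates/environments/low-memory-usage.yaml \
      -e /usr/share/openstack-tripleo-heat-templates/environments/deployed-network-environment.yaml \
      -e /usr/share/openstack-tripleo-heat-templates/environments/cinder-backup.yaml \
      -e /root/enable_heat.yaml \
      -e /usr/share/openstack-tripleo-heat-templates/environments/manila-cephfsnative-config.yaml \
      -e /usr/share/openstack-tripleo-heat-templates/environments/services/octavia.yaml \
      -e /usr/share/openstack-tripleo-heat-templates/environments/enable-legacy-telemetry.yaml \
      -e /usr/share/openstack-tripleo-heat-templates/environments/ssl/tls-everywhere-endpoints-dns.yaml \
      -e /usr/share/openstack-tripleo-heat-templates/environments/services/haproxy-public-tls-certmonger.yaml \
      -e /usr/share/openstack-tripleo-heat-templates/environments/ssl/enable-internal-tls.yaml \
      -e /usr/share/openstack-tripleo-heat-templates/environments/ssl/enable-memcached-tls.yaml \
      -e /usr/share/openstack-tripleo-heat-templates/ci/environments/standalone-ipa.yaml \
      -e /root/standalone_parameters.yaml \
      -e /root/containers-prepare-parameters.yaml \
      -e /root/deployed_network.yaml \
      -e /usr/share/openstack-tripleo-heat-templates/environments/services/neutron-ovn-sriov.yaml \
      -e /root/sriov_template.yaml \
      -e /root/dhcp_agent_template.yaml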
2026-01-22 12:31:29.042 44751 INFO migrate.versioning.api [-] 72 -> 73...
2026-01-22 12:31:29.065 44751 INFO migrate.versioning.api [-] done
2026-01-22 12:31:29.065 44751 INFO migrate.versioning.api [-] 73 -> 74...
2026-01-22 12:31:29.067 44751 INFO migrate.versioning.api [-] done
2026-01-22 12:31:29.067 44751 INFO migrate.versioning.api [-] 74 -> 75...
2026-01-22 12:31:29.069 44751 INFO migrate.versioning.api [-] done
2026-01-22 12:31:29.069 44751 INFO migrate.versioning.api [-] 75 -> 76...
2026-01-22 12:31:29.071 44751 INFO migrate.versioning.api [-] done
2026-01-22 12:31:29.071 44751 INFO migrate.versioning.api [-] 76 -> 77...
2026-01-22 12:31:29.072 44751 INFO migrate.versioning.api [-] done
2026-01-22 12:31:29.072 44751 INFO migrate.versioning.api [-] 77 -> 78...
2026-01-22 12:31:29.074 44751 INFO migrate.versioning.api [-] done
2026-01-22 12:31:29.074 44751 INFO migrate.versioning.api [-] 78 -> 79...
2026-01-22 12:31:29.097 44751 INFO migrate.versioning.api [-] done
2026-01-22 12:31:29.097 44751 INFO migrate.versioning.api [-] 79 -> 80...
2026-01-22 12:31:29.114 44751 INFO migrate.versioning.api [-] done
2026-01-22 12:31:29.114 44751 INFO migrate.versioning.api [-] 80 -> 81...
2026-01-22 12:31:29.116 44751 INFO migrate.versioning.api [-] done
2026-01-22 12:31:29.116 44751 INFO migrate.versioning.api [-] 81 -> 82...
2026-01-22 12:31:29.117 44751 INFO migrate.versioning.api [-] done
2026-01-22 12:31:29.118 44751 INFO migrate.versioning.api [-] 82 -> 83...
2026-01-22 12:31:29.119 44751 INFO migrate.versioning.api [-] done
2026-01-22 12:31:29.119 44751 INFO migrate.versioning.api [-] 83 -> 84...
2026-01-22 12:31:29.121 44751 INFO migrate.versioning.api [-] done
2026-01-22 12:31:29.121 44751 INFO migrate.versioning.api [-] 84 -> 85...
2026-01-22 12:31:29.123 44751 INFO migrate.versioning.api [-] done
2026-01-22 12:31:29.123 44751 INFO migrate.versioning.api [-] 85 -> 86...
2026-01-22 12:31:29.144 44751 INFO migrate.versioning.api [-] done
/usr/lib/python3.9/site-packages/oslo_policy/policy.py:775: UserWarning: Policy "actions:snapshot":"rule:deny_stack_user" was deprecated in W in favor of "actions:snapshot":"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)". Reason: The actions API now supports system scope and default roles. . Either ensure your deployment is ready for the new default or copy/paste the deprecated policy into your policy file and maintain it manually.
warnings.warn(deprecated_msg)
/usr/lib/python3.9/site-packages/oslo_policy/policy.py:775: UserWarning: Policy "actions:suspend":"rule:deny_stack_user" was deprecated in W in favor of "actions:suspend":"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)". Reason: The actions API now supports system scope and default roles. . Either ensure your deployment is ready for the new default or copy/paste the deprecated policy into your policy file and maintain it manually.
warnings.warn(deprecated_msg)
/usr/lib/python3.9/site-packages/oslo_policy/policy.py:775: UserWarning: Policy "actions:resume":"rule:deny_stack_user" was deprecated in W in favor of "actions:resume":"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)". Reason: The actions API now supports system scope and default roles. . Either ensure your deployment is ready for the new default or copy/paste the deprecated policy into your policy file and maintain it manually.
warnings.warn(deprecated_msg)
/usr/lib/python3.9/site-packages/oslo_policy/policy.py:775: UserWarning: Policy "actions:check":"rule:deny_stack_user" was deprecated in W in favor of "actions:check":"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)". Reason: The actions API now supports system scope and default roles. . Either ensure your deployment is ready for the new default or copy/paste the deprecated policy into your policy file and maintain it manually.
warnings.warn(deprecated_msg)
/usr/lib/python3.9/site-packages/oslo_policy/policy.py:775: UserWarning: Policy "actions:cancel_update":"rule:deny_stack_user" was deprecated in W in favor of "actions:cancel_update":"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)". Reason: The actions API now supports system scope and default roles. . Either ensure your deployment is ready for the new default or copy/paste the deprecated policy into your policy file and maintain it manually.
warnings.warn(deprecated_msg)
/usr/lib/python3.9/site-packages/oslo_policy/policy.py:775: UserWarning: Policy "actions:cancel_without_rollback":"rule:deny_stack_user" was deprecated in W in favor of "actions:cancel_without_rollback":"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)". Reason: The actions API now supports system scope and default roles. . Either ensure your deployment is ready for the new default or copy/paste the deprecated policy into your policy file and maintain it manually.
warnings.warn(deprecated_msg) ** Handling template files ** jinja2 rendering normal template overcloud-resource-registry-puppet.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./overcloud-resource-registry-puppet.yaml jinja2 rendering normal template overcloud.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./overcloud.yaml jinja2 rendering normal template post.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./common/post.yaml jinja2 rendering role template role.role.j2.yaml jinja2 rendering roles Standalone rendering j2 template to file: /root/tripleo-heat-installer-templates/./common/services/standalone-role.yaml jinja2 rendering normal template apache-baremetal-puppet.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./deployment/apache/apache-baremetal-puppet.yaml jinja2 rendering normal template container-image-prepare-baremetal-ansible.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./deployment/container-image-prepare/container-image-prepare-baremetal-ansible.yaml jinja2 rendering normal template haproxy-internal-tls-certmonger.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./deployment/haproxy/haproxy-internal-tls-certmonger.yaml jinja2 rendering normal template octavia-deployment-config.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./deployment/octavia/octavia-deployment-config.yaml jinja2 rendering normal template deployed-server-environment.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./environments/deployed-server-environment.yaml jinja2 rendering normal template net-noop.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./environments/net-noop.yaml jinja2 rendering normal template deployed-network-environment.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./environments/deployed-network-environment.yaml jinja2 rendering normal template net-2-linux-bonds-with-vlans.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./environments/net-2-linux-bonds-with-vlans.yaml jinja2 rendering normal template net-bond-with-vlans-no-external.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./environments/net-bond-with-vlans-no-external.yaml jinja2 rendering normal template net-bond-with-vlans.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./environments/net-bond-with-vlans.yaml jinja2 rendering normal template net-dpdkbond-with-vlans.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./environments/net-dpdkbond-with-vlans.yaml jinja2 rendering normal template net-multiple-nics-vlans.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./environments/net-multiple-nics-vlans.yaml jinja2 rendering normal template net-multiple-nics.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./environments/net-multiple-nics.yaml jinja2 rendering normal template net-single-nic-linux-bridge-with-vlans.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./environments/net-single-nic-linux-bridge-with-vlans.yaml jinja2 rendering normal template net-single-nic-with-vlans-no-external.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./environments/net-single-nic-with-vlans-no-external.yaml jinja2 
rendering normal template net-single-nic-with-vlans.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./environments/net-single-nic-with-vlans.yaml jinja2 rendering normal template network-environment-v6-all.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./environments/network-environment-v6-all.yaml jinja2 rendering normal template network-environment-v6.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./environments/network-environment-v6.yaml jinja2 rendering normal template network-environment.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./environments/network-environment.yaml jinja2 rendering normal template networks-disable.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./environments/networks-disable.yaml jinja2 rendering normal template overcloud-baremetal.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./environments/overcloud-baremetal.yaml jinja2 rendering normal template split-stack-consistent-hostname-format.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./environments/split-stack-consistent-hostname-format.yaml jinja2 rendering normal template enable-internal-tls.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./environments/ssl/enable-internal-tls.yaml jinja2 rendering normal template swap-partition.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./extraconfig/all_nodes/swap-partition.yaml jinja2 rendering normal template swap.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./extraconfig/all_nodes/swap.yaml jinja2 rendering role template role.role.j2.yaml jinja2 rendering roles Standalone rendering j2 template to file: /root/tripleo-heat-installer-templates/./extraconfig/nova_metadata/krb-service-principals/standalone-role.yaml jinja2 rendering network template network.network.j2.yaml jinja2 rendering networks Storage,StorageMgmt,InternalApi,Tenant,External rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/storage.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/storage_mgmt.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/internal_api.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/tenant.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/external.yaml jinja2 rendering network template network_v6.network.j2.yaml jinja2 rendering networks Storage,StorageMgmt,InternalApi,Tenant,External rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/storage_v6.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/storage_mgmt_v6.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/internal_api_v6.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/tenant_v6.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/external_v6.yaml jinja2 rendering normal template networks.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/networks.yaml jinja2 rendering normal template service_net_map.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/service_net_map.yaml jinja2 rendering network template 
deployed_port.network.j2.yaml jinja2 rendering networks Storage,StorageMgmt,InternalApi,Tenant,External rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/deployed_storage.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/deployed_storage_mgmt.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/deployed_internal_api.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/deployed_tenant.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/deployed_external.yaml jinja2 rendering network template deployed_vip_port.network.j2.yaml jinja2 rendering networks Storage,StorageMgmt,InternalApi,Tenant,External rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/deployed_vip_storage.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/deployed_vip_storage_mgmt.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/deployed_vip_internal_api.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/deployed_vip_tenant.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/deployed_vip_external.yaml jinja2 rendering network template external_resource_port.network.j2.yaml jinja2 rendering networks Storage,StorageMgmt,InternalApi,Tenant,External rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/external_resource_storage.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/external_resource_storage_mgmt.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/external_resource_internal_api.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/external_resource_tenant.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/external_resource_external.yaml jinja2 rendering network template external_resource_port_v6.network.j2.yaml jinja2 rendering networks Storage,StorageMgmt,InternalApi,Tenant,External rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/external_resource_storage_v6.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/external_resource_storage_mgmt_v6.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/external_resource_internal_api_v6.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/external_resource_tenant_v6.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/external_resource_external_v6.yaml jinja2 rendering normal template net_ip_list_map.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/net_ip_list_map.yaml jinja2 rendering normal template net_ip_map.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/net_ip_map.yaml jinja2 rendering normal template net_vip_map_external.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/net_vip_map_external.yaml jinja2 rendering normal template net_vip_map_external_v6.j2.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/net_vip_map_external_v6.yaml jinja2 
rendering network template port.network.j2.yaml jinja2 rendering networks Storage,StorageMgmt,InternalApi,Tenant,External rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/storage.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/storage_mgmt.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/internal_api.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/tenant.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/external.yaml jinja2 rendering network template port_from_pool.network.j2.yaml jinja2 rendering networks Storage,StorageMgmt,InternalApi,Tenant,External rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/storage_from_pool.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/storage_mgmt_from_pool.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/internal_api_from_pool.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/tenant_from_pool.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/external_from_pool.yaml jinja2 rendering network template port_from_pool_v6.network.j2.yaml jinja2 rendering networks Storage,StorageMgmt,InternalApi,Tenant,External rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/storage_from_pool_v6.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/storage_mgmt_from_pool_v6.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/internal_api_from_pool_v6.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/tenant_from_pool_v6.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/external_from_pool_v6.yaml jinja2 rendering network template port_v6.network.j2.yaml jinja2 rendering networks Storage,StorageMgmt,InternalApi,Tenant,External rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/storage_v6.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/storage_mgmt_v6.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/internal_api_v6.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/tenant_v6.yaml rendering j2 template to file: /root/tripleo-heat-installer-templates/./network/ports/external_v6.yaml jinja2 rendering role template role.role.j2.yaml jinja2 rendering roles Standalone rendering j2 template to file: /root/tripleo-heat-installer-templates/./puppet/standalone-role.yaml ** Performing Heat stack create.. 
** 2026-01-22 12:31:37Z [standalone]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:31:38Z [standalone.DeploymentServerBlacklistDict]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:38Z [standalone.DeploymentServerBlacklistDict]: CREATE_COMPLETE state changed 2026-01-22 12:31:39Z [standalone.NodeAdminUserData]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:39Z [standalone.NodeAdminUserData]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:31:39Z [standalone.NodeAdminUserData.user_config]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:39Z [standalone.NodeAdminUserData.user_config]: CREATE_COMPLETE state changed 2026-01-22 12:31:39Z [standalone.NodeAdminUserData.userdata]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:39Z [standalone.NodeAdminUserData.userdata]: CREATE_COMPLETE state changed 2026-01-22 12:31:39Z [standalone.NodeAdminUserData]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:31:40Z [standalone.NodeTimesyncUserData]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:40Z [standalone.NodeTimesyncUserData]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:31:40Z [standalone.NodeAdminUserData]: CREATE_COMPLETE state changed 2026-01-22 12:31:40Z [standalone.NodeTimesyncUserData.timesync_chrony]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:40Z [standalone.NodeTimesyncUserData.timesync_chrony]: CREATE_COMPLETE state changed 2026-01-22 12:31:41Z [standalone.ServiceNetMap]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:41Z [standalone.ServiceNetMap]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:31:41Z [standalone.ServiceNetMap.ServiceNetMapValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:41Z [standalone.NodeTimesyncUserData.timesync_sync]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:41Z [standalone.ServiceNetMap.ServiceNetMapValue]: CREATE_COMPLETE state changed 2026-01-22 12:31:41Z [standalone.ServiceNetMap]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:31:41Z [standalone.NodeTimesyncUserData.timesync_sync]: CREATE_COMPLETE state changed 2026-01-22 12:31:41Z [standalone.NodeTimesyncUserData.userdata]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:41Z [standalone.NodeTimesyncUserData.userdata]: CREATE_COMPLETE state changed 2026-01-22 12:31:41Z [standalone.NodeTimesyncUserData]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:31:42Z [standalone.NodeUserData]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:42Z [standalone.NodeUserData]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:31:42Z [standalone.ServiceNetMap]: CREATE_COMPLETE state changed 2026-01-22 12:31:42Z [standalone.NodeTimesyncUserData]: CREATE_COMPLETE state changed 2026-01-22 12:31:42Z [standalone.NodeUserData.userdata]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:42Z [standalone.NodeUserData.userdata]: CREATE_COMPLETE state changed 2026-01-22 12:31:42Z [standalone.NodeUserData]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:31:42Z [standalone.ControlVirtualIP]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:42Z [standalone.StandaloneServiceNetMapLower]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:42Z [standalone.StandaloneServiceNetMapLower]: CREATE_COMPLETE state changed 2026-01-22 12:31:43Z [standalone.StandaloneRoleUserData]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:43Z [standalone.StandaloneRoleUserData]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:31:43Z [standalone.NodeUserData]: CREATE_COMPLETE state changed 2026-01-22 
12:31:43Z [standalone.StandaloneRoleUserData.userdata]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:43Z [standalone.StandaloneRoleUserData.userdata]: CREATE_COMPLETE state changed 2026-01-22 12:31:43Z [standalone.StandaloneRoleUserData]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:31:43Z [standalone.ControlVirtualIP]: CREATE_COMPLETE state changed 2026-01-22 12:31:43Z [standalone.Networks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:43Z [standalone.Networks]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:31:43Z [standalone.Networks.NetworkCidrMap]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:44Z [standalone.Networks.NetworkCidrMap]: CREATE_COMPLETE state changed 2026-01-22 12:31:44Z [standalone.CloudNames]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:44Z [standalone.CloudNames]: CREATE_COMPLETE state changed 2026-01-22 12:31:44Z [standalone.StandaloneRoleUserData]: CREATE_COMPLETE state changed 2026-01-22 12:31:44Z [standalone.StandaloneUserData]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:44Z [standalone.StandaloneUserData]: CREATE_COMPLETE state changed 2026-01-22 12:31:44Z [standalone.Networks.NetworkIpVersionMap]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:44Z [standalone.Networks.NetworkIpVersionMap]: CREATE_COMPLETE state changed 2026-01-22 12:31:45Z [standalone.Networks.NetworkExtraConfig]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:45Z [standalone.Networks.NetworkExtraConfig]: CREATE_COMPLETE state changed 2026-01-22 12:31:46Z [standalone.Networks.NetworkAttributesMap]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:46Z [standalone.Networks.NetworkAttributesMap]: CREATE_COMPLETE state changed 2026-01-22 12:31:46Z [standalone.Networks]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:31:47Z [standalone.Networks]: CREATE_COMPLETE state changed 2026-01-22 12:31:48Z [standalone.StorageMgmtVirtualIP]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:48Z [standalone.PublicVirtualIP]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:48Z [standalone.StandaloneNetworkConfig]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:48Z [standalone.StorageVirtualIP]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:48Z [standalone.NetIpVersionMapValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:48Z [standalone.StandaloneNetworkConfig]: CREATE_COMPLETE state changed 2026-01-22 12:31:48Z [standalone.NetCidrMapValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:48Z [standalone.NetIpVersionMapValue]: CREATE_COMPLETE state changed 2026-01-22 12:31:48Z [standalone.NetCidrMapValue]: CREATE_COMPLETE state changed 2026-01-22 12:31:48Z [standalone.InternalApiVirtualIP]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:49Z [standalone.StorageMgmtVirtualIP]: CREATE_COMPLETE state changed 2026-01-22 12:31:49Z [standalone.PublicVirtualIP]: CREATE_COMPLETE state changed 2026-01-22 12:31:49Z [standalone.StorageVirtualIP]: CREATE_COMPLETE state changed 2026-01-22 12:31:49Z [standalone.InternalApiVirtualIP]: CREATE_COMPLETE state changed 2026-01-22 12:31:49Z [standalone.VipMap]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:49Z [standalone.VipMap]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:31:49Z [standalone.VipMap.NetIpMapValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:49Z [standalone.VipMap.NetIpMapValue]: CREATE_COMPLETE state changed 2026-01-22 12:31:49Z [standalone.VipMap]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:31:50Z [standalone.VipMap]: 
CREATE_COMPLETE state changed 2026-01-22 12:31:50Z [standalone.VipHosts]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:50Z [standalone.EndpointMap]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:50Z [standalone.VipHosts]: CREATE_COMPLETE state changed 2026-01-22 12:31:52Z [standalone.EndpointMap]: CREATE_COMPLETE state changed 2026-01-22 12:31:52Z [standalone.EndpointMapData]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:52Z [standalone.EndpointMapData]: CREATE_COMPLETE state changed 2026-01-22 12:31:52Z [standalone.StandaloneServiceChain]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:52Z [standalone.StandaloneServiceChain]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:31:52Z [standalone.StandaloneServiceChain.ServiceChain]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:54Z [standalone.StandaloneServiceChain.ServiceChain]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:31:55Z [standalone.StandaloneServiceChain.ServiceChain.0]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:55Z [standalone.StandaloneServiceChain.ServiceChain.0]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:31:55Z [standalone.StandaloneServiceChain.ServiceChain.0.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:55Z [standalone.StandaloneServiceChain.ServiceChain.0.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:31:56Z [standalone.StandaloneServiceChain.ServiceChain.0.AodhBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:57Z [standalone.StandaloneServiceChain.ServiceChain.0.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:57Z [standalone.StandaloneServiceChain.ServiceChain.0.AodhBase]: CREATE_COMPLETE state changed 2026-01-22 12:31:58Z [standalone.StandaloneServiceChain.ServiceChain.0.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:58Z [standalone.StandaloneServiceChain.ServiceChain.0.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:31:59Z [standalone.StandaloneServiceChain.ServiceChain.0.ApacheServiceBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:59Z [standalone.StandaloneServiceChain.ServiceChain.0.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:31:59Z [standalone.StandaloneServiceChain.ServiceChain.0.ApacheServiceBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:31:59Z [standalone.StandaloneServiceChain.ServiceChain.0.ApacheServiceBase.ApacheNetworks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:31:59Z [standalone.StandaloneServiceChain.ServiceChain.0.ApacheServiceBase.ApacheNetworks]: CREATE_COMPLETE state changed 2026-01-22 12:31:59Z [standalone.StandaloneServiceChain.ServiceChain.0.ApacheServiceBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:32:00Z [standalone.StandaloneServiceChain.ServiceChain.0.ApacheServiceBase]: CREATE_COMPLETE state changed 2026-01-22 12:32:00Z [standalone.StandaloneServiceChain.ServiceChain.0]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:32:01Z [standalone.StandaloneServiceChain.ServiceChain.0]: CREATE_COMPLETE state changed 2026-01-22 12:32:01Z [standalone.StandaloneServiceChain.ServiceChain.1]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:02Z [standalone.StandaloneServiceChain.ServiceChain.1]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:32:02Z [standalone.StandaloneServiceChain.ServiceChain.1.AodhBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:03Z [standalone.StandaloneServiceChain.ServiceChain.1.MySQLClient]: CREATE_IN_PROGRESS state changed 
2026-01-22 12:32:03Z [standalone.StandaloneServiceChain.ServiceChain.1.AodhBase]: CREATE_COMPLETE state changed 2026-01-22 12:32:04Z [standalone.StandaloneServiceChain.ServiceChain.1.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:04Z [standalone.StandaloneServiceChain.ServiceChain.1.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:32:04Z [standalone.StandaloneServiceChain.ServiceChain.1.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:32:05Z [standalone.StandaloneServiceChain.ServiceChain.1.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:06Z [standalone.StandaloneServiceChain.ServiceChain.1.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:06Z [standalone.StandaloneServiceChain.ServiceChain.1]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:32:07Z [standalone.StandaloneServiceChain.ServiceChain.1]: CREATE_COMPLETE state changed 2026-01-22 12:32:07Z [standalone.StandaloneServiceChain.ServiceChain.2]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:07Z [standalone.StandaloneServiceChain.ServiceChain.2]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:32:07Z [standalone.StandaloneServiceChain.ServiceChain.2.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:07Z [standalone.StandaloneServiceChain.ServiceChain.2.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:32:08Z [standalone.StandaloneServiceChain.ServiceChain.2.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:09Z [standalone.StandaloneServiceChain.ServiceChain.2.AodhBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:09Z [standalone.StandaloneServiceChain.ServiceChain.2.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:10Z [standalone.StandaloneServiceChain.ServiceChain.2.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:10Z [standalone.StandaloneServiceChain.ServiceChain.2.AodhBase]: CREATE_COMPLETE state changed 2026-01-22 12:32:11Z [standalone.StandaloneServiceChain.ServiceChain.2.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:32:11Z [standalone.StandaloneServiceChain.ServiceChain.2]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:32:12Z [standalone.StandaloneServiceChain.ServiceChain.2]: CREATE_COMPLETE state changed 2026-01-22 12:32:13Z [standalone.StandaloneServiceChain.ServiceChain.3]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:13Z [standalone.StandaloneServiceChain.ServiceChain.3]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:32:13Z [standalone.StandaloneServiceChain.ServiceChain.3.AodhBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:14Z [standalone.StandaloneServiceChain.ServiceChain.3.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:14Z [standalone.StandaloneServiceChain.ServiceChain.3.AodhBase]: CREATE_COMPLETE state changed 2026-01-22 12:32:15Z [standalone.StandaloneServiceChain.ServiceChain.3.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:15Z [standalone.StandaloneServiceChain.ServiceChain.3.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:16Z [standalone.StandaloneServiceChain.ServiceChain.3.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:16Z [standalone.StandaloneServiceChain.ServiceChain.3.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:32:16Z [standalone.StandaloneServiceChain.ServiceChain.3.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:32:16Z 
[standalone.StandaloneServiceChain.ServiceChain.3]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:32:17Z [standalone.StandaloneServiceChain.ServiceChain.3]: CREATE_COMPLETE state changed 2026-01-22 12:32:17Z [standalone.StandaloneServiceChain.ServiceChain.4]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:17Z [standalone.StandaloneServiceChain.ServiceChain.4]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:32:17Z [standalone.StandaloneServiceChain.ServiceChain.4.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:17Z [standalone.StandaloneServiceChain.ServiceChain.4.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:32:17Z [standalone.StandaloneServiceChain.ServiceChain.4]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:32:18Z [standalone.StandaloneServiceChain.ServiceChain.4]: CREATE_COMPLETE state changed 2026-01-22 12:32:19Z [standalone.StandaloneServiceChain.ServiceChain.5]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:20Z [standalone.StandaloneServiceChain.ServiceChain.5]: CREATE_COMPLETE state changed 2026-01-22 12:32:20Z [standalone.StandaloneServiceChain.ServiceChain.6]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:20Z [standalone.StandaloneServiceChain.ServiceChain.6]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:32:20Z [standalone.StandaloneServiceChain.ServiceChain.6.CeilometerServiceBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:21Z [standalone.StandaloneServiceChain.ServiceChain.6.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:22Z [standalone.StandaloneServiceChain.ServiceChain.6.CeilometerServiceBase]: CREATE_COMPLETE state changed 2026-01-22 12:32:22Z [standalone.StandaloneServiceChain.ServiceChain.6.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:22Z [standalone.StandaloneServiceChain.ServiceChain.6.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:32:23Z [standalone.StandaloneServiceChain.ServiceChain.6.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:23Z [standalone.StandaloneServiceChain.ServiceChain.6]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:32:23Z [standalone.StandaloneServiceChain.ServiceChain.6]: CREATE_COMPLETE state changed 2026-01-22 12:32:24Z [standalone.StandaloneServiceChain.ServiceChain.7]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:24Z [standalone.StandaloneServiceChain.ServiceChain.7]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:32:24Z [standalone.StandaloneServiceChain.ServiceChain.7.CeilometerServiceBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:25Z [standalone.StandaloneServiceChain.ServiceChain.7.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:25Z [standalone.StandaloneServiceChain.ServiceChain.7.CeilometerServiceBase]: CREATE_COMPLETE state changed 2026-01-22 12:32:26Z [standalone.StandaloneServiceChain.ServiceChain.7.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:26Z [standalone.StandaloneServiceChain.ServiceChain.7.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:32:26Z [standalone.StandaloneServiceChain.ServiceChain.7.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:26Z [standalone.StandaloneServiceChain.ServiceChain.7]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:32:27Z [standalone.StandaloneServiceChain.ServiceChain.7]: CREATE_COMPLETE state changed 2026-01-22 12:32:27Z 
[standalone.StandaloneServiceChain.ServiceChain.8]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:27Z [standalone.StandaloneServiceChain.ServiceChain.8]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:32:28Z [standalone.StandaloneServiceChain.ServiceChain.8.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:29Z [standalone.StandaloneServiceChain.ServiceChain.8.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:29Z [standalone.StandaloneServiceChain.ServiceChain.8.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:32:30Z [standalone.StandaloneServiceChain.ServiceChain.8.ApacheServiceBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:30Z [standalone.StandaloneServiceChain.ServiceChain.8.ApacheServiceBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:32:30Z [standalone.StandaloneServiceChain.ServiceChain.8.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:30Z [standalone.StandaloneServiceChain.ServiceChain.8.ApacheServiceBase.ApacheNetworks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:30Z [standalone.StandaloneServiceChain.ServiceChain.8.ApacheServiceBase.ApacheNetworks]: CREATE_COMPLETE state changed 2026-01-22 12:32:30Z [standalone.StandaloneServiceChain.ServiceChain.8.ApacheServiceBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:32:31Z [standalone.StandaloneServiceChain.ServiceChain.8.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:31Z [standalone.StandaloneServiceChain.ServiceChain.8.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:32:31Z [standalone.StandaloneServiceChain.ServiceChain.8.ApacheServiceBase]: CREATE_COMPLETE state changed 2026-01-22 12:32:32Z [standalone.StandaloneServiceChain.ServiceChain.8.CinderCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:32Z [standalone.StandaloneServiceChain.ServiceChain.8.CinderCommon]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:32:32Z [standalone.StandaloneServiceChain.ServiceChain.8.CinderCommon.CinderNVMeOF]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:33Z [standalone.StandaloneServiceChain.ServiceChain.8.CinderBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:33Z [standalone.StandaloneServiceChain.ServiceChain.8.CinderCommon.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:33Z [standalone.StandaloneServiceChain.ServiceChain.8.CinderCommon.CinderNVMeOF]: CREATE_COMPLETE state changed 2026-01-22 12:32:34Z [standalone.StandaloneServiceChain.ServiceChain.8.CinderBase]: CREATE_COMPLETE state changed 2026-01-22 12:32:34Z [standalone.StandaloneServiceChain.ServiceChain.8.CinderCommon.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:34Z [standalone.StandaloneServiceChain.ServiceChain.8.CinderCommon]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:32:35Z [standalone.StandaloneServiceChain.ServiceChain.8.CinderCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:35Z [standalone.StandaloneServiceChain.ServiceChain.8]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:32:36Z [standalone.StandaloneServiceChain.ServiceChain.8]: CREATE_COMPLETE state changed 2026-01-22 12:32:36Z [standalone.StandaloneServiceChain.ServiceChain.9]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:36Z [standalone.StandaloneServiceChain.ServiceChain.9]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:32:36Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderBackupBase]: CREATE_IN_PROGRESS state changed 
2026-01-22 12:32:36Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderBackupBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:32:37Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderBackupBase.CinderCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:37Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderBackupBase.CinderCommon]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:32:37Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderBackupBase.CinderCommon.CinderNVMeOF]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:37Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:37Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderCommon]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:32:37Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderCommon.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:37Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderBackupBase.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:37Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderBackupBase.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:32:38Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderBackupBase.CinderCommon.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:38Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderBackupBase.CinderCommon.CinderNVMeOF]: CREATE_COMPLETE state changed 2026-01-22 12:32:38Z [standalone.StandaloneServiceChain.ServiceChain.9.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:39Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderCommon.CinderNVMeOF]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:39Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderBackupBase.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:39Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderCommon.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:39Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderBackupBase.CinderCommon.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:39Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderBackupBase.CinderCommon]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:32:39Z [standalone.StandaloneServiceChain.ServiceChain.9.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:39Z [standalone.StandaloneServiceChain.ServiceChain.9.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:32:39Z [standalone.StandaloneServiceChain.ServiceChain.9.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:32:39Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderBackupBase.CinderBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:40Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderBackupBase.CinderCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:40Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderCommon.CinderNVMeOF]: CREATE_COMPLETE state changed 2026-01-22 12:32:40Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderBackupBase.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:32:40Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderCommon]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:32:40Z [standalone.StandaloneServiceChain.ServiceChain.9.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:40Z 
[standalone.StandaloneServiceChain.ServiceChain.9.CinderCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:41Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderBackupBase.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:41Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderBackupBase.CinderBase]: CREATE_COMPLETE state changed 2026-01-22 12:32:41Z [standalone.StandaloneServiceChain.ServiceChain.9.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:42Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderBackupBase.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:42Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderBackupBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:32:42Z [standalone.StandaloneServiceChain.ServiceChain.9.CinderBackupBase]: CREATE_COMPLETE state changed 2026-01-22 12:32:42Z [standalone.StandaloneServiceChain.ServiceChain.9]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:32:43Z [standalone.StandaloneServiceChain.ServiceChain.9]: CREATE_COMPLETE state changed 2026-01-22 12:32:44Z [standalone.StandaloneServiceChain.ServiceChain.10]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:44Z [standalone.StandaloneServiceChain.ServiceChain.10]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:32:44Z [standalone.StandaloneServiceChain.ServiceChain.10.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:44Z [standalone.StandaloneServiceChain.ServiceChain.10.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:32:45Z [standalone.StandaloneServiceChain.ServiceChain.10.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:46Z [standalone.StandaloneServiceChain.ServiceChain.10.CinderBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:47Z [standalone.StandaloneServiceChain.ServiceChain.10.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:32:48Z [standalone.StandaloneServiceChain.ServiceChain.10.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:48Z [standalone.StandaloneServiceChain.ServiceChain.10.CinderBase]: CREATE_COMPLETE state changed 2026-01-22 12:32:48Z [standalone.StandaloneServiceChain.ServiceChain.10.CinderCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:49Z [standalone.StandaloneServiceChain.ServiceChain.10.CinderCommon]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:32:49Z [standalone.StandaloneServiceChain.ServiceChain.10.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:49Z [standalone.StandaloneServiceChain.ServiceChain.10.CinderCommon.CinderNVMeOF]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:50Z [standalone.StandaloneServiceChain.ServiceChain.10.CinderCommon.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:50Z [standalone.StandaloneServiceChain.ServiceChain.10.CinderCommon.CinderNVMeOF]: CREATE_COMPLETE state changed 2026-01-22 12:32:51Z [standalone.StandaloneServiceChain.ServiceChain.10.CinderCommon.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:51Z [standalone.StandaloneServiceChain.ServiceChain.10.CinderCommon]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:32:52Z [standalone.StandaloneServiceChain.ServiceChain.10.CinderCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:52Z [standalone.StandaloneServiceChain.ServiceChain.10]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:32:52Z [standalone.StandaloneServiceChain.ServiceChain.10]: CREATE_COMPLETE 
state changed 2026-01-22 12:32:53Z [standalone.StandaloneServiceChain.ServiceChain.11]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:53Z [standalone.StandaloneServiceChain.ServiceChain.11]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:32:53Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:53Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderCommon]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:32:53Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderCommon.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:54Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:54Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:32:54Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderCommon.CinderNVMeOF]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:55Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderBase.CinderCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:55Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderCommon.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:55Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderBase.CinderCommon]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:32:55Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderBase.CinderCommon.CinderNVMeOF]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:55Z [standalone.StandaloneServiceChain.ServiceChain.11.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:55Z [standalone.StandaloneServiceChain.ServiceChain.11.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:32:56Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderBase.CinderBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:56Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderCommon.CinderNVMeOF]: CREATE_COMPLETE state changed 2026-01-22 12:32:56Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderCommon]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:32:56Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderBase.CinderCommon.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:56Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderBase.CinderCommon.CinderNVMeOF]: CREATE_COMPLETE state changed 2026-01-22 12:32:56Z [standalone.StandaloneServiceChain.ServiceChain.11.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:56Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:56Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderBase.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:57Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderBase.CinderBase]: CREATE_COMPLETE state changed 2026-01-22 12:32:57Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderBase.CinderCommon.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:57Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderBase.CinderCommon]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:32:57Z [standalone.StandaloneServiceChain.ServiceChain.11.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:57Z [standalone.StandaloneServiceChain.ServiceChain.11.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:32:58Z 
[standalone.StandaloneServiceChain.ServiceChain.11.CinderBase.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:32:58Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderBase.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:32:58Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderBase.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:58Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderBase.CinderCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:58Z [standalone.StandaloneServiceChain.ServiceChain.11.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:32:59Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderBase.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:00Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderBase.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:33:00Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:33:00Z [standalone.StandaloneServiceChain.ServiceChain.11.CinderBase]: CREATE_COMPLETE state changed 2026-01-22 12:33:00Z [standalone.StandaloneServiceChain.ServiceChain.11]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:33:01Z [standalone.StandaloneServiceChain.ServiceChain.11]: CREATE_COMPLETE state changed 2026-01-22 12:33:02Z [standalone.StandaloneServiceChain.ServiceChain.12]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:02Z [standalone.StandaloneServiceChain.ServiceChain.12]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:33:03Z [standalone.StandaloneServiceChain.ServiceChain.12.MysqlPuppetBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:03Z [standalone.StandaloneServiceChain.ServiceChain.12.MysqlPuppetBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:33:03Z [standalone.StandaloneServiceChain.ServiceChain.12.MysqlPuppetBase.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:03Z [standalone.StandaloneServiceChain.ServiceChain.12.MysqlPuppetBase.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:33:04Z [standalone.StandaloneServiceChain.ServiceChain.12.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:04Z [standalone.StandaloneServiceChain.ServiceChain.12.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:33:04Z [standalone.StandaloneServiceChain.ServiceChain.12.MysqlPuppetBase.MysqlBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:05Z [standalone.StandaloneServiceChain.ServiceChain.12.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:05Z [standalone.StandaloneServiceChain.ServiceChain.12.MysqlPuppetBase.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:05Z [standalone.StandaloneServiceChain.ServiceChain.12.MysqlPuppetBase.MysqlBase]: CREATE_COMPLETE state changed 2026-01-22 12:33:06Z [standalone.StandaloneServiceChain.ServiceChain.12.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:33:06Z [standalone.StandaloneServiceChain.ServiceChain.12.MysqlPuppetBase.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:33:06Z [standalone.StandaloneServiceChain.ServiceChain.12.MysqlPuppetBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:33:07Z [standalone.StandaloneServiceChain.ServiceChain.12.MysqlPuppetBase]: CREATE_COMPLETE state changed 2026-01-22 12:33:07Z [standalone.StandaloneServiceChain.ServiceChain.12]: CREATE_COMPLETE Stack CREATE completed successfully 
2026-01-22 12:33:08Z [standalone.StandaloneServiceChain.ServiceChain.12]: CREATE_COMPLETE state changed 2026-01-22 12:33:08Z [standalone.StandaloneServiceChain.ServiceChain.13]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:08Z [standalone.StandaloneServiceChain.ServiceChain.13]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:33:08Z [standalone.StandaloneServiceChain.ServiceChain.13.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:09Z [standalone.StandaloneServiceChain.ServiceChain.13.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:09Z [standalone.StandaloneServiceChain.ServiceChain.13.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:33:09Z [standalone.StandaloneServiceChain.ServiceChain.13.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:33:10Z [standalone.StandaloneServiceChain.ServiceChain.13.CeilometerServiceBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:11Z [standalone.StandaloneServiceChain.ServiceChain.13.CeilometerServiceBase]: CREATE_COMPLETE state changed 2026-01-22 12:33:11Z [standalone.StandaloneServiceChain.ServiceChain.13]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:33:12Z [standalone.StandaloneServiceChain.ServiceChain.13]: CREATE_COMPLETE state changed 2026-01-22 12:33:13Z [standalone.StandaloneServiceChain.ServiceChain.14]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:14Z [standalone.StandaloneServiceChain.ServiceChain.14]: CREATE_COMPLETE state changed 2026-01-22 12:33:14Z [standalone.StandaloneServiceChain.ServiceChain.15]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:14Z [standalone.StandaloneServiceChain.ServiceChain.15]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:33:14Z [standalone.StandaloneServiceChain.ServiceChain.15.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:14Z [standalone.StandaloneServiceChain.ServiceChain.15.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:33:15Z [standalone.StandaloneServiceChain.ServiceChain.15.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:16Z [standalone.StandaloneServiceChain.ServiceChain.15.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:33:16Z [standalone.StandaloneServiceChain.ServiceChain.15]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:33:17Z [standalone.StandaloneServiceChain.ServiceChain.15]: CREATE_COMPLETE state changed 2026-01-22 12:33:17Z [standalone.StandaloneServiceChain.ServiceChain.16]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:19Z [standalone.StandaloneServiceChain.ServiceChain.16]: CREATE_COMPLETE state changed 2026-01-22 12:33:19Z [standalone.StandaloneServiceChain.ServiceChain.17]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:19Z [standalone.StandaloneServiceChain.ServiceChain.17]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:33:19Z [standalone.StandaloneServiceChain.ServiceChain.17.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:19Z [standalone.StandaloneServiceChain.ServiceChain.17.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:33:20Z [standalone.StandaloneServiceChain.ServiceChain.17.TLSProxyBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:20Z [standalone.StandaloneServiceChain.ServiceChain.17.TLSProxyBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:33:20Z [standalone.StandaloneServiceChain.ServiceChain.17.TLSProxyBase.ApacheNetworks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:20Z 
[standalone.StandaloneServiceChain.ServiceChain.17.TLSProxyBase.ApacheNetworks]: CREATE_COMPLETE state changed 2026-01-22 12:33:20Z [standalone.StandaloneServiceChain.ServiceChain.17.TLSProxyBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:33:21Z [standalone.StandaloneServiceChain.ServiceChain.17.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:21Z [standalone.StandaloneServiceChain.ServiceChain.17.TLSProxyBase]: CREATE_COMPLETE state changed 2026-01-22 12:33:22Z [standalone.StandaloneServiceChain.ServiceChain.17.GlanceLogging]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:22Z [standalone.StandaloneServiceChain.ServiceChain.17.GlanceLogging]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:33:22Z [standalone.StandaloneServiceChain.ServiceChain.17.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:33:22Z [standalone.StandaloneServiceChain.ServiceChain.17.GlanceLogging.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:22Z [standalone.StandaloneServiceChain.ServiceChain.17.GlanceLogging.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:33:22Z [standalone.StandaloneServiceChain.ServiceChain.17.GlanceLogging]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:33:23Z [standalone.StandaloneServiceChain.ServiceChain.17.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:23Z [standalone.StandaloneServiceChain.ServiceChain.17.GlanceLogging]: CREATE_COMPLETE state changed 2026-01-22 12:33:24Z [standalone.StandaloneServiceChain.ServiceChain.17.CinderNVMeOF]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:24Z [standalone.StandaloneServiceChain.ServiceChain.17.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:33:25Z [standalone.StandaloneServiceChain.ServiceChain.17.CinderNVMeOF]: CREATE_COMPLETE state changed 2026-01-22 12:33:25Z [standalone.StandaloneServiceChain.ServiceChain.17]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:33:26Z [standalone.StandaloneServiceChain.ServiceChain.17]: CREATE_COMPLETE state changed 2026-01-22 12:33:27Z [standalone.StandaloneServiceChain.ServiceChain.18]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:27Z [standalone.StandaloneServiceChain.ServiceChain.18]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:33:27Z [standalone.StandaloneServiceChain.ServiceChain.18.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:28Z [standalone.StandaloneServiceChain.ServiceChain.18.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:28Z [standalone.StandaloneServiceChain.ServiceChain.18.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:33:28Z [standalone.StandaloneServiceChain.ServiceChain.18.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:33:29Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:29Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:33:29Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi.CinderNVMeOF]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:30Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceLogging]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:30Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceLogging]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:33:30Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceLogging.RoleParametersValue]: CREATE_IN_PROGRESS 
state changed 2026-01-22 12:33:30Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceLogging.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:33:30Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceLogging]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:33:30Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:30Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:33:30Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi.CinderNVMeOF]: CREATE_COMPLETE state changed 2026-01-22 12:33:31Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceLogging]: CREATE_COMPLETE state changed 2026-01-22 12:33:31Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi.TLSProxyBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:31Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi.TLSProxyBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:33:31Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi.TLSProxyBase.ApacheNetworks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:31Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi.TLSProxyBase.ApacheNetworks]: CREATE_COMPLETE state changed 2026-01-22 12:33:31Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi.TLSProxyBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:33:32Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:32Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi.TLSProxyBase]: CREATE_COMPLETE state changed 2026-01-22 12:33:33Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi.GlanceLogging]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:33Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi.GlanceLogging]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:33:33Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:33:33Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi.GlanceLogging.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:33Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi.GlanceLogging.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:33:33Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi.GlanceLogging]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:33:34Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:34Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi.GlanceLogging]: CREATE_COMPLETE state changed 2026-01-22 12:33:35Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:33:35Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:33:36Z [standalone.StandaloneServiceChain.ServiceChain.18.GlanceApi]: CREATE_COMPLETE state changed 2026-01-22 12:33:36Z [standalone.StandaloneServiceChain.ServiceChain.18]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:33:37Z [standalone.StandaloneServiceChain.ServiceChain.18]: CREATE_COMPLETE state changed 2026-01-22 12:33:38Z 
[standalone.StandaloneServiceChain.ServiceChain.19]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:38Z [standalone.StandaloneServiceChain.ServiceChain.19]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:33:38Z [standalone.StandaloneServiceChain.ServiceChain.19.GnocchiServiceBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:39Z [standalone.StandaloneServiceChain.ServiceChain.19.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:39Z [standalone.StandaloneServiceChain.ServiceChain.19.GnocchiServiceBase]: CREATE_COMPLETE state changed 2026-01-22 12:33:40Z [standalone.StandaloneServiceChain.ServiceChain.19.ApacheServiceBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:40Z [standalone.StandaloneServiceChain.ServiceChain.19.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:33:40Z [standalone.StandaloneServiceChain.ServiceChain.19.ApacheServiceBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:33:40Z [standalone.StandaloneServiceChain.ServiceChain.19.ApacheServiceBase.ApacheNetworks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:40Z [standalone.StandaloneServiceChain.ServiceChain.19.ApacheServiceBase.ApacheNetworks]: CREATE_COMPLETE state changed 2026-01-22 12:33:40Z [standalone.StandaloneServiceChain.ServiceChain.19.ApacheServiceBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:33:41Z [standalone.StandaloneServiceChain.ServiceChain.19.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:41Z [standalone.StandaloneServiceChain.ServiceChain.19.ApacheServiceBase]: CREATE_COMPLETE state changed 2026-01-22 12:33:42Z [standalone.StandaloneServiceChain.ServiceChain.19.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:42Z [standalone.StandaloneServiceChain.ServiceChain.19.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:33:42Z [standalone.StandaloneServiceChain.ServiceChain.19.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:33:42Z [standalone.StandaloneServiceChain.ServiceChain.19]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:33:43Z [standalone.StandaloneServiceChain.ServiceChain.19]: CREATE_COMPLETE state changed 2026-01-22 12:33:43Z [standalone.StandaloneServiceChain.ServiceChain.20]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:43Z [standalone.StandaloneServiceChain.ServiceChain.20]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:33:43Z [standalone.StandaloneServiceChain.ServiceChain.20.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:43Z [standalone.StandaloneServiceChain.ServiceChain.20.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:33:44Z [standalone.StandaloneServiceChain.ServiceChain.20.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:46Z [standalone.StandaloneServiceChain.ServiceChain.20.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:46Z [standalone.StandaloneServiceChain.ServiceChain.20.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:33:46Z [standalone.StandaloneServiceChain.ServiceChain.20.GnocchiServiceBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:47Z [standalone.StandaloneServiceChain.ServiceChain.20.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:33:48Z [standalone.StandaloneServiceChain.ServiceChain.20.GnocchiServiceBase]: CREATE_COMPLETE state changed 2026-01-22 12:33:48Z [standalone.StandaloneServiceChain.ServiceChain.20]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 
12:33:48Z [standalone.StandaloneServiceChain.ServiceChain.20]: CREATE_COMPLETE state changed 2026-01-22 12:33:49Z [standalone.StandaloneServiceChain.ServiceChain.21]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:49Z [standalone.StandaloneServiceChain.ServiceChain.21]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:33:49Z [standalone.StandaloneServiceChain.ServiceChain.21.GnocchiServiceBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:50Z [standalone.StandaloneServiceChain.ServiceChain.21.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:50Z [standalone.StandaloneServiceChain.ServiceChain.21.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:33:50Z [standalone.StandaloneServiceChain.ServiceChain.21.GnocchiServiceBase]: CREATE_COMPLETE state changed 2026-01-22 12:33:51Z [standalone.StandaloneServiceChain.ServiceChain.21.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:52Z [standalone.StandaloneServiceChain.ServiceChain.21.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:52Z [standalone.StandaloneServiceChain.ServiceChain.21.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:33:53Z [standalone.StandaloneServiceChain.ServiceChain.21.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:33:53Z [standalone.StandaloneServiceChain.ServiceChain.21]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:33:54Z [standalone.StandaloneServiceChain.ServiceChain.21]: CREATE_COMPLETE state changed 2026-01-22 12:33:55Z [standalone.StandaloneServiceChain.ServiceChain.22]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:55Z [standalone.StandaloneServiceChain.ServiceChain.22]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:33:55Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:55Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:33:55Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyBase.HAProxyPublicTLS]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:56Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyPublicTLS]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:56Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyBase.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:56Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyBase.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:33:56Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyBase.HAProxyPublicTLS]: CREATE_COMPLETE state changed 2026-01-22 12:33:57Z [standalone.StandaloneServiceChain.ServiceChain.22.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:57Z [standalone.StandaloneServiceChain.ServiceChain.22.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:33:57Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyPublicTLS]: CREATE_COMPLETE state changed 2026-01-22 12:33:57Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyBase.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:58Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyInternalTLS]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:58Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyInternalTLS]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:33:58Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyBase.HAProxyInternalTLS]: CREATE_IN_PROGRESS state 
changed 2026-01-22 12:33:58Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyInternalTLS.HAProxyNetworks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:58Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyInternalTLS.HAProxyNetworks]: CREATE_COMPLETE state changed 2026-01-22 12:33:58Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyInternalTLS]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:33:58Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyBase.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:33:58Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyBase.HAProxyInternalTLS]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:33:58Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyBase.HAProxyInternalTLS.HAProxyNetworks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:58Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyBase.HAProxyInternalTLS.HAProxyNetworks]: CREATE_COMPLETE state changed 2026-01-22 12:33:58Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyBase.HAProxyInternalTLS]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:33:59Z [standalone.StandaloneServiceChain.ServiceChain.22.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:59Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyInternalTLS]: CREATE_COMPLETE state changed 2026-01-22 12:33:59Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyBase.HAProxyLogging]: CREATE_IN_PROGRESS state changed 2026-01-22 12:33:59Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyBase.HAProxyInternalTLS]: CREATE_COMPLETE state changed 2026-01-22 12:34:00Z [standalone.StandaloneServiceChain.ServiceChain.22.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:34:00Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyBase.HAProxyLogging]: CREATE_COMPLETE state changed 2026-01-22 12:34:00Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:34:01Z [standalone.StandaloneServiceChain.ServiceChain.22.HAProxyBase]: CREATE_COMPLETE state changed 2026-01-22 12:34:01Z [standalone.StandaloneServiceChain.ServiceChain.22]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:34:02Z [standalone.StandaloneServiceChain.ServiceChain.22]: CREATE_COMPLETE state changed 2026-01-22 12:34:03Z [standalone.StandaloneServiceChain.ServiceChain.23]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:03Z [standalone.StandaloneServiceChain.ServiceChain.23]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:34:03Z [standalone.StandaloneServiceChain.ServiceChain.23.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:04Z [standalone.StandaloneServiceChain.ServiceChain.23.ApacheServiceBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:04Z [standalone.StandaloneServiceChain.ServiceChain.23.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:34:04Z [standalone.StandaloneServiceChain.ServiceChain.23.ApacheServiceBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:34:04Z [standalone.StandaloneServiceChain.ServiceChain.23.ApacheServiceBase.ApacheNetworks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:04Z [standalone.StandaloneServiceChain.ServiceChain.23.ApacheServiceBase.ApacheNetworks]: CREATE_COMPLETE state changed 2026-01-22 12:34:04Z [standalone.StandaloneServiceChain.ServiceChain.23.ApacheServiceBase]: CREATE_COMPLETE Stack CREATE 
completed successfully 2026-01-22 12:34:05Z [standalone.StandaloneServiceChain.ServiceChain.23.HeatApiLogging]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:05Z [standalone.StandaloneServiceChain.ServiceChain.23.ApacheServiceBase]: CREATE_COMPLETE state changed 2026-01-22 12:34:06Z [standalone.StandaloneServiceChain.ServiceChain.23.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:06Z [standalone.StandaloneServiceChain.ServiceChain.23.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:34:06Z [standalone.StandaloneServiceChain.ServiceChain.23.HeatApiLogging]: CREATE_COMPLETE state changed 2026-01-22 12:34:07Z [standalone.StandaloneServiceChain.ServiceChain.23.HeatBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:08Z [standalone.StandaloneServiceChain.ServiceChain.23.HeatBase]: CREATE_COMPLETE state changed 2026-01-22 12:34:08Z [standalone.StandaloneServiceChain.ServiceChain.23]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:34:09Z [standalone.StandaloneServiceChain.ServiceChain.23]: CREATE_COMPLETE state changed 2026-01-22 12:34:09Z [standalone.StandaloneServiceChain.ServiceChain.24]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:09Z [standalone.StandaloneServiceChain.ServiceChain.24]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:34:09Z [standalone.StandaloneServiceChain.ServiceChain.24.ApacheServiceBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:09Z [standalone.StandaloneServiceChain.ServiceChain.24.ApacheServiceBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:34:10Z [standalone.StandaloneServiceChain.ServiceChain.24.ApacheServiceBase.ApacheNetworks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:10Z [standalone.StandaloneServiceChain.ServiceChain.24.ApacheServiceBase.ApacheNetworks]: CREATE_COMPLETE state changed 2026-01-22 12:34:10Z [standalone.StandaloneServiceChain.ServiceChain.24.ApacheServiceBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:34:10Z [standalone.StandaloneServiceChain.ServiceChain.24.HeatApiCfnLogging]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:11Z [standalone.StandaloneServiceChain.ServiceChain.24.ApacheServiceBase]: CREATE_COMPLETE state changed 2026-01-22 12:34:11Z [standalone.StandaloneServiceChain.ServiceChain.24.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:12Z [standalone.StandaloneServiceChain.ServiceChain.24.HeatApiCfnLogging]: CREATE_COMPLETE state changed 2026-01-22 12:34:12Z [standalone.StandaloneServiceChain.ServiceChain.24.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:12Z [standalone.StandaloneServiceChain.ServiceChain.24.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:34:13Z [standalone.StandaloneServiceChain.ServiceChain.24.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:34:13Z [standalone.StandaloneServiceChain.ServiceChain.24.HeatBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:15Z [standalone.StandaloneServiceChain.ServiceChain.24.HeatBase]: CREATE_COMPLETE state changed 2026-01-22 12:34:15Z [standalone.StandaloneServiceChain.ServiceChain.24]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:34:15Z [standalone.StandaloneServiceChain.ServiceChain.24]: CREATE_COMPLETE state changed 2026-01-22 12:34:16Z [standalone.StandaloneServiceChain.ServiceChain.25]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:16Z [standalone.StandaloneServiceChain.ServiceChain.25]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 
12:34:16Z [standalone.StandaloneServiceChain.ServiceChain.25.HeatEngineLogging]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:16Z [standalone.StandaloneServiceChain.ServiceChain.25.HeatEngineLogging]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:34:16Z [standalone.StandaloneServiceChain.ServiceChain.25.HeatEngineLogging.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:16Z [standalone.StandaloneServiceChain.ServiceChain.25.HeatEngineLogging.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:34:16Z [standalone.StandaloneServiceChain.ServiceChain.25.HeatEngineLogging]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:34:17Z [standalone.StandaloneServiceChain.ServiceChain.25.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:17Z [standalone.StandaloneServiceChain.ServiceChain.25.HeatEngineLogging]: CREATE_COMPLETE state changed 2026-01-22 12:34:18Z [standalone.StandaloneServiceChain.ServiceChain.25.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:18Z [standalone.StandaloneServiceChain.ServiceChain.25.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:34:19Z [standalone.StandaloneServiceChain.ServiceChain.25.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:19Z [standalone.StandaloneServiceChain.ServiceChain.25.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:34:19Z [standalone.StandaloneServiceChain.ServiceChain.25.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:34:20Z [standalone.StandaloneServiceChain.ServiceChain.25.HeatBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:21Z [standalone.StandaloneServiceChain.ServiceChain.25.HeatBase]: CREATE_COMPLETE state changed 2026-01-22 12:34:21Z [standalone.StandaloneServiceChain.ServiceChain.25]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:34:22Z [standalone.StandaloneServiceChain.ServiceChain.25]: CREATE_COMPLETE state changed 2026-01-22 12:34:23Z [standalone.StandaloneServiceChain.ServiceChain.26]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:23Z [standalone.StandaloneServiceChain.ServiceChain.26]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:34:23Z [standalone.StandaloneServiceChain.ServiceChain.26.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:23Z [standalone.StandaloneServiceChain.ServiceChain.26.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:34:24Z [standalone.StandaloneServiceChain.ServiceChain.26.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:25Z [standalone.StandaloneServiceChain.ServiceChain.26.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:34:25Z [standalone.StandaloneServiceChain.ServiceChain.26]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:34:26Z [standalone.StandaloneServiceChain.ServiceChain.26]: CREATE_COMPLETE state changed 2026-01-22 12:34:26Z [standalone.StandaloneServiceChain.ServiceChain.27]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:27Z [standalone.StandaloneServiceChain.ServiceChain.27]: CREATE_COMPLETE state changed 2026-01-22 12:34:28Z [standalone.StandaloneServiceChain.ServiceChain.28]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:28Z [standalone.StandaloneServiceChain.ServiceChain.28]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:34:28Z [standalone.StandaloneServiceChain.ServiceChain.28.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:29Z 
[standalone.StandaloneServiceChain.ServiceChain.28.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:29Z [standalone.StandaloneServiceChain.ServiceChain.28.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:34:29Z [standalone.StandaloneServiceChain.ServiceChain.28.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:34:29Z [standalone.StandaloneServiceChain.ServiceChain.28]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:34:30Z [standalone.StandaloneServiceChain.ServiceChain.28]: CREATE_COMPLETE state changed 2026-01-22 12:34:30Z [standalone.StandaloneServiceChain.ServiceChain.29]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:30Z [standalone.StandaloneServiceChain.ServiceChain.29]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:34:30Z [standalone.StandaloneServiceChain.ServiceChain.29.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:30Z [standalone.StandaloneServiceChain.ServiceChain.29.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:34:30Z [standalone.StandaloneServiceChain.ServiceChain.29]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:34:31Z [standalone.StandaloneServiceChain.ServiceChain.29]: CREATE_COMPLETE state changed 2026-01-22 12:34:31Z [standalone.StandaloneServiceChain.ServiceChain.30]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:32Z [standalone.StandaloneServiceChain.ServiceChain.30]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:34:32Z [standalone.StandaloneServiceChain.ServiceChain.30.KeystoneLogging]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:32Z [standalone.StandaloneServiceChain.ServiceChain.30.KeystoneLogging]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:34:32Z [standalone.StandaloneServiceChain.ServiceChain.30.KeystoneLogging.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:32Z [standalone.StandaloneServiceChain.ServiceChain.30.KeystoneLogging.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:34:32Z [standalone.StandaloneServiceChain.ServiceChain.30.KeystoneLogging]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:34:33Z [standalone.StandaloneServiceChain.ServiceChain.30.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:33Z [standalone.StandaloneServiceChain.ServiceChain.30.KeystoneLogging]: CREATE_COMPLETE state changed 2026-01-22 12:34:34Z [standalone.StandaloneServiceChain.ServiceChain.30.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:34Z [standalone.StandaloneServiceChain.ServiceChain.30.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:34:34Z [standalone.StandaloneServiceChain.ServiceChain.30.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:34:35Z [standalone.StandaloneServiceChain.ServiceChain.30.ApacheServiceBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:35Z [standalone.StandaloneServiceChain.ServiceChain.30.ApacheServiceBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:34:35Z [standalone.StandaloneServiceChain.ServiceChain.30.ApacheServiceBase.ApacheNetworks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:35Z [standalone.StandaloneServiceChain.ServiceChain.30.ApacheServiceBase.ApacheNetworks]: CREATE_COMPLETE state changed 2026-01-22 12:34:35Z [standalone.StandaloneServiceChain.ServiceChain.30.ApacheServiceBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:34:36Z 
[standalone.StandaloneServiceChain.ServiceChain.30.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:36Z [standalone.StandaloneServiceChain.ServiceChain.30.ApacheServiceBase]: CREATE_COMPLETE state changed 2026-01-22 12:34:37Z [standalone.StandaloneServiceChain.ServiceChain.30.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:34:37Z [standalone.StandaloneServiceChain.ServiceChain.30]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:34:38Z [standalone.StandaloneServiceChain.ServiceChain.30]: CREATE_COMPLETE state changed 2026-01-22 12:34:38Z [standalone.StandaloneServiceChain.ServiceChain.31]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:38Z [standalone.StandaloneServiceChain.ServiceChain.31]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:34:39Z [standalone.StandaloneServiceChain.ServiceChain.31.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:39Z [standalone.StandaloneServiceChain.ServiceChain.31.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:34:40Z [standalone.StandaloneServiceChain.ServiceChain.31.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:41Z [standalone.StandaloneServiceChain.ServiceChain.31.ManilaBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:41Z [standalone.StandaloneServiceChain.ServiceChain.31.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:34:42Z [standalone.StandaloneServiceChain.ServiceChain.31.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:42Z [standalone.StandaloneServiceChain.ServiceChain.31.ManilaBase]: CREATE_COMPLETE state changed 2026-01-22 12:34:43Z [standalone.StandaloneServiceChain.ServiceChain.31.ApacheServiceBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:43Z [standalone.StandaloneServiceChain.ServiceChain.31.ApacheServiceBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:34:43Z [standalone.StandaloneServiceChain.ServiceChain.31.ApacheServiceBase.ApacheNetworks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:43Z [standalone.StandaloneServiceChain.ServiceChain.31.ApacheServiceBase.ApacheNetworks]: CREATE_COMPLETE state changed 2026-01-22 12:34:43Z [standalone.StandaloneServiceChain.ServiceChain.31.ApacheServiceBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:34:43Z [standalone.StandaloneServiceChain.ServiceChain.31.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:34:44Z [standalone.StandaloneServiceChain.ServiceChain.31.ApacheServiceBase]: CREATE_COMPLETE state changed 2026-01-22 12:34:44Z [standalone.StandaloneServiceChain.ServiceChain.31]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:34:45Z [standalone.StandaloneServiceChain.ServiceChain.31]: CREATE_COMPLETE state changed 2026-01-22 12:34:45Z [standalone.StandaloneServiceChain.ServiceChain.32]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:46Z [standalone.StandaloneServiceChain.ServiceChain.32]: CREATE_COMPLETE state changed 2026-01-22 12:34:46Z [standalone.StandaloneServiceChain.ServiceChain.33]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:47Z [standalone.StandaloneServiceChain.ServiceChain.33]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:34:47Z [standalone.StandaloneServiceChain.ServiceChain.33.ManilaBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:48Z [standalone.StandaloneServiceChain.ServiceChain.33.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:48Z [standalone.StandaloneServiceChain.ServiceChain.33.ManilaBase]: CREATE_COMPLETE 
state changed 2026-01-22 12:34:49Z [standalone.StandaloneServiceChain.ServiceChain.33.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:49Z [standalone.StandaloneServiceChain.ServiceChain.33.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:34:49Z [standalone.StandaloneServiceChain.ServiceChain.33.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:34:50Z [standalone.StandaloneServiceChain.ServiceChain.33.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:51Z [standalone.StandaloneServiceChain.ServiceChain.33.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:34:51Z [standalone.StandaloneServiceChain.ServiceChain.33]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:34:52Z [standalone.StandaloneServiceChain.ServiceChain.33]: CREATE_COMPLETE state changed 2026-01-22 12:34:52Z [standalone.StandaloneServiceChain.ServiceChain.34]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:52Z [standalone.StandaloneServiceChain.ServiceChain.34]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:34:52Z [standalone.StandaloneServiceChain.ServiceChain.34.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:53Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaShareContainerBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:53Z [standalone.StandaloneServiceChain.ServiceChain.34.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:34:53Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaShareContainerBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:34:54Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaShareContainerBase.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:54Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:55Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaShareContainerBase.ManilaBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:55Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaCommon]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:34:55Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaShareContainerBase.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:34:55Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaCommon.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:55Z [standalone.StandaloneServiceChain.ServiceChain.34.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:55Z [standalone.StandaloneServiceChain.ServiceChain.34.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:34:56Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaShareContainerBase.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:56Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaShareContainerBase.ManilaBase]: CREATE_COMPLETE state changed 2026-01-22 12:34:56Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaCommon.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:34:56Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaCommon]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:34:56Z [standalone.StandaloneServiceChain.ServiceChain.34.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:56Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaShareContainerBase.ManilaShareCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:57Z 
[standalone.StandaloneServiceChain.ServiceChain.34.ManilaShareContainerBase.ManilaShareCommon]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:34:57Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaCommon]: CREATE_COMPLETE state changed 2026-01-22 12:34:57Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaShareContainerBase.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:34:57Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaShareContainerBase.ManilaShareCommon.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:57Z [standalone.StandaloneServiceChain.ServiceChain.34.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:34:58Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaShareContainerBase.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:34:58Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaShareContainerBase.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:34:58Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaShareContainerBase.ManilaShareCommon.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:34:58Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaShareContainerBase.ManilaShareCommon]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:34:59Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaShareContainerBase.ManilaShareCommon]: CREATE_COMPLETE state changed 2026-01-22 12:34:59Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaShareContainerBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:34:59Z [standalone.StandaloneServiceChain.ServiceChain.34.ManilaShareContainerBase]: CREATE_COMPLETE state changed 2026-01-22 12:34:59Z [standalone.StandaloneServiceChain.ServiceChain.34]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:35:00Z [standalone.StandaloneServiceChain.ServiceChain.34]: CREATE_COMPLETE state changed 2026-01-22 12:35:01Z [standalone.StandaloneServiceChain.ServiceChain.35]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:01Z [standalone.StandaloneServiceChain.ServiceChain.35]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:01Z [standalone.StandaloneServiceChain.ServiceChain.35.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:02Z [standalone.StandaloneServiceChain.ServiceChain.35.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:02Z [standalone.StandaloneServiceChain.ServiceChain.35.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:35:02Z [standalone.StandaloneServiceChain.ServiceChain.35.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:35:02Z [standalone.StandaloneServiceChain.ServiceChain.35]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:35:03Z [standalone.StandaloneServiceChain.ServiceChain.35]: CREATE_COMPLETE state changed 2026-01-22 12:35:04Z [standalone.StandaloneServiceChain.ServiceChain.36]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:04Z [standalone.StandaloneServiceChain.ServiceChain.36]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:04Z [standalone.StandaloneServiceChain.ServiceChain.36.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:05Z [standalone.StandaloneServiceChain.ServiceChain.36.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:05Z [standalone.StandaloneServiceChain.ServiceChain.36.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:35:05Z 
[standalone.StandaloneServiceChain.ServiceChain.36.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:35:06Z [standalone.StandaloneServiceChain.ServiceChain.36.MysqlBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:07Z [standalone.StandaloneServiceChain.ServiceChain.36.MysqlBase]: CREATE_COMPLETE state changed 2026-01-22 12:35:07Z [standalone.StandaloneServiceChain.ServiceChain.36]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:35:08Z [standalone.StandaloneServiceChain.ServiceChain.36]: CREATE_COMPLETE state changed 2026-01-22 12:35:08Z [standalone.StandaloneServiceChain.ServiceChain.37]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:09Z [standalone.StandaloneServiceChain.ServiceChain.37]: CREATE_COMPLETE state changed 2026-01-22 12:35:10Z [standalone.StandaloneServiceChain.ServiceChain.38]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:10Z [standalone.StandaloneServiceChain.ServiceChain.38]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:10Z [standalone.StandaloneServiceChain.ServiceChain.38.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:11Z [standalone.StandaloneServiceChain.ServiceChain.38.TLSProxyBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:11Z [standalone.StandaloneServiceChain.ServiceChain.38.TLSProxyBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:11Z [standalone.StandaloneServiceChain.ServiceChain.38.TLSProxyBase.ApacheNetworks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:11Z [standalone.StandaloneServiceChain.ServiceChain.38.TLSProxyBase.ApacheNetworks]: CREATE_COMPLETE state changed 2026-01-22 12:35:11Z [standalone.StandaloneServiceChain.ServiceChain.38.TLSProxyBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:35:11Z [standalone.StandaloneServiceChain.ServiceChain.38.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:35:12Z [standalone.StandaloneServiceChain.ServiceChain.38.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:12Z [standalone.StandaloneServiceChain.ServiceChain.38.TLSProxyBase]: CREATE_COMPLETE state changed 2026-01-22 12:35:13Z [standalone.StandaloneServiceChain.ServiceChain.38.NeutronBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:13Z [standalone.StandaloneServiceChain.ServiceChain.38.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:35:14Z [standalone.StandaloneServiceChain.ServiceChain.38.NeutronLogging]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:14Z [standalone.StandaloneServiceChain.ServiceChain.38.NeutronLogging]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:14Z [standalone.StandaloneServiceChain.ServiceChain.38.NeutronBase]: CREATE_COMPLETE state changed 2026-01-22 12:35:14Z [standalone.StandaloneServiceChain.ServiceChain.38.NeutronLogging.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:14Z [standalone.StandaloneServiceChain.ServiceChain.38.NeutronLogging.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:35:14Z [standalone.StandaloneServiceChain.ServiceChain.38.NeutronLogging]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:35:15Z [standalone.StandaloneServiceChain.ServiceChain.38.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:15Z [standalone.StandaloneServiceChain.ServiceChain.38.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:35:15Z [standalone.StandaloneServiceChain.ServiceChain.38.NeutronLogging]: CREATE_COMPLETE state changed 2026-01-22 12:35:15Z 
[standalone.StandaloneServiceChain.ServiceChain.38]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:35:16Z [standalone.StandaloneServiceChain.ServiceChain.38]: CREATE_COMPLETE state changed 2026-01-22 12:35:16Z [standalone.StandaloneServiceChain.ServiceChain.39]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:16Z [standalone.StandaloneServiceChain.ServiceChain.39]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:17Z [standalone.StandaloneServiceChain.ServiceChain.39.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:17Z [standalone.StandaloneServiceChain.ServiceChain.39.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:35:17Z [standalone.StandaloneServiceChain.ServiceChain.39.NeutronBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:18Z [standalone.StandaloneServiceChain.ServiceChain.39.NeutronBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:18Z [standalone.StandaloneServiceChain.ServiceChain.39.NeutronBase.NeutronMl2Base]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:18Z [standalone.StandaloneServiceChain.ServiceChain.39.NeutronBase.NeutronMl2Base]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:18Z [standalone.StandaloneServiceChain.ServiceChain.39.NeutronBase.NeutronMl2Base.NeutronBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:19Z [standalone.StandaloneServiceChain.ServiceChain.39.NeutronBase.NeutronMl2Base.NeutronBase]: CREATE_COMPLETE state changed 2026-01-22 12:35:19Z [standalone.StandaloneServiceChain.ServiceChain.39.NeutronBase.NeutronMl2Base]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:35:20Z [standalone.StandaloneServiceChain.ServiceChain.39.NeutronBase.NeutronMl2Base]: CREATE_COMPLETE state changed 2026-01-22 12:35:20Z [standalone.StandaloneServiceChain.ServiceChain.39.NeutronBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:35:21Z [standalone.StandaloneServiceChain.ServiceChain.39.NeutronBase]: CREATE_COMPLETE state changed 2026-01-22 12:35:21Z [standalone.StandaloneServiceChain.ServiceChain.39]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:35:21Z [standalone.StandaloneServiceChain.ServiceChain.39]: CREATE_COMPLETE state changed 2026-01-22 12:35:22Z [standalone.StandaloneServiceChain.ServiceChain.40]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:22Z [standalone.StandaloneServiceChain.ServiceChain.40]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:22Z [standalone.StandaloneServiceChain.ServiceChain.40.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:23Z [standalone.StandaloneServiceChain.ServiceChain.40.NeutronLogging]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:23Z [standalone.StandaloneServiceChain.ServiceChain.40.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:35:24Z [standalone.StandaloneServiceChain.ServiceChain.40.NeutronBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:24Z [standalone.StandaloneServiceChain.ServiceChain.40.NeutronLogging]: CREATE_COMPLETE state changed 2026-01-22 12:35:25Z [standalone.StandaloneServiceChain.ServiceChain.40.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:25Z [standalone.StandaloneServiceChain.ServiceChain.40.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:35:25Z [standalone.StandaloneServiceChain.ServiceChain.40.NeutronBase]: CREATE_COMPLETE state changed 2026-01-22 12:35:25Z [standalone.StandaloneServiceChain.ServiceChain.40]: CREATE_COMPLETE Stack CREATE 
completed successfully 2026-01-22 12:35:26Z [standalone.StandaloneServiceChain.ServiceChain.40]: CREATE_COMPLETE state changed 2026-01-22 12:35:27Z [standalone.StandaloneServiceChain.ServiceChain.41]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:27Z [standalone.StandaloneServiceChain.ServiceChain.41]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:27Z [standalone.StandaloneServiceChain.ServiceChain.41.NeutronBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:28Z [standalone.StandaloneServiceChain.ServiceChain.41.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:28Z [standalone.StandaloneServiceChain.ServiceChain.41.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:35:28Z [standalone.StandaloneServiceChain.ServiceChain.41.NeutronBase]: CREATE_COMPLETE state changed 2026-01-22 12:35:29Z [standalone.StandaloneServiceChain.ServiceChain.41.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:30Z [standalone.StandaloneServiceChain.ServiceChain.41.NeutronLogging]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:30Z [standalone.StandaloneServiceChain.ServiceChain.41.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:35:31Z [standalone.StandaloneServiceChain.ServiceChain.41.NeutronLogging]: CREATE_COMPLETE state changed 2026-01-22 12:35:31Z [standalone.StandaloneServiceChain.ServiceChain.41]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:35:32Z [standalone.StandaloneServiceChain.ServiceChain.41]: CREATE_COMPLETE state changed 2026-01-22 12:35:32Z [standalone.StandaloneServiceChain.ServiceChain.42]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:32Z [standalone.StandaloneServiceChain.ServiceChain.42]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:32Z [standalone.StandaloneServiceChain.ServiceChain.42.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:32Z [standalone.StandaloneServiceChain.ServiceChain.42.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:35:33Z [standalone.StandaloneServiceChain.ServiceChain.42.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:34Z [standalone.StandaloneServiceChain.ServiceChain.42.NovaApiLogging]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:35Z [standalone.StandaloneServiceChain.ServiceChain.42.NovaApiLogging]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:35Z [standalone.StandaloneServiceChain.ServiceChain.42.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:35:35Z [standalone.StandaloneServiceChain.ServiceChain.42.NovaApiLogging.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:35Z [standalone.StandaloneServiceChain.ServiceChain.42.NovaApiLogging.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:35:35Z [standalone.StandaloneServiceChain.ServiceChain.42.NovaApiLogging]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:35:35Z [standalone.StandaloneServiceChain.ServiceChain.42.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:36Z [standalone.StandaloneServiceChain.ServiceChain.42.NovaApiLogging]: CREATE_COMPLETE state changed 2026-01-22 12:35:36Z [standalone.StandaloneServiceChain.ServiceChain.42.NovaBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:36Z [standalone.StandaloneServiceChain.ServiceChain.42.NovaBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:37Z [standalone.StandaloneServiceChain.ServiceChain.42.NovaBase.RoleParametersValue]: CREATE_IN_PROGRESS state changed 
2026-01-22 12:35:37Z [standalone.StandaloneServiceChain.ServiceChain.42.NovaBase.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:35:37Z [standalone.StandaloneServiceChain.ServiceChain.42.NovaBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:35:37Z [standalone.StandaloneServiceChain.ServiceChain.42.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:35:37Z [standalone.StandaloneServiceChain.ServiceChain.42.NovaDBClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:38Z [standalone.StandaloneServiceChain.ServiceChain.42.NovaBase]: CREATE_COMPLETE state changed 2026-01-22 12:35:38Z [standalone.StandaloneServiceChain.ServiceChain.42.NovaApiDBClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:39Z [standalone.StandaloneServiceChain.ServiceChain.42.NovaDBClient]: CREATE_COMPLETE state changed 2026-01-22 12:35:39Z [standalone.StandaloneServiceChain.ServiceChain.42.ApacheServiceBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:40Z [standalone.StandaloneServiceChain.ServiceChain.42.ApacheServiceBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:40Z [standalone.StandaloneServiceChain.ServiceChain.42.NovaApiDBClient]: CREATE_COMPLETE state changed 2026-01-22 12:35:40Z [standalone.StandaloneServiceChain.ServiceChain.42.ApacheServiceBase.ApacheNetworks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:40Z [standalone.StandaloneServiceChain.ServiceChain.42.ApacheServiceBase.ApacheNetworks]: CREATE_COMPLETE state changed 2026-01-22 12:35:40Z [standalone.StandaloneServiceChain.ServiceChain.42.ApacheServiceBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:35:41Z [standalone.StandaloneServiceChain.ServiceChain.42.ApacheServiceBase]: CREATE_COMPLETE state changed 2026-01-22 12:35:41Z [standalone.StandaloneServiceChain.ServiceChain.42]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:35:41Z [standalone.StandaloneServiceChain.ServiceChain.42]: CREATE_COMPLETE state changed 2026-01-22 12:35:42Z [standalone.StandaloneServiceChain.ServiceChain.43]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:42Z [standalone.StandaloneServiceChain.ServiceChain.43]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:43Z [standalone.StandaloneServiceChain.ServiceChain.43.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:44Z [standalone.StandaloneServiceChain.ServiceChain.43.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:44Z [standalone.StandaloneServiceChain.ServiceChain.43.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:35:44Z [standalone.StandaloneServiceChain.ServiceChain.43.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:35:44Z [standalone.StandaloneServiceChain.ServiceChain.43.NovaLogging]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:44Z [standalone.StandaloneServiceChain.ServiceChain.43.NovaLogging]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:44Z [standalone.StandaloneServiceChain.ServiceChain.43.NovaLogging.InitLogContainerName]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:44Z [standalone.StandaloneServiceChain.ServiceChain.43.NovaLogging.InitLogContainerName]: CREATE_COMPLETE state changed 2026-01-22 12:35:45Z [standalone.StandaloneServiceChain.ServiceChain.43.NovaApiDBClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:45Z [standalone.StandaloneServiceChain.ServiceChain.43.NovaLogging.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:45Z 
[standalone.StandaloneServiceChain.ServiceChain.43.NovaLogging.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:35:45Z [standalone.StandaloneServiceChain.ServiceChain.43.NovaLogging]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:35:46Z [standalone.StandaloneServiceChain.ServiceChain.43.NovaBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:46Z [standalone.StandaloneServiceChain.ServiceChain.43.NovaBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:46Z [standalone.StandaloneServiceChain.ServiceChain.43.NovaApiDBClient]: CREATE_COMPLETE state changed 2026-01-22 12:35:46Z [standalone.StandaloneServiceChain.ServiceChain.43.NovaBase.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:46Z [standalone.StandaloneServiceChain.ServiceChain.43.NovaBase.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:35:46Z [standalone.StandaloneServiceChain.ServiceChain.43.NovaBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:35:46Z [standalone.StandaloneServiceChain.ServiceChain.43.NovaLogging]: CREATE_COMPLETE state changed 2026-01-22 12:35:47Z [standalone.StandaloneServiceChain.ServiceChain.43.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:47Z [standalone.StandaloneServiceChain.ServiceChain.43.NovaBase]: CREATE_COMPLETE state changed 2026-01-22 12:35:48Z [standalone.StandaloneServiceChain.ServiceChain.43.NovaDBClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:48Z [standalone.StandaloneServiceChain.ServiceChain.43.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:35:49Z [standalone.StandaloneServiceChain.ServiceChain.43.NovaDBClient]: CREATE_COMPLETE state changed 2026-01-22 12:35:49Z [standalone.StandaloneServiceChain.ServiceChain.43]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:35:49Z [standalone.StandaloneServiceChain.ServiceChain.43]: CREATE_COMPLETE state changed 2026-01-22 12:35:50Z [standalone.StandaloneServiceChain.ServiceChain.44]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:50Z [standalone.StandaloneServiceChain.ServiceChain.44]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:50Z [standalone.StandaloneServiceChain.ServiceChain.44.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:50Z [standalone.StandaloneServiceChain.ServiceChain.44.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:35:51Z [standalone.StandaloneServiceChain.ServiceChain.44.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:52Z [standalone.StandaloneServiceChain.ServiceChain.44.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:52Z [standalone.StandaloneServiceChain.ServiceChain.44.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:35:53Z [standalone.StandaloneServiceChain.ServiceChain.44.NovaBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:53Z [standalone.StandaloneServiceChain.ServiceChain.44.NovaBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:53Z [standalone.StandaloneServiceChain.ServiceChain.44.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:35:53Z [standalone.StandaloneServiceChain.ServiceChain.44.NovaBase.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:54Z [standalone.StandaloneServiceChain.ServiceChain.44.NovaBase.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:35:54Z [standalone.StandaloneServiceChain.ServiceChain.44.NovaBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 
12:35:54Z [standalone.StandaloneServiceChain.ServiceChain.44.NovaDBClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:54Z [standalone.StandaloneServiceChain.ServiceChain.44.NovaBase]: CREATE_COMPLETE state changed 2026-01-22 12:35:55Z [standalone.StandaloneServiceChain.ServiceChain.44.NovaApiDBClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:55Z [standalone.StandaloneServiceChain.ServiceChain.44.NovaDBClient]: CREATE_COMPLETE state changed 2026-01-22 12:35:56Z [standalone.StandaloneServiceChain.ServiceChain.44.ApacheServiceBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:56Z [standalone.StandaloneServiceChain.ServiceChain.44.NovaApiDBClient]: CREATE_COMPLETE state changed 2026-01-22 12:35:56Z [standalone.StandaloneServiceChain.ServiceChain.44.ApacheServiceBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:57Z [standalone.StandaloneServiceChain.ServiceChain.44.ApacheServiceBase.ApacheNetworks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:57Z [standalone.StandaloneServiceChain.ServiceChain.44.ApacheServiceBase.ApacheNetworks]: CREATE_COMPLETE state changed 2026-01-22 12:35:57Z [standalone.StandaloneServiceChain.ServiceChain.44.ApacheServiceBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:35:57Z [standalone.StandaloneServiceChain.ServiceChain.44.NovaMetadataLogging]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:57Z [standalone.StandaloneServiceChain.ServiceChain.44.NovaMetadataLogging]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:35:58Z [standalone.StandaloneServiceChain.ServiceChain.44.NovaMetadataLogging.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:35:58Z [standalone.StandaloneServiceChain.ServiceChain.44.ApacheServiceBase]: CREATE_COMPLETE state changed 2026-01-22 12:35:58Z [standalone.StandaloneServiceChain.ServiceChain.44.NovaMetadataLogging.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:35:58Z [standalone.StandaloneServiceChain.ServiceChain.44.NovaMetadataLogging]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:35:58Z [standalone.StandaloneServiceChain.ServiceChain.44.NovaMetadataLogging]: CREATE_COMPLETE state changed 2026-01-22 12:35:58Z [standalone.StandaloneServiceChain.ServiceChain.44]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:35:59Z [standalone.StandaloneServiceChain.ServiceChain.44]: CREATE_COMPLETE state changed 2026-01-22 12:36:00Z [standalone.StandaloneServiceChain.ServiceChain.45]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:00Z [standalone.StandaloneServiceChain.ServiceChain.45]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:36:00Z [standalone.StandaloneServiceChain.ServiceChain.45.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:00Z [standalone.StandaloneServiceChain.ServiceChain.45.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:36:00Z [standalone.StandaloneServiceChain.ServiceChain.45.NovaLogging]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:00Z [standalone.StandaloneServiceChain.ServiceChain.45.NovaLogging]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:36:00Z [standalone.StandaloneServiceChain.ServiceChain.45.NovaLogging.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:00Z [standalone.StandaloneServiceChain.ServiceChain.45.NovaLogging.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:36:01Z [standalone.StandaloneServiceChain.ServiceChain.45.NovaApiDBClient]: CREATE_IN_PROGRESS state changed 
2026-01-22 12:36:01Z [standalone.StandaloneServiceChain.ServiceChain.45.NovaLogging.InitLogContainerName]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:01Z [standalone.StandaloneServiceChain.ServiceChain.45.NovaLogging.InitLogContainerName]: CREATE_COMPLETE state changed 2026-01-22 12:36:01Z [standalone.StandaloneServiceChain.ServiceChain.45.NovaLogging]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:36:02Z [standalone.StandaloneServiceChain.ServiceChain.45.NovaDBClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:02Z [standalone.StandaloneServiceChain.ServiceChain.45.NovaApiDBClient]: CREATE_COMPLETE state changed 2026-01-22 12:36:02Z [standalone.StandaloneServiceChain.ServiceChain.45.NovaLogging]: CREATE_COMPLETE state changed 2026-01-22 12:36:03Z [standalone.StandaloneServiceChain.ServiceChain.45.NovaBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:03Z [standalone.StandaloneServiceChain.ServiceChain.45.NovaBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:36:03Z [standalone.StandaloneServiceChain.ServiceChain.45.NovaDBClient]: CREATE_COMPLETE state changed 2026-01-22 12:36:03Z [standalone.StandaloneServiceChain.ServiceChain.45.NovaBase.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:03Z [standalone.StandaloneServiceChain.ServiceChain.45.NovaBase.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:36:03Z [standalone.StandaloneServiceChain.ServiceChain.45.NovaBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:36:04Z [standalone.StandaloneServiceChain.ServiceChain.45.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:04Z [standalone.StandaloneServiceChain.ServiceChain.45.NovaBase]: CREATE_COMPLETE state changed 2026-01-22 12:36:05Z [standalone.StandaloneServiceChain.ServiceChain.45.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:05Z [standalone.StandaloneServiceChain.ServiceChain.45.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:36:06Z [standalone.StandaloneServiceChain.ServiceChain.45.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:36:06Z [standalone.StandaloneServiceChain.ServiceChain.45]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:36:07Z [standalone.StandaloneServiceChain.ServiceChain.45]: CREATE_COMPLETE state changed 2026-01-22 12:36:08Z [standalone.StandaloneServiceChain.ServiceChain.46]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:08Z [standalone.StandaloneServiceChain.ServiceChain.46]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:36:08Z [standalone.StandaloneServiceChain.ServiceChain.46.NovaComputeCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:09Z [standalone.StandaloneServiceChain.ServiceChain.46.CinderNVMeOF]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:09Z [standalone.StandaloneServiceChain.ServiceChain.46.NovaComputeCommon]: CREATE_COMPLETE state changed 2026-01-22 12:36:10Z [standalone.StandaloneServiceChain.ServiceChain.46.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:10Z [standalone.StandaloneServiceChain.ServiceChain.46.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:36:10Z [standalone.StandaloneServiceChain.ServiceChain.46.CinderNVMeOF]: CREATE_COMPLETE state changed 2026-01-22 12:36:10Z [standalone.StandaloneServiceChain.ServiceChain.46.NovaLogging]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:10Z [standalone.StandaloneServiceChain.ServiceChain.46.NovaLogging]: CREATE_IN_PROGRESS Stack CREATE started 
2026-01-22 12:36:10Z [standalone.StandaloneServiceChain.ServiceChain.46.NovaLogging.InitLogContainerName]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:10Z [standalone.StandaloneServiceChain.ServiceChain.46.NovaLogging.InitLogContainerName]: CREATE_COMPLETE state changed 2026-01-22 12:36:11Z [standalone.StandaloneServiceChain.ServiceChain.46.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:11Z [standalone.StandaloneServiceChain.ServiceChain.46.NovaLogging.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:11Z [standalone.StandaloneServiceChain.ServiceChain.46.NovaLogging.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:36:11Z [standalone.StandaloneServiceChain.ServiceChain.46.NovaLogging]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:36:12Z [standalone.StandaloneServiceChain.ServiceChain.46.NovaBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:12Z [standalone.StandaloneServiceChain.ServiceChain.46.NovaBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:36:12Z [standalone.StandaloneServiceChain.ServiceChain.46.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:36:12Z [standalone.StandaloneServiceChain.ServiceChain.46.NovaBase.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:12Z [standalone.StandaloneServiceChain.ServiceChain.46.NovaBase.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:36:12Z [standalone.StandaloneServiceChain.ServiceChain.46.NovaBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:36:12Z [standalone.StandaloneServiceChain.ServiceChain.46.NovaLogging]: CREATE_COMPLETE state changed 2026-01-22 12:36:13Z [standalone.StandaloneServiceChain.ServiceChain.46.NovaBase]: CREATE_COMPLETE state changed 2026-01-22 12:36:13Z [standalone.StandaloneServiceChain.ServiceChain.46]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:36:14Z [standalone.StandaloneServiceChain.ServiceChain.46]: CREATE_COMPLETE state changed 2026-01-22 12:36:15Z [standalone.StandaloneServiceChain.ServiceChain.47]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:15Z [standalone.StandaloneServiceChain.ServiceChain.47]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:36:15Z [standalone.StandaloneServiceChain.ServiceChain.47.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:16Z [standalone.StandaloneServiceChain.ServiceChain.47.NovaLibvirtCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:16Z [standalone.StandaloneServiceChain.ServiceChain.47.NovaLibvirtCommon]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:36:16Z [standalone.StandaloneServiceChain.ServiceChain.47.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:36:16Z [standalone.StandaloneServiceChain.ServiceChain.47.NovaLibvirtCommon.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:16Z [standalone.StandaloneServiceChain.ServiceChain.47.NovaLibvirtCommon.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:36:16Z [standalone.StandaloneServiceChain.ServiceChain.47.NovaLibvirtCommon]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:36:17Z [standalone.StandaloneServiceChain.ServiceChain.47.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:17Z [standalone.StandaloneServiceChain.ServiceChain.47.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:36:17Z [standalone.StandaloneServiceChain.ServiceChain.47.NovaLibvirtCommon]: CREATE_COMPLETE 
state changed 2026-01-22 12:36:18Z [standalone.StandaloneServiceChain.ServiceChain.47.NovaBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:18Z [standalone.StandaloneServiceChain.ServiceChain.47.NovaBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:36:18Z [standalone.StandaloneServiceChain.ServiceChain.47.NovaBase.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:18Z [standalone.StandaloneServiceChain.ServiceChain.47.NovaBase.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:36:18Z [standalone.StandaloneServiceChain.ServiceChain.47.NovaBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:36:19Z [standalone.StandaloneServiceChain.ServiceChain.47.NovaLibvirtLogging]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:19Z [standalone.StandaloneServiceChain.ServiceChain.47.NovaBase]: CREATE_COMPLETE state changed 2026-01-22 12:36:20Z [standalone.StandaloneServiceChain.ServiceChain.47.NovaLibvirtLogging]: CREATE_COMPLETE state changed 2026-01-22 12:36:20Z [standalone.StandaloneServiceChain.ServiceChain.47]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:36:21Z [standalone.StandaloneServiceChain.ServiceChain.47]: CREATE_COMPLETE state changed 2026-01-22 12:36:22Z [standalone.StandaloneServiceChain.ServiceChain.48]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:22Z [standalone.StandaloneServiceChain.ServiceChain.48]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:36:22Z [standalone.StandaloneServiceChain.ServiceChain.48.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:23Z [standalone.StandaloneServiceChain.ServiceChain.48.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:23Z [standalone.StandaloneServiceChain.ServiceChain.48.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:36:23Z [standalone.StandaloneServiceChain.ServiceChain.48.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:36:24Z [standalone.StandaloneServiceChain.ServiceChain.48.SshdBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:25Z [standalone.StandaloneServiceChain.ServiceChain.48.SshdBase]: CREATE_COMPLETE state changed 2026-01-22 12:36:25Z [standalone.StandaloneServiceChain.ServiceChain.48]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:36:26Z [standalone.StandaloneServiceChain.ServiceChain.48]: CREATE_COMPLETE state changed 2026-01-22 12:36:26Z [standalone.StandaloneServiceChain.ServiceChain.49]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:26Z [standalone.StandaloneServiceChain.ServiceChain.49]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:36:26Z [standalone.StandaloneServiceChain.ServiceChain.49.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:27Z [standalone.StandaloneServiceChain.ServiceChain.49.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:27Z [standalone.StandaloneServiceChain.ServiceChain.49.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:36:27Z [standalone.StandaloneServiceChain.ServiceChain.49.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:36:27Z [standalone.StandaloneServiceChain.ServiceChain.49.NovaLogging]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:28Z [standalone.StandaloneServiceChain.ServiceChain.49.NovaLogging]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:36:28Z [standalone.StandaloneServiceChain.ServiceChain.49.NovaLogging.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:28Z 
[standalone.StandaloneServiceChain.ServiceChain.49.NovaLogging.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:36:28Z [standalone.StandaloneServiceChain.ServiceChain.49.NovaBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:28Z [standalone.StandaloneServiceChain.ServiceChain.49.NovaBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:36:28Z [standalone.StandaloneServiceChain.ServiceChain.49.NovaBase.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:29Z [standalone.StandaloneServiceChain.ServiceChain.49.NovaBase.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:36:29Z [standalone.StandaloneServiceChain.ServiceChain.49.NovaBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:36:29Z [standalone.StandaloneServiceChain.ServiceChain.49.NovaLogging.InitLogContainerName]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:29Z [standalone.StandaloneServiceChain.ServiceChain.49.NovaLogging.InitLogContainerName]: CREATE_COMPLETE state changed 2026-01-22 12:36:29Z [standalone.StandaloneServiceChain.ServiceChain.49.NovaLogging]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:36:29Z [standalone.StandaloneServiceChain.ServiceChain.49.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:29Z [standalone.StandaloneServiceChain.ServiceChain.49.NovaBase]: CREATE_COMPLETE state changed 2026-01-22 12:36:30Z [standalone.StandaloneServiceChain.ServiceChain.49.NovaLogging]: CREATE_COMPLETE state changed 2026-01-22 12:36:30Z [standalone.StandaloneServiceChain.ServiceChain.49.NovaDBClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:30Z [standalone.StandaloneServiceChain.ServiceChain.49.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:36:31Z [standalone.StandaloneServiceChain.ServiceChain.49.NovaDBClient]: CREATE_COMPLETE state changed 2026-01-22 12:36:31Z [standalone.StandaloneServiceChain.ServiceChain.49]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:36:32Z [standalone.StandaloneServiceChain.ServiceChain.49]: CREATE_COMPLETE state changed 2026-01-22 12:36:33Z [standalone.StandaloneServiceChain.ServiceChain.50]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:33Z [standalone.StandaloneServiceChain.ServiceChain.50]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:36:33Z [standalone.StandaloneServiceChain.ServiceChain.50.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:33Z [standalone.StandaloneServiceChain.ServiceChain.50.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:36:34Z [standalone.StandaloneServiceChain.ServiceChain.50.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:35Z [standalone.StandaloneServiceChain.ServiceChain.50.OVNBridgeMappingsValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:35Z [standalone.StandaloneServiceChain.ServiceChain.50.OVNBridgeMappingsValue]: CREATE_COMPLETE state changed 2026-01-22 12:36:35Z [standalone.StandaloneServiceChain.ServiceChain.50.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:36:35Z [standalone.StandaloneServiceChain.ServiceChain.50]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:36:36Z [standalone.StandaloneServiceChain.ServiceChain.50]: CREATE_COMPLETE state changed 2026-01-22 12:36:36Z [standalone.StandaloneServiceChain.ServiceChain.51]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:36Z [standalone.StandaloneServiceChain.ServiceChain.51]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:36:37Z 
[standalone.StandaloneServiceChain.ServiceChain.51.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:38Z [standalone.StandaloneServiceChain.ServiceChain.51.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:38Z [standalone.StandaloneServiceChain.ServiceChain.51.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:36:38Z [standalone.StandaloneServiceChain.ServiceChain.51.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:36:38Z [standalone.StandaloneServiceChain.ServiceChain.51]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:36:38Z [standalone.StandaloneServiceChain.ServiceChain.51]: CREATE_COMPLETE state changed 2026-01-22 12:36:39Z [standalone.StandaloneServiceChain.ServiceChain.52]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:39Z [standalone.StandaloneServiceChain.ServiceChain.52]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:36:39Z [standalone.StandaloneServiceChain.ServiceChain.52.NeutronBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:40Z [standalone.StandaloneServiceChain.ServiceChain.52.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:40Z [standalone.StandaloneServiceChain.ServiceChain.52.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:36:40Z [standalone.StandaloneServiceChain.ServiceChain.52.NeutronBase]: CREATE_COMPLETE state changed 2026-01-22 12:36:41Z [standalone.StandaloneServiceChain.ServiceChain.52.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:42Z [standalone.StandaloneServiceChain.ServiceChain.52.NeutronLogging]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:42Z [standalone.StandaloneServiceChain.ServiceChain.52.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:36:43Z [standalone.StandaloneServiceChain.ServiceChain.52.NeutronLogging]: CREATE_COMPLETE state changed 2026-01-22 12:36:43Z [standalone.StandaloneServiceChain.ServiceChain.52]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:36:44Z [standalone.StandaloneServiceChain.ServiceChain.52]: CREATE_COMPLETE state changed 2026-01-22 12:36:44Z [standalone.StandaloneServiceChain.ServiceChain.53]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:45Z [standalone.StandaloneServiceChain.ServiceChain.53]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:36:45Z [standalone.StandaloneServiceChain.ServiceChain.53.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:45Z [standalone.StandaloneServiceChain.ServiceChain.53.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:36:46Z [standalone.StandaloneServiceChain.ServiceChain.53.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:47Z [standalone.StandaloneServiceChain.ServiceChain.53.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:47Z [standalone.StandaloneServiceChain.ServiceChain.53.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:36:48Z [standalone.StandaloneServiceChain.ServiceChain.53.OctaviaBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:48Z [standalone.StandaloneServiceChain.ServiceChain.53.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:36:49Z [standalone.StandaloneServiceChain.ServiceChain.53.OctaviaProviderConfig]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:49Z [standalone.StandaloneServiceChain.ServiceChain.53.OctaviaBase]: CREATE_COMPLETE state changed 2026-01-22 12:36:50Z [standalone.StandaloneServiceChain.ServiceChain.53.OctaviaWorker]: CREATE_IN_PROGRESS 
state changed 2026-01-22 12:36:50Z [standalone.StandaloneServiceChain.ServiceChain.53.OctaviaWorker]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:36:50Z [standalone.StandaloneServiceChain.ServiceChain.53.OctaviaProviderConfig]: CREATE_COMPLETE state changed 2026-01-22 12:36:50Z [standalone.StandaloneServiceChain.ServiceChain.53.OctaviaWorker.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:50Z [standalone.StandaloneServiceChain.ServiceChain.53.OctaviaWorker.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:36:51Z [standalone.StandaloneServiceChain.ServiceChain.53.OctaviaWorker.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:52Z [standalone.StandaloneServiceChain.ServiceChain.53.OctaviaWorker.OctaviaBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:52Z [standalone.StandaloneServiceChain.ServiceChain.53.OctaviaWorker.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:36:53Z [standalone.StandaloneServiceChain.ServiceChain.53.OctaviaWorker.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:53Z [standalone.StandaloneServiceChain.ServiceChain.53.OctaviaWorker.OctaviaBase]: CREATE_COMPLETE state changed 2026-01-22 12:36:54Z [standalone.StandaloneServiceChain.ServiceChain.53.OctaviaWorker.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:36:54Z [standalone.StandaloneServiceChain.ServiceChain.53.OctaviaWorker]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:36:55Z [standalone.StandaloneServiceChain.ServiceChain.53.OctaviaWorker]: CREATE_COMPLETE state changed 2026-01-22 12:36:55Z [standalone.StandaloneServiceChain.ServiceChain.53]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:36:56Z [standalone.StandaloneServiceChain.ServiceChain.53]: CREATE_COMPLETE state changed 2026-01-22 12:36:56Z [standalone.StandaloneServiceChain.ServiceChain.54]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:56Z [standalone.StandaloneServiceChain.ServiceChain.54]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:36:56Z [standalone.StandaloneServiceChain.ServiceChain.54.OctaviaVars]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:56Z [standalone.StandaloneServiceChain.ServiceChain.54.OctaviaVars]: CREATE_COMPLETE state changed 2026-01-22 12:36:56Z [standalone.StandaloneServiceChain.ServiceChain.54]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:36:57Z [standalone.StandaloneServiceChain.ServiceChain.54]: CREATE_COMPLETE state changed 2026-01-22 12:36:58Z [standalone.StandaloneServiceChain.ServiceChain.55]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:58Z [standalone.StandaloneServiceChain.ServiceChain.55]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:36:58Z [standalone.StandaloneServiceChain.ServiceChain.55.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:36:58Z [standalone.StandaloneServiceChain.ServiceChain.55.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:36:59Z [standalone.StandaloneServiceChain.ServiceChain.55.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:00Z [standalone.StandaloneServiceChain.ServiceChain.55.OctaviaBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:00Z [standalone.StandaloneServiceChain.ServiceChain.55.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:37:01Z [standalone.StandaloneServiceChain.ServiceChain.55.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:01Z 
[standalone.StandaloneServiceChain.ServiceChain.55.OctaviaBase]: CREATE_COMPLETE state changed 2026-01-22 12:37:02Z [standalone.StandaloneServiceChain.ServiceChain.55.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:37:02Z [standalone.StandaloneServiceChain.ServiceChain.55]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:37:03Z [standalone.StandaloneServiceChain.ServiceChain.55]: CREATE_COMPLETE state changed 2026-01-22 12:37:03Z [standalone.StandaloneServiceChain.ServiceChain.56]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:03Z [standalone.StandaloneServiceChain.ServiceChain.56]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:37:04Z [standalone.StandaloneServiceChain.ServiceChain.56.OctaviaBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:05Z [standalone.StandaloneServiceChain.ServiceChain.56.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:05Z [standalone.StandaloneServiceChain.ServiceChain.56.OctaviaBase]: CREATE_COMPLETE state changed 2026-01-22 12:37:06Z [standalone.StandaloneServiceChain.ServiceChain.56.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:06Z [standalone.StandaloneServiceChain.ServiceChain.56.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:37:06Z [standalone.StandaloneServiceChain.ServiceChain.56.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:37:07Z [standalone.StandaloneServiceChain.ServiceChain.56.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:08Z [standalone.StandaloneServiceChain.ServiceChain.56.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:37:08Z [standalone.StandaloneServiceChain.ServiceChain.56]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:37:09Z [standalone.StandaloneServiceChain.ServiceChain.56]: CREATE_COMPLETE state changed 2026-01-22 12:37:09Z [standalone.StandaloneServiceChain.ServiceChain.57]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:09Z [standalone.StandaloneServiceChain.ServiceChain.57]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:37:09Z [standalone.StandaloneServiceChain.ServiceChain.57.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:09Z [standalone.StandaloneServiceChain.ServiceChain.57.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:37:10Z [standalone.StandaloneServiceChain.ServiceChain.57.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:11Z [standalone.StandaloneServiceChain.ServiceChain.57.OctaviaBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:11Z [standalone.StandaloneServiceChain.ServiceChain.57.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:37:12Z [standalone.StandaloneServiceChain.ServiceChain.57.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:12Z [standalone.StandaloneServiceChain.ServiceChain.57.OctaviaBase]: CREATE_COMPLETE state changed 2026-01-22 12:37:13Z [standalone.StandaloneServiceChain.ServiceChain.57.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:37:13Z [standalone.StandaloneServiceChain.ServiceChain.57]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:37:14Z [standalone.StandaloneServiceChain.ServiceChain.57]: CREATE_COMPLETE state changed 2026-01-22 12:37:15Z [standalone.StandaloneServiceChain.ServiceChain.58]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:16Z [standalone.StandaloneServiceChain.ServiceChain.58]: CREATE_COMPLETE state changed 2026-01-22 12:37:16Z 
[standalone.StandaloneServiceChain.ServiceChain.59]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:16Z [standalone.StandaloneServiceChain.ServiceChain.59]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:37:16Z [standalone.StandaloneServiceChain.ServiceChain.59.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:17Z [standalone.StandaloneServiceChain.ServiceChain.59.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:37:17Z [standalone.StandaloneServiceChain.ServiceChain.59]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:37:18Z [standalone.StandaloneServiceChain.ServiceChain.59]: CREATE_COMPLETE state changed 2026-01-22 12:37:18Z [standalone.StandaloneServiceChain.ServiceChain.60]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:18Z [standalone.StandaloneServiceChain.ServiceChain.60]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:37:19Z [standalone.StandaloneServiceChain.ServiceChain.60.RabbitmqBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:19Z [standalone.StandaloneServiceChain.ServiceChain.60.RabbitmqBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:37:19Z [standalone.StandaloneServiceChain.ServiceChain.60.RabbitmqBase.RabbitMQServiceBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:19Z [standalone.StandaloneServiceChain.ServiceChain.60.RabbitmqBase.RabbitMQServiceBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:37:19Z [standalone.StandaloneServiceChain.ServiceChain.60.RabbitmqBase.RabbitMQServiceBase.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:19Z [standalone.StandaloneServiceChain.ServiceChain.60.RabbitmqBase.RabbitMQServiceBase.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:37:20Z [standalone.StandaloneServiceChain.ServiceChain.60.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:20Z [standalone.StandaloneServiceChain.ServiceChain.60.RabbitmqBase.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:20Z [standalone.StandaloneServiceChain.ServiceChain.60.RabbitmqBase.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:37:20Z [standalone.StandaloneServiceChain.ServiceChain.60.RabbitmqBase.RabbitMQServiceBase.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:21Z [standalone.StandaloneServiceChain.ServiceChain.60.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:21Z [standalone.StandaloneServiceChain.ServiceChain.60.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:37:21Z [standalone.StandaloneServiceChain.ServiceChain.60.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:37:21Z [standalone.StandaloneServiceChain.ServiceChain.60.RabbitmqBase.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:21Z [standalone.StandaloneServiceChain.ServiceChain.60.RabbitmqBase.RabbitMQServiceBase.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:37:21Z [standalone.StandaloneServiceChain.ServiceChain.60.RabbitmqBase.RabbitMQServiceBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:37:22Z [standalone.StandaloneServiceChain.ServiceChain.60.RabbitmqBase.RabbitMQServiceBase]: CREATE_COMPLETE state changed 2026-01-22 12:37:22Z [standalone.StandaloneServiceChain.ServiceChain.60.RabbitmqBase.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:37:22Z [standalone.StandaloneServiceChain.ServiceChain.60.RabbitmqBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 
12:37:23Z [standalone.StandaloneServiceChain.ServiceChain.60.RabbitmqBase]: CREATE_COMPLETE state changed 2026-01-22 12:37:23Z [standalone.StandaloneServiceChain.ServiceChain.60]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:37:24Z [standalone.StandaloneServiceChain.ServiceChain.60]: CREATE_COMPLETE state changed 2026-01-22 12:37:24Z [standalone.StandaloneServiceChain.ServiceChain.61]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:25Z [standalone.StandaloneServiceChain.ServiceChain.61]: CREATE_COMPLETE state changed 2026-01-22 12:37:26Z [standalone.StandaloneServiceChain.ServiceChain.62]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:26Z [standalone.StandaloneServiceChain.ServiceChain.62]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:37:26Z [standalone.StandaloneServiceChain.ServiceChain.62.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:27Z [standalone.StandaloneServiceChain.ServiceChain.62.PlacementLogging]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:27Z [standalone.StandaloneServiceChain.ServiceChain.62.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:37:27Z [standalone.StandaloneServiceChain.ServiceChain.62.PlacementLogging]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:37:27Z [standalone.StandaloneServiceChain.ServiceChain.62.PlacementLogging.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:27Z [standalone.StandaloneServiceChain.ServiceChain.62.PlacementLogging.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:37:27Z [standalone.StandaloneServiceChain.ServiceChain.62.PlacementLogging]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:37:28Z [standalone.StandaloneServiceChain.ServiceChain.62.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:28Z [standalone.StandaloneServiceChain.ServiceChain.62.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:37:28Z [standalone.StandaloneServiceChain.ServiceChain.62.PlacementLogging]: CREATE_COMPLETE state changed 2026-01-22 12:37:29Z [standalone.StandaloneServiceChain.ServiceChain.62.ApacheServiceBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:29Z [standalone.StandaloneServiceChain.ServiceChain.62.ApacheServiceBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:37:29Z [standalone.StandaloneServiceChain.ServiceChain.62.ApacheServiceBase.ApacheNetworks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:29Z [standalone.StandaloneServiceChain.ServiceChain.62.ApacheServiceBase.ApacheNetworks]: CREATE_COMPLETE state changed 2026-01-22 12:37:29Z [standalone.StandaloneServiceChain.ServiceChain.62.ApacheServiceBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:37:30Z [standalone.StandaloneServiceChain.ServiceChain.62.MySQLClient]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:30Z [standalone.StandaloneServiceChain.ServiceChain.62.ApacheServiceBase]: CREATE_COMPLETE state changed 2026-01-22 12:37:31Z [standalone.StandaloneServiceChain.ServiceChain.62.MySQLClient]: CREATE_COMPLETE state changed 2026-01-22 12:37:31Z [standalone.StandaloneServiceChain.ServiceChain.62]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:37:32Z [standalone.StandaloneServiceChain.ServiceChain.62]: CREATE_COMPLETE state changed 2026-01-22 12:37:32Z [standalone.StandaloneServiceChain.ServiceChain.63]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:33Z [standalone.StandaloneServiceChain.ServiceChain.63]: CREATE_COMPLETE state changed 2026-01-22 
12:37:34Z [standalone.StandaloneServiceChain.ServiceChain.64]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:34Z [standalone.StandaloneServiceChain.ServiceChain.64]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:37:34Z [standalone.StandaloneServiceChain.ServiceChain.64.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:35Z [standalone.StandaloneServiceChain.ServiceChain.64.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:35Z [standalone.StandaloneServiceChain.ServiceChain.64.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:37:35Z [standalone.StandaloneServiceChain.ServiceChain.64.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:37:36Z [standalone.StandaloneServiceChain.ServiceChain.64.RedisBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:36Z [standalone.StandaloneServiceChain.ServiceChain.64.RedisBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:37:36Z [standalone.StandaloneServiceChain.ServiceChain.64.RedisBase.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:37Z [standalone.StandaloneServiceChain.ServiceChain.64.RedisBase.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:37Z [standalone.StandaloneServiceChain.ServiceChain.64.RedisBase.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:37:37Z [standalone.StandaloneServiceChain.ServiceChain.64.RedisBase.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:37:38Z [standalone.StandaloneServiceChain.ServiceChain.64.RedisBase.RedisBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:39Z [standalone.StandaloneServiceChain.ServiceChain.64.RedisBase.RedisBase]: CREATE_COMPLETE state changed 2026-01-22 12:37:39Z [standalone.StandaloneServiceChain.ServiceChain.64.RedisBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:37:40Z [standalone.StandaloneServiceChain.ServiceChain.64.RedisBase]: CREATE_COMPLETE state changed 2026-01-22 12:37:40Z [standalone.StandaloneServiceChain.ServiceChain.64]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:37:41Z [standalone.StandaloneServiceChain.ServiceChain.64]: CREATE_COMPLETE state changed 2026-01-22 12:37:41Z [standalone.StandaloneServiceChain.ServiceChain.65]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:43Z [standalone.StandaloneServiceChain.ServiceChain.65]: CREATE_COMPLETE state changed 2026-01-22 12:37:43Z [standalone.StandaloneServiceChain.ServiceChain.66]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:44Z [standalone.StandaloneServiceChain.ServiceChain.66]: CREATE_COMPLETE state changed 2026-01-22 12:37:44Z [standalone.StandaloneServiceChain.ServiceChain.67]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:45Z [standalone.StandaloneServiceChain.ServiceChain.67]: CREATE_COMPLETE state changed 2026-01-22 12:37:46Z [standalone.StandaloneServiceChain.ServiceChain.68]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:46Z [standalone.StandaloneServiceChain.ServiceChain.68]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:37:46Z [standalone.StandaloneServiceChain.ServiceChain.68.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:47Z [standalone.StandaloneServiceChain.ServiceChain.68.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:47Z [standalone.StandaloneServiceChain.ServiceChain.68.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:37:47Z [standalone.StandaloneServiceChain.ServiceChain.68.ContainersCommon]: CREATE_COMPLETE 
state changed 2026-01-22 12:37:48Z [standalone.StandaloneServiceChain.ServiceChain.68.SwiftBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:49Z [standalone.StandaloneServiceChain.ServiceChain.68.TLSProxyBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:49Z [standalone.StandaloneServiceChain.ServiceChain.68.TLSProxyBase]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:37:49Z [standalone.StandaloneServiceChain.ServiceChain.68.SwiftBase]: CREATE_COMPLETE state changed 2026-01-22 12:37:49Z [standalone.StandaloneServiceChain.ServiceChain.68.TLSProxyBase.ApacheNetworks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:49Z [standalone.StandaloneServiceChain.ServiceChain.68.TLSProxyBase.ApacheNetworks]: CREATE_COMPLETE state changed 2026-01-22 12:37:49Z [standalone.StandaloneServiceChain.ServiceChain.68.TLSProxyBase]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:37:50Z [standalone.StandaloneServiceChain.ServiceChain.68.TLSProxyBase]: CREATE_COMPLETE state changed 2026-01-22 12:37:50Z [standalone.StandaloneServiceChain.ServiceChain.68]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:37:51Z [standalone.StandaloneServiceChain.ServiceChain.68]: CREATE_COMPLETE state changed 2026-01-22 12:37:51Z [standalone.StandaloneServiceChain.ServiceChain.69]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:52Z [standalone.StandaloneServiceChain.ServiceChain.69]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:37:52Z [standalone.StandaloneServiceChain.ServiceChain.69.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:52Z [standalone.StandaloneServiceChain.ServiceChain.69.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:37:52Z [standalone.StandaloneServiceChain.ServiceChain.69]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:37:53Z [standalone.StandaloneServiceChain.ServiceChain.69]: CREATE_COMPLETE state changed 2026-01-22 12:37:53Z [standalone.StandaloneServiceChain.ServiceChain.70]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:53Z [standalone.StandaloneServiceChain.ServiceChain.70]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:37:53Z [standalone.StandaloneServiceChain.ServiceChain.70.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:53Z [standalone.StandaloneServiceChain.ServiceChain.70.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:37:54Z [standalone.StandaloneServiceChain.ServiceChain.70.ContainersCommon]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:55Z [standalone.StandaloneServiceChain.ServiceChain.70.SwiftBase]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:55Z [standalone.StandaloneServiceChain.ServiceChain.70.ContainersCommon]: CREATE_COMPLETE state changed 2026-01-22 12:37:56Z [standalone.StandaloneServiceChain.ServiceChain.70.SwiftBase]: CREATE_COMPLETE state changed 2026-01-22 12:37:56Z [standalone.StandaloneServiceChain.ServiceChain.70]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:37:57Z [standalone.StandaloneServiceChain.ServiceChain.70]: CREATE_COMPLETE state changed 2026-01-22 12:37:57Z [standalone.StandaloneServiceChain.ServiceChain.71]: CREATE_IN_PROGRESS state changed 2026-01-22 12:37:59Z [standalone.StandaloneServiceChain.ServiceChain.71]: CREATE_COMPLETE state changed 2026-01-22 12:37:59Z [standalone.StandaloneServiceChain.ServiceChain.72]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:00Z [standalone.StandaloneServiceChain.ServiceChain.72]: CREATE_COMPLETE state changed 
2026-01-22 12:38:00Z [standalone.StandaloneServiceChain.ServiceChain.73]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:01Z [standalone.StandaloneServiceChain.ServiceChain.73]: CREATE_COMPLETE state changed 2026-01-22 12:38:02Z [standalone.StandaloneServiceChain.ServiceChain.74]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:02Z [standalone.StandaloneServiceChain.ServiceChain.74]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:38:02Z [standalone.StandaloneServiceChain.ServiceChain.74.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:02Z [standalone.StandaloneServiceChain.ServiceChain.74.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:38:02Z [standalone.StandaloneServiceChain.ServiceChain.74]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:38:03Z [standalone.StandaloneServiceChain.ServiceChain.74]: CREATE_COMPLETE state changed 2026-01-22 12:38:03Z [standalone.StandaloneServiceChain.ServiceChain.75]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:03Z [standalone.StandaloneServiceChain.ServiceChain.75]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:38:03Z [standalone.StandaloneServiceChain.ServiceChain.75.Podman]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:04Z [standalone.StandaloneServiceChain.ServiceChain.75.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:04Z [standalone.StandaloneServiceChain.ServiceChain.75.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:38:04Z [standalone.StandaloneServiceChain.ServiceChain.75.Podman]: CREATE_COMPLETE state changed 2026-01-22 12:38:04Z [standalone.StandaloneServiceChain.ServiceChain.75]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:38:05Z [standalone.StandaloneServiceChain.ServiceChain.75]: CREATE_COMPLETE state changed 2026-01-22 12:38:06Z [standalone.StandaloneServiceChain.ServiceChain.76]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:06Z [standalone.StandaloneServiceChain.ServiceChain.76]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:38:06Z [standalone.StandaloneServiceChain.ServiceChain.76.RoleParametersValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:06Z [standalone.StandaloneServiceChain.ServiceChain.76.RoleParametersValue]: CREATE_COMPLETE state changed 2026-01-22 12:38:06Z [standalone.StandaloneServiceChain.ServiceChain.76]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:38:07Z [standalone.StandaloneServiceChain.ServiceChain.76]: CREATE_COMPLETE state changed 2026-01-22 12:38:07Z [standalone.StandaloneServiceChain.ServiceChain]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:38:07Z [standalone.StandaloneServiceChain.ServiceChain]: CREATE_COMPLETE state changed 2026-01-22 12:38:09Z [standalone.StandaloneServiceChain.ServiceNames]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:09Z [standalone.StandaloneServiceChain.UpdateTasks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:10Z [standalone.StandaloneServiceChain.MonitoringSubscriptionsConfig]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:10Z [standalone.StandaloneServiceChain.ServiceNames]: CREATE_COMPLETE state changed 2026-01-22 12:38:10Z [standalone.StandaloneServiceChain.ContainerPuppetTasks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:10Z [standalone.StandaloneServiceChain.UpdateTasks]: CREATE_COMPLETE state changed 2026-01-22 12:38:10Z [standalone.StandaloneServiceChain.DockerConfigScripts]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:11Z 
[standalone.StandaloneServiceChain.MonitoringSubscriptionsConfig]: CREATE_COMPLETE state changed 2026-01-22 12:38:11Z [standalone.StandaloneServiceChain.HostFirewallTasks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:11Z [standalone.StandaloneServiceChain.ContainerPuppetTasks]: CREATE_COMPLETE state changed 2026-01-22 12:38:11Z [standalone.StandaloneServiceChain.UpgradeTasks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:11Z [standalone.StandaloneServiceChain.DockerConfigScripts]: CREATE_COMPLETE state changed 2026-01-22 12:38:11Z [standalone.StandaloneServiceChain.AnsibleGroupVars]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:12Z [standalone.StandaloneServiceChain.HostFirewallTasks]: CREATE_COMPLETE state changed 2026-01-22 12:38:12Z [standalone.StandaloneServiceChain.KollaConfig]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:12Z [standalone.StandaloneServiceChain.UpgradeTasks]: CREATE_COMPLETE state changed 2026-01-22 12:38:12Z [standalone.StandaloneServiceChain.UpgradeBatchTasks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:12Z [standalone.StandaloneServiceChain.AnsibleGroupVars]: CREATE_COMPLETE state changed 2026-01-22 12:38:13Z [standalone.StandaloneServiceChain.PreDeployStepTasks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:13Z [standalone.StandaloneServiceChain.KollaConfig]: CREATE_COMPLETE state changed 2026-01-22 12:38:13Z [standalone.StandaloneServiceChain.PreUpgradeRollingTasks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:13Z [standalone.StandaloneServiceChain.UpgradeBatchTasks]: CREATE_COMPLETE state changed 2026-01-22 12:38:13Z [standalone.StandaloneServiceChain.ExternalDeployTasks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:13Z [standalone.StandaloneServiceChain.PreDeployStepTasks]: CREATE_COMPLETE state changed 2026-01-22 12:38:13Z [standalone.StandaloneServiceChain.ServiceServerMetadataHook]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:14Z [standalone.StandaloneServiceChain.PreUpgradeRollingTasks]: CREATE_COMPLETE state changed 2026-01-22 12:38:14Z [standalone.StandaloneServiceChain.KeystoneResourcesConfigs]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:14Z [standalone.StandaloneServiceChain.ExternalDeployTasks]: CREATE_COMPLETE state changed 2026-01-22 12:38:14Z [standalone.StandaloneServiceChain.ExternalPostDeployTasks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:15Z [standalone.StandaloneServiceChain.FirewallFrontendRules]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:15Z [standalone.StandaloneServiceChain.KeystoneResourcesConfigs]: CREATE_COMPLETE state changed 2026-01-22 12:38:15Z [standalone.StandaloneServiceChain.IpaRegistrationTasks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:16Z [standalone.StandaloneServiceChain.ExternalPostDeployTasks]: CREATE_COMPLETE state changed 2026-01-22 12:38:16Z [standalone.StandaloneServiceChain.ScaleTasks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:16Z [standalone.StandaloneServiceChain.PuppetConfig]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:16Z [standalone.StandaloneServiceChain.FirewallFrontendRules]: CREATE_COMPLETE state changed 2026-01-22 12:38:16Z [standalone.StandaloneServiceChain.ServiceServerMetadataHook]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:38:16Z [standalone.StandaloneServiceChain.IpaRegistrationTasks]: CREATE_COMPLETE state changed 2026-01-22 12:38:16Z [standalone.StandaloneServiceChain.DockerPuppetTasks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:17Z [standalone.StandaloneServiceChain.ScaleTasks]: 
CREATE_COMPLETE state changed 2026-01-22 12:38:17Z [standalone.StandaloneServiceChain.FirewallRules]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:17Z [standalone.StandaloneServiceChain.PuppetConfig]: CREATE_COMPLETE state changed 2026-01-22 12:38:17Z [standalone.StandaloneServiceChain.DeployStepsTasks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:17Z [standalone.StandaloneServiceChain.DockerPuppetTasks]: CREATE_COMPLETE state changed 2026-01-22 12:38:17Z [standalone.StandaloneServiceChain.PostUpdateTasks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:18Z [standalone.StandaloneServiceChain.FirewallRules]: CREATE_COMPLETE state changed 2026-01-22 12:38:18Z [standalone.StandaloneServiceChain.GlobalConfigSettings]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:18Z [standalone.StandaloneServiceChain.DeployStepsTasks]: CREATE_COMPLETE state changed 2026-01-22 12:38:18Z [standalone.StandaloneServiceChain.PostUpgradeTasks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:19Z [standalone.StandaloneServiceChain.PostUpdateTasks]: CREATE_COMPLETE state changed 2026-01-22 12:38:19Z [standalone.StandaloneServiceChain.ExternalUpdateTasks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:19Z [standalone.StandaloneServiceChain.GlobalConfigSettings]: CREATE_COMPLETE state changed 2026-01-22 12:38:19Z [standalone.StandaloneServiceChain.HostPrepTasks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:19Z [standalone.StandaloneServiceChain.PostUpgradeTasks]: CREATE_COMPLETE state changed 2026-01-22 12:38:19Z [standalone.StandaloneServiceChain.DockerConfig]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:20Z [standalone.StandaloneServiceChain.ExternalUpdateTasks]: CREATE_COMPLETE state changed 2026-01-22 12:38:20Z [standalone.StandaloneServiceChain.ServiceConfigSettings]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:20Z [standalone.StandaloneServiceChain.HostPrepTasks]: CREATE_COMPLETE state changed 2026-01-22 12:38:20Z [standalone.StandaloneServiceChain.PuppetStepConfig]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:20Z [standalone.StandaloneServiceChain.DockerConfig]: CREATE_COMPLETE state changed 2026-01-22 12:38:20Z [standalone.StandaloneServiceChain.ExternalUpgradeTasks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:20Z [standalone.StandaloneServiceChain.ServiceConfigSettings]: CREATE_COMPLETE state changed 2026-01-22 12:38:20Z [standalone.StandaloneServiceChain.PuppetStepConfig]: CREATE_COMPLETE state changed 2026-01-22 12:38:20Z [standalone.StandaloneServiceChain.ExternalUpgradeTasks]: CREATE_COMPLETE state changed 2026-01-22 12:38:20Z [standalone.StandaloneServiceChain.ServiceServerMetadataHook.IncomingMetadataSettings]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:20Z [standalone.StandaloneServiceChain.ServiceServerMetadataHook.IncomingMetadataSettings]: CREATE_COMPLETE state changed 2026-01-22 12:38:21Z [standalone.StandaloneServiceChain.ServiceServerMetadataHook.IndividualServices]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:21Z [standalone.StandaloneServiceChain.ServiceServerMetadataHook.CompactServices]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:21Z [standalone.StandaloneServiceChain.ServiceServerMetadataHook.IndividualServices]: CREATE_COMPLETE state changed 2026-01-22 12:38:21Z [standalone.StandaloneServiceChain.ServiceServerMetadataHook.CompactServices]: CREATE_COMPLETE state changed 2026-01-22 12:38:21Z [standalone.StandaloneServiceChain.ServiceServerMetadataHook]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 
12:38:21Z [standalone.StandaloneServiceChain.ServiceServerMetadataHook]: CREATE_COMPLETE state changed 2026-01-22 12:38:21Z [standalone.StandaloneServiceChain]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:38:21Z [standalone.StandaloneServiceChain]: CREATE_COMPLETE state changed 2026-01-22 12:38:22Z [standalone.StandaloneServiceChainRoleData]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:22Z [standalone.StandaloneServiceChainRoleData]: CREATE_COMPLETE state changed 2026-01-22 12:38:22Z [standalone.GlobalConfig]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:23Z [standalone.StandaloneServiceNames]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:23Z [standalone.GlobalConfig]: CREATE_COMPLETE state changed 2026-01-22 12:38:23Z [standalone.StandaloneServiceNames]: CREATE_COMPLETE state changed 2026-01-22 12:38:23Z [standalone.FirewallFrontendRules]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:23Z [standalone.FirewallFrontendRules]: CREATE_COMPLETE state changed 2026-01-22 12:38:23Z [standalone.Standalone]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:23Z [standalone.StandaloneServiceConfigSettings]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:23Z [standalone.StandaloneGroupVars]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:23Z [standalone.StandaloneServiceConfigSettings]: CREATE_COMPLETE state changed 2026-01-22 12:38:23Z [standalone.StandaloneGroupVars]: CREATE_COMPLETE state changed 2026-01-22 12:38:23Z [standalone.Standalone]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:38:24Z [standalone.Standalone]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:38:24Z [standalone.StandaloneConfigData]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:24Z [standalone.StandaloneConfigData]: CREATE_COMPLETE state changed 2026-01-22 12:38:25Z [standalone.Standalone]: UPDATE_IN_PROGRESS Stack UPDATE started 2026-01-22 12:38:25Z [standalone.Standalone.0]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:25Z [standalone.Standalone.0]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:38:25Z [standalone.Standalone.0.DeploymentActions]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:25Z [standalone.Standalone.0.DeploymentActions]: CREATE_COMPLETE state changed 2026-01-22 12:38:26Z [standalone.Standalone.0.Standalone]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:26Z [standalone.Standalone.0.Standalone]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:38:26Z [standalone.Standalone.0.Standalone.deployed-server]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:26Z [standalone.Standalone.0.Standalone.deployed-server]: CREATE_COMPLETE state changed 2026-01-22 12:38:27Z [standalone.Standalone.0.Standalone.ControlPlanePort]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:28Z [standalone.Standalone.0.Standalone.ControlPlanePort]: CREATE_COMPLETE state changed 2026-01-22 12:38:28Z [standalone.Standalone.0.Standalone]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:38:29Z [standalone.Standalone.0.Standalone]: CREATE_COMPLETE state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.StoragePort]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.NetHostMap]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.TenantPort]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.StandaloneExtraConfigPre]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.ExternalPort]: CREATE_IN_PROGRESS 
state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.NetHostMap]: CREATE_COMPLETE state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.InternalApiPort]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.NodeTLSCAData]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.StoragePort]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:38:30Z [standalone.Standalone.0.TenantPort]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:38:30Z [standalone.Standalone.0.StorageMgmtPort]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.PreNetworkConfig]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.ExternalPort]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:38:30Z [standalone.Standalone.0.StandaloneAnsibleHostVars]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.NodeTLSCAData]: CREATE_COMPLETE state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.InternalApiPort]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:38:30Z [standalone.Standalone.0.StandaloneAnsibleHostVars]: CREATE_COMPLETE state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.PreNetworkConfig]: CREATE_COMPLETE state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.StoragePort.StorageDeployedPort]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.TenantPort.TenantDeployedPort]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.ExternalPort.ExternalDeployedPort]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.StoragePort.StorageDeployedPort]: CREATE_COMPLETE state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.TenantPort.TenantDeployedPort]: CREATE_COMPLETE state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.InternalApiPort.InternalApiDeployedPort]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:30Z [StorageMgmtDeployedPort]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.ExternalPort.ExternalDeployedPort]: CREATE_COMPLETE state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.StoragePort]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:38:30Z [standalone.Standalone.0.TenantPort]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:38:30Z [standalone.Standalone.0.InternalApiPort.InternalApiDeployedPort]: CREATE_COMPLETE state changed 2026-01-22 12:38:30Z [StorageMgmtDeployedPort]: CREATE_COMPLETE state changed 2026-01-22 12:38:30Z [standalone.Standalone.0.ExternalPort]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:38:30Z [standalone.Standalone.0.InternalApiPort]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:38:30Z [standalone.Standalone.0.StorageMgmtPort]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:38:31Z [standalone.Standalone.0.StoragePort]: CREATE_COMPLETE state changed 2026-01-22 12:38:31Z [standalone.Standalone.0.TenantPort]: CREATE_COMPLETE state changed 2026-01-22 12:38:31Z [standalone.Standalone.0.StandaloneExtraConfigPre]: CREATE_COMPLETE state changed 2026-01-22 12:38:31Z [standalone.Standalone.0.ExternalPort]: CREATE_COMPLETE state changed 2026-01-22 12:38:31Z [standalone.Standalone.0.InternalApiPort]: CREATE_COMPLETE state changed 2026-01-22 12:38:31Z [standalone.Standalone.0.NodeExtraConfig]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:31Z [standalone.Standalone.0.StorageMgmtPort]: 
CREATE_COMPLETE state changed 2026-01-22 12:38:31Z [standalone.Standalone.0.NetIpMap]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:31Z [standalone.Standalone.0.NetIpMap]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:38:31Z [standalone.Standalone.0.NetIpMap.NetIpMapValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:31Z [standalone.Standalone.0.NetIpMap.NetIpMapValue]: CREATE_COMPLETE state changed 2026-01-22 12:38:31Z [standalone.Standalone.0.NetIpMap]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:38:32Z [standalone.Standalone.0.NodeExtraConfig]: CREATE_COMPLETE state changed 2026-01-22 12:38:32Z [standalone.Standalone.0.NetIpMap]: CREATE_COMPLETE state changed 2026-01-22 12:38:32Z [standalone.Standalone.0]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:38:33Z [standalone.Standalone.0]: CREATE_COMPLETE state changed 2026-01-22 12:38:33Z [standalone.Standalone]: UPDATE_COMPLETE Stack UPDATE completed successfully 2026-01-22 12:38:34Z [standalone.Standalone]: CREATE_COMPLETE state changed 2026-01-22 12:38:34Z [standalone.ServerIdMap]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:34Z [standalone.StandaloneServers]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:34Z [standalone.StandaloneNetworkHostnameMap]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:34Z [standalone.BlacklistedHostnames]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:34Z [standalone.HostsEntryValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:34Z [standalone.BlacklistedIpAddresses]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:34Z [standalone.ServerIdMap]: CREATE_COMPLETE state changed 2026-01-22 12:38:34Z [standalone.StandaloneServers]: CREATE_COMPLETE state changed 2026-01-22 12:38:34Z [standalone.StandaloneNetworkHostnameMap]: CREATE_COMPLETE state changed 2026-01-22 12:38:34Z [standalone.BlacklistedHostnames]: CREATE_COMPLETE state changed 2026-01-22 12:38:34Z [standalone.HostsEntryValue]: CREATE_COMPLETE state changed 2026-01-22 12:38:35Z [standalone.AnsibleHostVars]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:35Z [standalone.BlacklistedIpAddresses]: CREATE_COMPLETE state changed 2026-01-22 12:38:35Z [standalone.AnsibleHostVars]: CREATE_COMPLETE state changed 2026-01-22 12:38:35Z [standalone.AllNodesExtraConfig]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:35Z [standalone.StandaloneIpListMap]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:35Z [standalone.AllNodesExtraConfig]: CREATE_COMPLETE state changed 2026-01-22 12:38:35Z [standalone.AllNodesDeploySteps]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:35Z [standalone.StandaloneIpListMap]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:38:35Z [standalone.AllNodesDeploySteps]: CREATE_IN_PROGRESS Stack CREATE started 2026-01-22 12:38:35Z [standalone.StandaloneIpListMap.EnabledServicesValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:35Z [standalone.StandaloneIpListMap.EnabledServicesValue]: CREATE_COMPLETE state changed 2026-01-22 12:38:35Z [standalone.AllNodesDeploySteps.ScaleTasks]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:35Z [standalone.AllNodesDeploySteps.ScaleTasks]: CREATE_COMPLETE state changed 2026-01-22 12:38:36Z [standalone.StandaloneIpListMap.NetIpMapValue]: CREATE_IN_PROGRESS state changed 2026-01-22 12:38:36Z [standalone.StandaloneIpListMap.NetIpMapValue]: CREATE_COMPLETE state changed 2026-01-22 12:38:36Z [standalone.StandaloneIpListMap]: CREATE_COMPLETE Stack CREATE completed successfully 2026-01-22 12:38:36Z 
[standalone.AllNodesDeploySteps.ExternalDeployTasks]: CREATE_IN_PROGRESS state changed
2026-01-22 12:38:36Z [standalone.AllNodesDeploySteps.ExternalDeployTasks]: CREATE_COMPLETE state changed
2026-01-22 12:38:37Z [standalone.StandaloneIpListMap]: CREATE_COMPLETE state changed
2026-01-22 12:38:37Z [standalone.AllNodesDeploySteps.StandaloneExtraConfigPost]: CREATE_IN_PROGRESS state changed
2026-01-22 12:38:38Z [standalone.AllNodesDeploySteps.ExternalUpgradeTasks]: CREATE_IN_PROGRESS state changed
2026-01-22 12:38:38Z [standalone.AllNodesDeploySteps.ExternalUpgradeTasks]: CREATE_COMPLETE state changed
2026-01-22 12:38:38Z [standalone.AllNodesDeploySteps.StandaloneExtraConfigPost]: CREATE_COMPLETE state changed
2026-01-22 12:38:38Z [standalone.AllNodesDeploySteps.StandalonePostConfig]: CREATE_IN_PROGRESS state changed
2026-01-22 12:38:38Z [standalone.AllNodesDeploySteps.StandalonePostConfig]: CREATE_COMPLETE state changed
2026-01-22 12:38:39Z [standalone.AllNodesDeploySteps.BootstrapServerId]: CREATE_IN_PROGRESS state changed
2026-01-22 12:38:39Z [standalone.AllNodesDeploySteps.BootstrapServerId]: CREATE_COMPLETE state changed
2026-01-22 12:38:40Z [standalone.AllNodesDeploySteps.ExternalUpdateTasks]: CREATE_IN_PROGRESS state changed
2026-01-22 12:38:40Z [standalone.AllNodesDeploySteps.ExternalUpdateTasks]: CREATE_COMPLETE state changed
2026-01-22 12:38:41Z [standalone.AllNodesDeploySteps.ExternalPostDeployTasks]: CREATE_IN_PROGRESS state changed
2026-01-22 12:38:41Z [standalone.AllNodesDeploySteps.ExternalPostDeployTasks]: CREATE_COMPLETE state changed
2026-01-22 12:38:42Z [standalone.AllNodesDeploySteps.PreDeployStepTasks]: CREATE_IN_PROGRESS state changed
2026-01-22 12:38:42Z [standalone.AllNodesDeploySteps.PreDeployStepTasks]: CREATE_COMPLETE state changed
2026-01-22 12:38:42Z [standalone.AllNodesDeploySteps]: CREATE_COMPLETE Stack CREATE completed successfully
2026-01-22 12:38:43Z [standalone.AllNodesDeploySteps]: CREATE_COMPLETE state changed
2026-01-22 12:38:43Z [standalone]: CREATE_COMPLETE Stack CREATE completed successfully
Stack standalone/0aca4d2d-78ee-42ea-b9c9-6fa4b1f209ae CREATE_COMPLETE
** Downloading standalone ansible..
** Generating default ansible config file /root/ansible.cfg
[DEPRECATION WARNING]: ANSIBLE_CALLBACK_WHITELIST option, normalizing names to new standard, use ANSIBLE_CALLBACKS_ENABLED instead. This feature will be removed from ansible-core in version 2.15. Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.
[DEPRECATION WARNING]: DEFAULT_GATHER_SUBSET option, the module_defaults keyword is a more generic version and can apply to all calls to the M(ansible.builtin.gather_facts) or M(ansible.builtin.setup) actions, use module_defaults instead. This feature will be removed from ansible-core in version 2.18. Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.
[DEPRECATION WARNING]: DEFAULT_GATHER_TIMEOUT option, the module_defaults keyword is a more generic version and can apply to all calls to the M(ansible.builtin.gather_facts) or M(ansible.builtin.setup) actions, use module_defaults instead. This feature will be removed from ansible-core in version 2.18. Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.
[WARNING]: Found both group and host with same name: standalone
PLAY [External deployment step 0] **********************************************
[WARNING]: Using run_once with the tripleo_free strategy is not currently supported. This task will still be executed for every host in the inventory list.
[WARNING]: any_errors_fatal only stops any future tasks running on the host that fails with the tripleo_free strategy.
2026-01-22 12:39:06.084094 | fa163e0d-6f45-64a1-ca76-000000000006 | TASK | External deployment step 0
2026-01-22 12:39:06.110124 | fa163e0d-6f45-64a1-ca76-000000000006 | OK | External deployment step 0 | undercloud -> localhost | result={ "changed": false, "msg": "Use --start-at-task 'External deployment step 0' to resume from this task" }
[WARNING]: ('undercloud -> localhost', 'fa163e0d-6f45-64a1-ca76-000000000006') missing from stats
2026-01-22 12:39:06.160499 | fa163e0d-6f45-64a1-ca76-000000000007 | TIMING | include_tasks | undercloud | 0:00:00.125509 | 0.03s
2026-01-22 12:39:06.166868 | 0aa45935-06a9-4fcb-a890-89acc4bead84 | INCLUDED | /root/standalone-ansible-mz1ymllk/external_deploy_steps_tasks_step0.yaml | undercloud
2026-01-22 12:39:06.181308 | fa163e0d-6f45-64a1-ca76-000000000127 | TASK | create ovn mac address for Standalone role nodes
2026-01-22 12:39:07.071430 | fa163e0d-6f45-64a1-ca76-000000000127 | OK | create ovn mac address for Standalone role nodes | undercloud
2026-01-22 12:39:07.072843 | fa163e0d-6f45-64a1-ca76-000000000127 | TIMING | create ovn mac address for Standalone role nodes | undercloud | 0:00:01.037852 | 0.89s
2026-01-22 12:39:07.090054 | fa163e0d-6f45-64a1-ca76-000000000129 | TASK | create redis virtual ip
2026-01-22 12:39:07.938484 | fa163e0d-6f45-64a1-ca76-000000000129 | CHANGED | create redis virtual ip | undercloud
2026-01-22 12:39:07.940005 | fa163e0d-6f45-64a1-ca76-000000000129 | TIMING | create redis virtual ip | undercloud | 0:00:01.905012 | 0.85s
PLAY [Check if required variables are defined] *********************************
2026-01-22 12:39:08.018052 | fa163e0d-6f45-64a1-ca76-000000000138 | TASK | Gathering Facts
2026-01-22 12:39:08.583856 | fa163e0d-6f45-64a1-ca76-000000000138 | OK | Gathering Facts | localhost
2026-01-22 12:39:08.585449 | fa163e0d-6f45-64a1-ca76-000000000138 | TIMING | Gathering Facts | localhost | 0:00:02.550451 | 0.57s
2026-01-22 12:39:08.640189 | fa163e0d-6f45-64a1-ca76-00000000000b | SKIPPED | ansible.builtin.fail | localhost
2026-01-22 12:39:08.641552 | fa163e0d-6f45-64a1-ca76-00000000000b | TIMING | ansible.builtin.fail | localhost | 0:00:02.606557 | 0.04s
2026-01-22 12:39:08.695008 | fa163e0d-6f45-64a1-ca76-00000000000c | SKIPPED | ansible.builtin.fail | localhost
2026-01-22 12:39:08.696293 | fa163e0d-6f45-64a1-ca76-00000000000c | TIMING | ansible.builtin.fail | localhost | 0:00:02.661300 | 0.05s
PLAY [Clear cached facts] ******************************************************
PLAY [Gather facts] ************************************************************
2026-01-22 12:39:08.808386 | fa163e0d-6f45-64a1-ca76-000000000160 | TASK | Gathering Facts
2026-01-22 12:39:08.817845 | fa163e0d-6f45-64a1-ca76-000000000160 | TASK | Gathering Facts
2026-01-22 12:39:09.147401 | fa163e0d-6f45-64a1-ca76-000000000160 | OK | Gathering Facts | undercloud
2026-01-22 12:39:09.154031 | fa163e0d-6f45-64a1-ca76-000000000160 | TIMING | Gathering Facts | undercloud | 0:00:03.119038 | 0.35s
2026-01-22 12:39:09.157093 | fa163e0d-6f45-64a1-ca76-000000000160 | OK | Gathering Facts | standalone
2026-01-22 12:39:09.158193 | fa163e0d-6f45-64a1-ca76-000000000160 | TIMING | Gathering Facts | standalone | 0:00:03.123207 | 0.34s
2026-01-22 12:39:09.219558 | fa163e0d-6f45-64a1-ca76-000000000012 | TASK | Set legacy facts
2026-01-22 12:39:09.240939 | fa163e0d-6f45-64a1-ca76-000000000012 | TASK | Set legacy facts
2026-01-22 12:39:09.260119 | fa163e0d-6f45-64a1-ca76-000000000012 | OK | Set legacy facts | undercloud
2026-01-22 12:39:09.267933 | fa163e0d-6f45-64a1-ca76-000000000012 | TIMING | Set legacy facts | undercloud | 0:00:03.232947 | 0.05s
2026-01-22 12:39:09.268447 | fa163e0d-6f45-64a1-ca76-000000000012 | OK | Set legacy facts | standalone
2026-01-22 12:39:09.269079 | fa163e0d-6f45-64a1-ca76-000000000012 | TIMING | Set legacy facts | standalone | 0:00:03.234098 | 0.03s
PLAY [Load global variables] ***************************************************
2026-01-22 12:39:09.436085 | fa163e0d-6f45-64a1-ca76-000000000015 | OK | ansible.builtin.include_vars | undercloud
2026-01-22 12:39:09.437201 | fa163e0d-6f45-64a1-ca76-000000000015 | TIMING | ansible.builtin.include_vars | undercloud | 0:00:03.402207 | 0.05s
2026-01-22 12:39:09.444027 | fa163e0d-6f45-64a1-ca76-000000000015 | OK | ansible.builtin.include_vars | standalone
2026-01-22 12:39:09.444468 | fa163e0d-6f45-64a1-ca76-000000000015 | TIMING | ansible.builtin.include_vars | standalone | 0:00:03.409487 | 0.04s
PLAY [Include extra variables from files] **************************************
2026-01-22 12:39:09.515600 | fa163e0d-6f45-64a1-ca76-000000000018 | TASK | Include Service VIP vars
2026-01-22 12:39:09.542598 | fa163e0d-6f45-64a1-ca76-000000000018 | OK | Include Service VIP vars | undercloud
2026-01-22 12:39:09.543724 | fa163e0d-6f45-64a1-ca76-000000000018 | TIMING | Include Service VIP vars | undercloud | 0:00:03.508731 | 0.03s
2026-01-22 12:39:09.554323 | fa163e0d-6f45-64a1-ca76-000000000018 | OK | Include Service VIP vars | standalone
2026-01-22 12:39:09.554722 | fa163e0d-6f45-64a1-ca76-000000000018 | TIMING | Include Service VIP vars | standalone | 0:00:03.519744 | 0.03s
2026-01-22 12:39:09.558105 | fa163e0d-6f45-64a1-ca76-000000000019 | TASK | Include OVN bridge MAC address variables
2026-01-22 12:39:09.584515 | fa163e0d-6f45-64a1-ca76-000000000019 | OK | Include OVN bridge MAC address variables | undercloud
2026-01-22 12:39:09.585548 | fa163e0d-6f45-64a1-ca76-000000000019 | TIMING | Include OVN bridge MAC address variables | undercloud | 0:00:03.550553 | 0.03s
2026-01-22 12:39:09.596992 | fa163e0d-6f45-64a1-ca76-000000000019 | OK | Include OVN bridge MAC address variables | standalone
2026-01-22 12:39:09.597387 | fa163e0d-6f45-64a1-ca76-000000000019 | TIMING | Include OVN bridge MAC address variables | standalone | 0:00:03.562408 | 0.03s
PLAY [Render all_nodes data as group_vars for overcloud] ***********************
2026-01-22 12:39:09.676992 | fa163e0d-6f45-64a1-ca76-00000000001c | TASK | Render all_nodes data as group_vars for overcloud
2026-01-22 12:39:10.414978 | fa163e0d-6f45-64a1-ca76-00000000001c | OK | Render all_nodes data as group_vars for overcloud | standalone -> localhost
[WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-00000000001c') missing from stats
PLAY [Set all_nodes data as group_vars for overcloud] **************************
2026-01-22 12:39:10.504718 | fa163e0d-6f45-64a1-ca76-00000000001f | TASK | Set all_nodes data as group_vars for overcloud
2026-01-22 12:39:10.539957 | fa163e0d-6f45-64a1-ca76-00000000001f | OK | Set all_nodes data as group_vars for overcloud | standalone
2026-01-22 12:39:10.540650 | fa163e0d-6f45-64a1-ca76-00000000001f | TIMING | Set all_nodes data as group_vars for overcloud | standalone | 0:00:04.505662 | 0.04s
PLAY [Manage SELinux] **********************************************************
2026-01-22
12:39:10.601460 | fa163e0d-6f45-64a1-ca76-000000000023 | TASK | Set selinux state 2026-01-22 12:39:11.140782 | fa163e0d-6f45-64a1-ca76-000000000023 | OK | Set selinux state | standalone 2026-01-22 12:39:11.142189 | fa163e0d-6f45-64a1-ca76-000000000023 | TIMING | Set selinux state | standalone | 0:00:05.107192 | 0.54s PLAY [Generate /etc/hosts] ***************************************************** 2026-01-22 12:39:11.283327 | fa163e0d-6f45-64a1-ca76-000000000026 | TASK | Configure Hosts Entries 2026-01-22 12:39:11.311068 | fa163e0d-6f45-64a1-ca76-000000000026 | TIMING | Configure Hosts Entries | undercloud | 0:00:05.276068 | 0.03s 2026-01-22 12:39:11.332574 | fa163e0d-6f45-64a1-ca76-000000000026 | TIMING | Configure Hosts Entries | standalone | 0:00:05.297585 | 0.02s 2026-01-22 12:39:11.375668 | fa163e0d-6f45-64a1-ca76-0000000001f1 | TASK | Create temporary file for hosts 2026-01-22 12:39:11.705345 | fa163e0d-6f45-64a1-ca76-0000000001f1 | CHANGED | Create temporary file for hosts | undercloud 2026-01-22 12:39:11.709430 | fa163e0d-6f45-64a1-ca76-0000000001f1 | TIMING | tripleo_hosts_entries : Create temporary file for hosts | undercloud | 0:00:05.674427 | 0.33s 2026-01-22 12:39:11.710537 | fa163e0d-6f45-64a1-ca76-0000000001f1 | CHANGED | Create temporary file for hosts | standalone 2026-01-22 12:39:11.711437 | fa163e0d-6f45-64a1-ca76-0000000001f1 | TIMING | tripleo_hosts_entries : Create temporary file for hosts | standalone | 0:00:05.676450 | 0.32s 2026-01-22 12:39:11.730790 | fa163e0d-6f45-64a1-ca76-0000000001f2 | TASK | Prepare temporary /etc/hosts 2026-01-22 12:39:12.122256 | fa163e0d-6f45-64a1-ca76-0000000001f2 | CHANGED | Prepare temporary /etc/hosts | standalone 2026-01-22 12:39:12.123192 | fa163e0d-6f45-64a1-ca76-0000000001f2 | TIMING | tripleo_hosts_entries : Prepare temporary /etc/hosts | standalone | 0:00:06.088206 | 0.37s 2026-01-22 12:39:12.123712 | fa163e0d-6f45-64a1-ca76-0000000001f2 | CHANGED | Prepare temporary /etc/hosts | undercloud 2026-01-22 12:39:12.124293 | fa163e0d-6f45-64a1-ca76-0000000001f2 | TIMING | tripleo_hosts_entries : Prepare temporary /etc/hosts | undercloud | 0:00:06.089315 | 0.39s 2026-01-22 12:39:12.138604 | fa163e0d-6f45-64a1-ca76-0000000001f3 | TASK | Remove old Heat hosts configuration (if present) 2026-01-22 12:39:12.503322 | fa163e0d-6f45-64a1-ca76-0000000001f3 | OK | Remove old Heat hosts configuration (if present) | undercloud 2026-01-22 12:39:12.504154 | fa163e0d-6f45-64a1-ca76-0000000001f3 | TIMING | tripleo_hosts_entries : Remove old Heat hosts configuration (if present) | undercloud | 0:00:06.469170 | 0.36s 2026-01-22 12:39:12.504674 | fa163e0d-6f45-64a1-ca76-0000000001f3 | OK | Remove old Heat hosts configuration (if present) | standalone 2026-01-22 12:39:12.505067 | fa163e0d-6f45-64a1-ca76-0000000001f3 | TIMING | tripleo_hosts_entries : Remove old Heat hosts configuration (if present) | standalone | 0:00:06.470089 | 0.35s 2026-01-22 12:39:12.519041 | fa163e0d-6f45-64a1-ca76-0000000001f4 | TASK | Render out the hosts entries 2026-01-22 12:39:12.644762 | fa163e0d-6f45-64a1-ca76-0000000001f4 | OK | Render out the hosts entries | undercloud 2026-01-22 12:39:12.645846 | fa163e0d-6f45-64a1-ca76-0000000001f4 | TIMING | tripleo_hosts_entries : Render out the hosts entries | undercloud | 0:00:06.610853 | 0.13s 2026-01-22 12:39:12.663954 | fa163e0d-6f45-64a1-ca76-0000000001f5 | TASK | Prepare new /etc/hosts 2026-01-22 12:39:12.924410 | fa163e0d-6f45-64a1-ca76-0000000001f5 | CHANGED | Prepare new /etc/hosts | standalone 2026-01-22 12:39:12.925500 | 
fa163e0d-6f45-64a1-ca76-0000000001f5 | TIMING | tripleo_hosts_entries : Prepare new /etc/hosts | standalone | 0:00:06.890508 | 0.24s 2026-01-22 12:39:12.936566 | fa163e0d-6f45-64a1-ca76-0000000001f5 | CHANGED | Prepare new /etc/hosts | undercloud 2026-01-22 12:39:12.937125 | fa163e0d-6f45-64a1-ca76-0000000001f5 | TIMING | tripleo_hosts_entries : Prepare new /etc/hosts | undercloud | 0:00:06.902143 | 0.27s 2026-01-22 12:39:12.951191 | fa163e0d-6f45-64a1-ca76-0000000001f6 | TASK | Update /etc/hosts contents (if changed) 2026-01-22 12:39:13.249247 | fa163e0d-6f45-64a1-ca76-0000000001f6 | CHANGED | Update /etc/hosts contents (if changed) | undercloud 2026-01-22 12:39:13.251972 | fa163e0d-6f45-64a1-ca76-0000000001f6 | TIMING | tripleo_hosts_entries : Update /etc/hosts contents (if changed) | undercloud | 0:00:07.216971 | 0.30s 2026-01-22 12:39:13.308825 | fa163e0d-6f45-64a1-ca76-0000000001f6 | CHANGED | Update /etc/hosts contents (if changed) | standalone 2026-01-22 12:39:13.310933 | fa163e0d-6f45-64a1-ca76-0000000001f6 | TIMING | tripleo_hosts_entries : Update /etc/hosts contents (if changed) | standalone | 0:00:07.275942 | 0.35s 2026-01-22 12:39:13.330060 | fa163e0d-6f45-64a1-ca76-0000000001f7 | TASK | Clean up temporary hosts file 2026-01-22 12:39:13.684723 | fa163e0d-6f45-64a1-ca76-0000000001f7 | CHANGED | Clean up temporary hosts file | standalone 2026-01-22 12:39:13.685784 | fa163e0d-6f45-64a1-ca76-0000000001f7 | TIMING | tripleo_hosts_entries : Clean up temporary hosts file | standalone | 0:00:07.650800 | 0.33s 2026-01-22 12:39:13.686216 | fa163e0d-6f45-64a1-ca76-0000000001f7 | CHANGED | Clean up temporary hosts file | undercloud 2026-01-22 12:39:13.686580 | fa163e0d-6f45-64a1-ca76-0000000001f7 | TIMING | tripleo_hosts_entries : Clean up temporary hosts file | undercloud | 0:00:07.651602 | 0.35s PLAY [Common roles for TripleO servers] **************************************** 2026-01-22 12:39:13.768709 | fa163e0d-6f45-64a1-ca76-00000000002a | TASK | Common roles for TripleO servers 2026-01-22 12:39:13.795223 | fa163e0d-6f45-64a1-ca76-00000000002a | OK | Common roles for TripleO servers | standalone -> localhost | result={ "changed": false, "msg": "Use --start-at-task 'Common roles for TripleO servers' to resume from this task" } [WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-00000000002a') missing from stats 2026-01-22 12:39:13.856149 | fa163e0d-6f45-64a1-ca76-00000000002c | TIMING | include_role : tripleo_bootstrap | standalone | 0:00:07.821160 | 0.02s 2026-01-22 12:39:13.907538 | fa163e0d-6f45-64a1-ca76-000000000254 | TASK | Gather variables for each operating system 2026-01-22 12:39:14.022569 | fa163e0d-6f45-64a1-ca76-000000000254 | OK | Gather variables for each operating system | standalone | item=/usr/share/ansible/roles/tripleo_bootstrap/vars/redhat-9.yml 2026-01-22 12:39:14.024038 | fa163e0d-6f45-64a1-ca76-000000000254 | TIMING | tripleo_bootstrap : Gather variables for each operating system | standalone | 0:00:07.989043 | 0.12s 2026-01-22 12:39:14.029744 | fa163e0d-6f45-64a1-ca76-000000000254 | TIMING | tripleo_bootstrap : Gather variables for each operating system | standalone | 0:00:07.994748 | 0.12s 2026-01-22 12:39:14.053813 | fa163e0d-6f45-64a1-ca76-000000000256 | TASK | Check release version package is installed 2026-01-22 12:39:14.357059 | fa163e0d-6f45-64a1-ca76-000000000256 | CHANGED | Check release version package is installed | standalone 2026-01-22 12:39:14.358454 | fa163e0d-6f45-64a1-ca76-000000000256 | TIMING | tripleo_bootstrap : Check release 
version package is installed | standalone | 0:00:08.323459 | 0.30s 2026-01-22 12:39:14.386104 | fa163e0d-6f45-64a1-ca76-000000000257 | TASK | Deploy release version package 2026-01-22 12:39:19.542852 | fa163e0d-6f45-64a1-ca76-000000000257 | CHANGED | Deploy release version package | standalone 2026-01-22 12:39:19.544362 | fa163e0d-6f45-64a1-ca76-000000000257 | TIMING | tripleo_bootstrap : Deploy release version package | standalone | 0:00:13.509358 | 5.16s 2026-01-22 12:39:19.569495 | fa163e0d-6f45-64a1-ca76-000000000259 | TASK | Include packages tasks 2026-01-22 12:39:19.596518 | fa163e0d-6f45-64a1-ca76-000000000259 | TIMING | tripleo_bootstrap : Include packages tasks | standalone | 0:00:13.561514 | 0.03s 2026-01-22 12:39:19.622343 | c17e4c1f-c247-4370-a598-83310a06f6ae | INCLUDED | /usr/share/ansible/roles/tripleo_bootstrap/tasks/packages.yml | standalone 2026-01-22 12:39:19.653046 | fa163e0d-6f45-64a1-ca76-0000000002ad | TASK | Gather facts if they don't exist 2026-01-22 12:39:19.700034 | fa163e0d-6f45-64a1-ca76-0000000002ad | SKIPPED | Gather facts if they don't exist | standalone 2026-01-22 12:39:19.701233 | fa163e0d-6f45-64a1-ca76-0000000002ad | TIMING | tripleo_bootstrap : Gather facts if they don't exist | standalone | 0:00:13.666241 | 0.05s 2026-01-22 12:39:19.725313 | fa163e0d-6f45-64a1-ca76-0000000002ae | TASK | Gather variables for each operating system 2026-01-22 12:39:19.833369 | fa163e0d-6f45-64a1-ca76-0000000002ae | SKIPPED | Gather variables for each operating system | standalone | item=/usr/share/ansible/roles/tripleo_bootstrap/vars/redhat-9.yml 2026-01-22 12:39:19.844430 | fa163e0d-6f45-64a1-ca76-0000000002ae | TIMING | tripleo_bootstrap : Gather variables for each operating system | standalone | 0:00:13.809431 | 0.12s 2026-01-22 12:39:19.862223 | fa163e0d-6f45-64a1-ca76-0000000002b0 | TASK | Check required packages to bootstrap TripleO is installed 2026-01-22 12:39:20.102306 | fa163e0d-6f45-64a1-ca76-0000000002b0 | CHANGED | Check required packages to bootstrap TripleO is installed | standalone 2026-01-22 12:39:20.103860 | fa163e0d-6f45-64a1-ca76-0000000002b0 | TIMING | tripleo_bootstrap : Check required packages to bootstrap TripleO is installed | standalone | 0:00:14.068864 | 0.24s 2026-01-22 12:39:20.129894 | fa163e0d-6f45-64a1-ca76-0000000002b1 | TASK | Deploy required packages to bootstrap TripleO 2026-01-22 12:39:25.673296 | fa163e0d-6f45-64a1-ca76-0000000002b1 | CHANGED | Deploy required packages to bootstrap TripleO | standalone 2026-01-22 12:39:25.676147 | fa163e0d-6f45-64a1-ca76-0000000002b1 | TIMING | tripleo_bootstrap : Deploy required packages to bootstrap TripleO | standalone | 0:00:19.641146 | 5.54s 2026-01-22 12:39:25.701126 | fa163e0d-6f45-64a1-ca76-0000000002b2 | TASK | Ensure packages are actually well installed 2026-01-22 12:39:26.734508 | fa163e0d-6f45-64a1-ca76-0000000002b2 | CHANGED | Ensure packages are actually well installed | standalone 2026-01-22 12:39:26.735717 | fa163e0d-6f45-64a1-ca76-0000000002b2 | TIMING | tripleo_bootstrap : Ensure packages are actually well installed | standalone | 0:00:20.700725 | 1.03s 2026-01-22 12:39:26.755880 | fa163e0d-6f45-64a1-ca76-00000000025a | TASK | Enable openvswitch service if installed 2026-01-22 12:39:27.715894 | fa163e0d-6f45-64a1-ca76-00000000025a | CHANGED | Enable openvswitch service if installed | standalone 2026-01-22 12:39:27.717816 | fa163e0d-6f45-64a1-ca76-00000000025a | TIMING | tripleo_bootstrap : Enable openvswitch service if installed | standalone | 0:00:21.682820 | 0.96s 2026-01-22 
12:39:27.746086 | fa163e0d-6f45-64a1-ca76-00000000025b | TASK | Create /var/lib/heat-config/tripleo-config-download directory for deployment data 2026-01-22 12:39:27.917930 | fa163e0d-6f45-64a1-ca76-00000000025b | CHANGED | Create /var/lib/heat-config/tripleo-config-download directory for deployment data | standalone 2026-01-22 12:39:27.919700 | fa163e0d-6f45-64a1-ca76-00000000025b | TIMING | tripleo_bootstrap : Create /var/lib/heat-config/tripleo-config-download directory for deployment data | standalone | 0:00:21.884668 | 0.17s 2026-01-22 12:39:27.937437 | fa163e0d-6f45-64a1-ca76-00000000025d | TASK | Check required legacy network packages for bootstrap TripleO is installed 2026-01-22 12:39:28.186173 | fa163e0d-6f45-64a1-ca76-00000000025d | CHANGED | Check required legacy network packages for bootstrap TripleO is installed | standalone 2026-01-22 12:39:28.186916 | fa163e0d-6f45-64a1-ca76-00000000025d | TIMING | tripleo_bootstrap : Check required legacy network packages for bootstrap TripleO is installed | standalone | 0:00:22.151931 | 0.25s 2026-01-22 12:39:28.203403 | fa163e0d-6f45-64a1-ca76-00000000025e | TASK | Deploy network-scripts required for deprecated network service 2026-01-22 12:39:28.262439 | fa163e0d-6f45-64a1-ca76-00000000025e | SKIPPED | Deploy network-scripts required for deprecated network service | standalone 2026-01-22 12:39:28.263321 | fa163e0d-6f45-64a1-ca76-00000000025e | TIMING | tripleo_bootstrap : Deploy network-scripts required for deprecated network service | standalone | 0:00:22.228333 | 0.06s 2026-01-22 12:39:28.282334 | fa163e0d-6f45-64a1-ca76-00000000025f | TASK | Ensure network service is enabled 2026-01-22 12:39:28.671022 | fa163e0d-6f45-64a1-ca76-00000000025f | OK | Ensure network service is enabled | standalone 2026-01-22 12:39:28.672883 | fa163e0d-6f45-64a1-ca76-00000000025f | TIMING | tripleo_bootstrap : Ensure network service is enabled | standalone | 0:00:22.637885 | 0.39s 2026-01-22 12:39:28.699494 | fa163e0d-6f45-64a1-ca76-000000000261 | TASK | Set 'dns=none' in /etc/NetworkManager/NetworkManager.conf 2026-01-22 12:39:29.032073 | fa163e0d-6f45-64a1-ca76-000000000261 | CHANGED | Set 'dns=none' in /etc/NetworkManager/NetworkManager.conf | standalone 2026-01-22 12:39:29.032764 | fa163e0d-6f45-64a1-ca76-000000000261 | TIMING | tripleo_bootstrap : Set 'dns=none' in /etc/NetworkManager/NetworkManager.conf | standalone | 0:00:22.997780 | 0.33s 2026-01-22 12:39:29.049400 | fa163e0d-6f45-64a1-ca76-000000000262 | TASK | Set 'rc-manager=unmanaged' in /etc/NetworkManager/NetworkManager.conf 2026-01-22 12:39:29.281249 | fa163e0d-6f45-64a1-ca76-000000000262 | CHANGED | Set 'rc-manager=unmanaged' in /etc/NetworkManager/NetworkManager.conf | standalone 2026-01-22 12:39:29.282544 | fa163e0d-6f45-64a1-ca76-000000000262 | TIMING | tripleo_bootstrap : Set 'rc-manager=unmanaged' in /etc/NetworkManager/NetworkManager.conf | standalone | 0:00:23.247551 | 0.23s 2026-01-22 12:39:29.306898 | fa163e0d-6f45-64a1-ca76-000000000263 | TASK | Reload NetworkManager 2026-01-22 12:39:29.697641 | fa163e0d-6f45-64a1-ca76-000000000263 | CHANGED | Reload NetworkManager | standalone 2026-01-22 12:39:29.699485 | fa163e0d-6f45-64a1-ca76-000000000263 | TIMING | tripleo_bootstrap : Reload NetworkManager | standalone | 0:00:23.664488 | 0.39s 2026-01-22 12:39:29.725879 | fa163e0d-6f45-64a1-ca76-000000000265 | TASK | Symlink puppet modules under /etc/puppet/modules 2026-01-22 12:39:29.947130 | fa163e0d-6f45-64a1-ca76-000000000265 | CHANGED | Symlink puppet modules under /etc/puppet/modules | 
standalone 2026-01-22 12:39:29.948443 | fa163e0d-6f45-64a1-ca76-000000000265 | TIMING | tripleo_bootstrap : Symlink puppet modules under /etc/puppet/modules | standalone | 0:00:23.913448 | 0.22s 2026-01-22 12:39:29.972491 | fa163e0d-6f45-64a1-ca76-000000000266 | TASK | Check if /usr/bin/ansible-playbook exists 2026-01-22 12:39:30.321121 | fa163e0d-6f45-64a1-ca76-000000000266 | OK | Check if /usr/bin/ansible-playbook exists | standalone 2026-01-22 12:39:30.322446 | fa163e0d-6f45-64a1-ca76-000000000266 | TIMING | tripleo_bootstrap : Check if /usr/bin/ansible-playbook exists | standalone | 0:00:24.287453 | 0.35s 2026-01-22 12:39:30.346898 | fa163e0d-6f45-64a1-ca76-000000000267 | TASK | Check if /usr/bin/ansible-playbook-3 exists 2026-01-22 12:39:30.556748 | fa163e0d-6f45-64a1-ca76-000000000267 | OK | Check if /usr/bin/ansible-playbook-3 exists | standalone 2026-01-22 12:39:30.557708 | fa163e0d-6f45-64a1-ca76-000000000267 | TIMING | tripleo_bootstrap : Check if /usr/bin/ansible-playbook-3 exists | standalone | 0:00:24.522719 | 0.21s 2026-01-22 12:39:30.581114 | fa163e0d-6f45-64a1-ca76-000000000268 | TASK | Symlink /usr/bin/ansible-playbook-3 to /usr/bin/ansible-playbook 2026-01-22 12:39:30.812130 | fa163e0d-6f45-64a1-ca76-000000000268 | CHANGED | Symlink /usr/bin/ansible-playbook-3 to /usr/bin/ansible-playbook | standalone 2026-01-22 12:39:30.813362 | fa163e0d-6f45-64a1-ca76-000000000268 | TIMING | tripleo_bootstrap : Symlink /usr/bin/ansible-playbook-3 to /usr/bin/ansible-playbook | standalone | 0:00:24.778368 | 0.23s 2026-01-22 12:39:30.877875 | fa163e0d-6f45-64a1-ca76-00000000002d | TIMING | include_role : tripleo_ssh_known_hosts | standalone | 0:00:24.842875 | 0.02s 2026-01-22 12:39:30.954049 | fa163e0d-6f45-64a1-ca76-000000000361 | TASK | Create temporary file for ssh_known_hosts 2026-01-22 12:39:31.155572 | fa163e0d-6f45-64a1-ca76-000000000361 | CHANGED | Create temporary file for ssh_known_hosts | standalone 2026-01-22 12:39:31.156293 | fa163e0d-6f45-64a1-ca76-000000000361 | TIMING | tripleo_ssh_known_hosts : Create temporary file for ssh_known_hosts | standalone | 0:00:25.121309 | 0.20s 2026-01-22 12:39:31.172791 | fa163e0d-6f45-64a1-ca76-000000000362 | TASK | Check for ssh_known_hosts file 2026-01-22 12:39:31.370957 | fa163e0d-6f45-64a1-ca76-000000000362 | OK | Check for ssh_known_hosts file | standalone 2026-01-22 12:39:31.371699 | fa163e0d-6f45-64a1-ca76-000000000362 | TIMING | tripleo_ssh_known_hosts : Check for ssh_known_hosts file | standalone | 0:00:25.336713 | 0.20s 2026-01-22 12:39:31.388245 | fa163e0d-6f45-64a1-ca76-000000000363 | TASK | Create a temporary copy of ssh_known_hosts 2026-01-22 12:39:31.413972 | fa163e0d-6f45-64a1-ca76-000000000363 | SKIPPED | Create a temporary copy of ssh_known_hosts | standalone 2026-01-22 12:39:31.414635 | fa163e0d-6f45-64a1-ca76-000000000363 | TIMING | tripleo_ssh_known_hosts : Create a temporary copy of ssh_known_hosts | standalone | 0:00:25.379651 | 0.03s 2026-01-22 12:39:31.437775 | fa163e0d-6f45-64a1-ca76-000000000364 | TASK | Write temporary file 2026-01-22 12:39:31.465056 | fa163e0d-6f45-64a1-ca76-000000000364 | SKIPPED | Write temporary file | standalone 2026-01-22 12:39:31.466191 | fa163e0d-6f45-64a1-ca76-000000000364 | TIMING | tripleo_ssh_known_hosts : Write temporary file | standalone | 0:00:25.431197 | 0.03s 2026-01-22 12:39:31.489617 | fa163e0d-6f45-64a1-ca76-000000000365 | TASK | Set ssh_known_hosts fact 2026-01-22 12:39:31.568418 | fa163e0d-6f45-64a1-ca76-000000000365 | OK | Set ssh_known_hosts fact | standalone 2026-01-22 
12:39:31.569427 | fa163e0d-6f45-64a1-ca76-000000000365 | TIMING | tripleo_ssh_known_hosts : Set ssh_known_hosts fact | standalone | 0:00:25.534435 | 0.08s 2026-01-22 12:39:31.592813 | fa163e0d-6f45-64a1-ca76-000000000366 | TASK | Add host keys to temporary ssh_known_hosts 2026-01-22 12:39:31.807048 | fa163e0d-6f45-64a1-ca76-000000000366 | CHANGED | Add host keys to temporary ssh_known_hosts | standalone 2026-01-22 12:39:31.808270 | fa163e0d-6f45-64a1-ca76-000000000366 | TIMING | tripleo_ssh_known_hosts : Add host keys to temporary ssh_known_hosts | standalone | 0:00:25.773277 | 0.21s 2026-01-22 12:39:31.831918 | fa163e0d-6f45-64a1-ca76-000000000367 | TASK | In-place update of /etc/ssh_known_hosts 2026-01-22 12:39:32.023719 | fa163e0d-6f45-64a1-ca76-000000000367 | CHANGED | In-place update of /etc/ssh_known_hosts | standalone 2026-01-22 12:39:32.024889 | fa163e0d-6f45-64a1-ca76-000000000367 | TIMING | tripleo_ssh_known_hosts : In-place update of /etc/ssh_known_hosts | standalone | 0:00:25.989893 | 0.19s 2026-01-22 12:39:32.048618 | fa163e0d-6f45-64a1-ca76-000000000368 | TASK | Remove temp file 2026-01-22 12:39:32.269304 | fa163e0d-6f45-64a1-ca76-000000000368 | CHANGED | Remove temp file | standalone 2026-01-22 12:39:32.270610 | fa163e0d-6f45-64a1-ca76-000000000368 | TIMING | tripleo_ssh_known_hosts : Remove temp file | standalone | 0:00:26.235616 | 0.22s PLAY [Deploy step tasks for step 0] ******************************************** 2026-01-22 12:39:32.449237 | fa163e0d-6f45-64a1-ca76-000000000034 | TASK | Deploy step tasks for step 0 2026-01-22 12:39:32.509293 | fa163e0d-6f45-64a1-ca76-000000000034 | OK | Deploy step tasks for step 0 | standalone -> localhost | result={ "changed": false, "msg": "Use --start-at-task 'Deploy step tasks for step 0' to resume from this task" } [WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-000000000034') missing from stats 2026-01-22 12:39:32.534795 | fa163e0d-6f45-64a1-ca76-000000000035 | TASK | Ensure /var/log/journal exists 2026-01-22 12:39:32.765964 | fa163e0d-6f45-64a1-ca76-000000000035 | CHANGED | Ensure /var/log/journal exists | standalone 2026-01-22 12:39:32.767287 | fa163e0d-6f45-64a1-ca76-000000000035 | TIMING | Ensure /var/log/journal exists | standalone | 0:00:26.732294 | 0.23s 2026-01-22 12:39:32.794112 | fa163e0d-6f45-64a1-ca76-000000000036 | TASK | Check cloud-init status 2026-01-22 12:39:33.024202 | fa163e0d-6f45-64a1-ca76-000000000036 | CHANGED | Check cloud-init status | standalone 2026-01-22 12:39:33.024942 | fa163e0d-6f45-64a1-ca76-000000000036 | TIMING | Check cloud-init status | standalone | 0:00:26.989960 | 0.23s 2026-01-22 12:39:33.048864 | fa163e0d-6f45-64a1-ca76-000000000037 | TASK | Check if cloud-init is disabled via kernel args 2026-01-22 12:39:33.258604 | fa163e0d-6f45-64a1-ca76-000000000037 | CHANGED | Check if cloud-init is disabled via kernel args | standalone 2026-01-22 12:39:33.259342 | fa163e0d-6f45-64a1-ca76-000000000037 | TIMING | Check if cloud-init is disabled via kernel args | standalone | 0:00:27.224359 | 0.21s 2026-01-22 12:39:33.276061 | fa163e0d-6f45-64a1-ca76-000000000038 | TASK | Wait for cloud-init to finish, if enabled 2026-01-22 12:39:33.631415 | fa163e0d-6f45-64a1-ca76-000000000038 | OK | Wait for cloud-init to finish, if enabled | standalone 2026-01-22 12:39:33.632737 | fa163e0d-6f45-64a1-ca76-000000000038 | TIMING | Wait for cloud-init to finish, if enabled | standalone | 0:00:27.597746 | 0.36s 2026-01-22 12:39:33.657977 | fa163e0d-6f45-64a1-ca76-000000000039 | TASK | Create 
/var/lib/container-puppet 2026-01-22 12:39:33.908399 | fa163e0d-6f45-64a1-ca76-000000000039 | CHANGED | Create /var/lib/container-puppet | standalone 2026-01-22 12:39:33.909413 | fa163e0d-6f45-64a1-ca76-000000000039 | TIMING | Create /var/lib/container-puppet | standalone | 0:00:27.874428 | 0.25s 2026-01-22 12:39:33.927150 | fa163e0d-6f45-64a1-ca76-00000000003a | TASK | Write container-puppet.sh 2026-01-22 12:39:34.354765 | fa163e0d-6f45-64a1-ca76-00000000003a | CHANGED | Write container-puppet.sh | standalone 2026-01-22 12:39:34.356002 | fa163e0d-6f45-64a1-ca76-00000000003a | TIMING | Write container-puppet.sh | standalone | 0:00:28.321010 | 0.43s [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. Found: '{{ playbook_dir }}/{{ _task_file_path }}' is exists 2026-01-22 12:39:34.407997 | fa163e0d-6f45-64a1-ca76-00000000003b | TIMING | include_tasks | standalone | 0:00:28.373006 | 0.03s 2026-01-22 12:39:34.423336 | a9b8d35a-5468-4da3-b768-8f74495093e5 | INCLUDED | /root/standalone-ansible-mz1ymllk/Standalone/deploy_steps_tasks_step0.yaml | standalone 2026-01-22 12:39:34.448617 | fa163e0d-6f45-64a1-ca76-000000000416 | TASK | Configure tuned before reboot 2026-01-22 12:39:34.487070 | fa163e0d-6f45-64a1-ca76-000000000416 | TIMING | Configure tuned before reboot | standalone | 0:00:28.452072 | 0.04s 2026-01-22 12:39:34.537496 | fa163e0d-6f45-64a1-ca76-000000000455 | TASK | Gather variables for each operating system 2026-01-22 12:39:34.654521 | fa163e0d-6f45-64a1-ca76-000000000455 | OK | Gather variables for each operating system | standalone | item=/usr/share/ansible/roles/tuned/vars/redhat.yml 2026-01-22 12:39:34.656644 | fa163e0d-6f45-64a1-ca76-000000000455 | TIMING | tuned : Gather variables for each operating system | standalone | 0:00:28.621650 | 0.12s 2026-01-22 12:39:34.667523 | fa163e0d-6f45-64a1-ca76-000000000455 | TIMING | tuned : Gather variables for each operating system | standalone | 0:00:28.632529 | 0.13s 2026-01-22 12:39:34.754230 | fa163e0d-6f45-64a1-ca76-000000000456 | TIMING | tuned : include_tasks | standalone | 0:00:28.719226 | 0.06s 2026-01-22 12:39:34.776893 | 7fd4c9e8-32c4-420a-815f-ac3f2990fa0b | INCLUDED | /usr/share/ansible/roles/tuned/tasks/tuned_install.yml | standalone 2026-01-22 12:39:34.813047 | fa163e0d-6f45-64a1-ca76-00000000047f | TASK | Check tuned package is installed 2026-01-22 12:39:35.087426 | fa163e0d-6f45-64a1-ca76-00000000047f | CHANGED | Check tuned package is installed | standalone 2026-01-22 12:39:35.088852 | fa163e0d-6f45-64a1-ca76-00000000047f | TIMING | tuned : Check tuned package is installed | standalone | 0:00:29.053859 | 0.27s 2026-01-22 12:39:35.116132 | fa163e0d-6f45-64a1-ca76-000000000480 | TASK | Install tuned 2026-01-22 12:39:41.576124 | fa163e0d-6f45-64a1-ca76-000000000480 | CHANGED | Install tuned | standalone 2026-01-22 12:39:41.579287 | fa163e0d-6f45-64a1-ca76-000000000480 | TIMING | tuned : Install tuned | standalone | 0:00:35.544291 | 6.46s 2026-01-22 12:39:41.607231 | fa163e0d-6f45-64a1-ca76-000000000481 | TASK | Restart tuned 2026-01-22 12:39:43.392880 | fa163e0d-6f45-64a1-ca76-000000000481 | CHANGED | Restart tuned | standalone 2026-01-22 12:39:43.394786 | fa163e0d-6f45-64a1-ca76-000000000481 | TIMING | tuned : Restart tuned | standalone | 0:00:37.359792 | 1.78s 2026-01-22 12:39:43.423798 | fa163e0d-6f45-64a1-ca76-000000000457 | TASK | Check for tuned-adm 2026-01-22 12:39:43.630585 | fa163e0d-6f45-64a1-ca76-000000000457 | OK | Check for tuned-adm | standalone 2026-01-22 
12:39:43.631988 | fa163e0d-6f45-64a1-ca76-000000000457 | TIMING | tuned : Check for tuned-adm | standalone | 0:00:37.596997 | 0.21s 2026-01-22 12:39:43.693424 | fa163e0d-6f45-64a1-ca76-000000000458 | TIMING | tuned : include_tasks | standalone | 0:00:37.658419 | 0.03s 2026-01-22 12:39:43.723794 | c42275dd-13ad-4a58-b5dd-2c268a9466f3 | INCLUDED | /usr/share/ansible/roles/tuned/tasks/tuned_config.yml | standalone 2026-01-22 12:39:43.764247 | fa163e0d-6f45-64a1-ca76-0000000004b6 | TASK | Ensure profile directory exists 2026-01-22 12:39:43.831827 | fa163e0d-6f45-64a1-ca76-0000000004b6 | SKIPPED | Ensure profile directory exists | standalone 2026-01-22 12:39:43.832893 | fa163e0d-6f45-64a1-ca76-0000000004b6 | TIMING | tuned : Ensure profile directory exists | standalone | 0:00:37.797901 | 0.07s 2026-01-22 12:39:43.858238 | fa163e0d-6f45-64a1-ca76-0000000004b7 | TASK | Create custom tuned profile 2026-01-22 12:39:43.920868 | fa163e0d-6f45-64a1-ca76-0000000004b7 | SKIPPED | Create custom tuned profile | standalone 2026-01-22 12:39:43.921949 | fa163e0d-6f45-64a1-ca76-0000000004b7 | TIMING | tuned : Create custom tuned profile | standalone | 0:00:37.886957 | 0.06s 2026-01-22 12:39:43.946835 | fa163e0d-6f45-64a1-ca76-0000000004b8 | TASK | Check tuned active profile 2026-01-22 12:39:44.293397 | fa163e0d-6f45-64a1-ca76-0000000004b8 | OK | Check tuned active profile | standalone 2026-01-22 12:39:44.294677 | fa163e0d-6f45-64a1-ca76-0000000004b8 | TIMING | tuned : Check tuned active profile | standalone | 0:00:38.259660 | 0.35s 2026-01-22 12:39:44.320369 | fa163e0d-6f45-64a1-ca76-0000000004b9 | TASK | Check Tuned Configuration file exists 2026-01-22 12:39:44.541731 | fa163e0d-6f45-64a1-ca76-0000000004b9 | OK | Check Tuned Configuration file exists | standalone 2026-01-22 12:39:44.543059 | fa163e0d-6f45-64a1-ca76-0000000004b9 | TIMING | tuned : Check Tuned Configuration file exists | standalone | 0:00:38.508066 | 0.22s 2026-01-22 12:39:44.568555 | fa163e0d-6f45-64a1-ca76-0000000004ba | TASK | Fail if tuned profile conf is absent but isolated cores is provided 2026-01-22 12:39:44.619122 | fa163e0d-6f45-64a1-ca76-0000000004ba | SKIPPED | Fail if tuned profile conf is absent but isolated cores is provided | standalone 2026-01-22 12:39:44.620159 | fa163e0d-6f45-64a1-ca76-0000000004ba | TIMING | tuned : Fail if tuned profile conf is absent but isolated cores is provided | standalone | 0:00:38.585166 | 0.05s 2026-01-22 12:39:44.647103 | fa163e0d-6f45-64a1-ca76-0000000004bb | TASK | Configure isolated cores for profile throughput-performance 2026-01-22 12:39:44.707903 | fa163e0d-6f45-64a1-ca76-0000000004bb | SKIPPED | Configure isolated cores for profile throughput-performance | standalone 2026-01-22 12:39:44.709153 | fa163e0d-6f45-64a1-ca76-0000000004bb | TIMING | tuned : Configure isolated cores for profile throughput-performance | standalone | 0:00:38.674160 | 0.06s 2026-01-22 12:39:44.734530 | fa163e0d-6f45-64a1-ca76-0000000004bc | TASK | Enable tuned profile 2026-01-22 12:39:46.157221 | fa163e0d-6f45-64a1-ca76-0000000004bc | CHANGED | Enable tuned profile | standalone 2026-01-22 12:39:46.158606 | fa163e0d-6f45-64a1-ca76-0000000004bc | TIMING | tuned : Enable tuned profile | standalone | 0:00:40.123615 | 1.42s 2026-01-22 12:39:46.207411 | fa163e0d-6f45-64a1-ca76-000000000417 | TASK | Configure kernel args and reboot 2026-01-22 12:39:46.260823 | fa163e0d-6f45-64a1-ca76-000000000417 | TIMING | Configure kernel args and reboot | standalone | 0:00:40.225820 | 0.05s 2026-01-22 12:39:46.408181 | 
fa163e0d-6f45-64a1-ca76-000000000569 | TASK | Get the command line args of the node 2026-01-22 12:39:46.651152 | fa163e0d-6f45-64a1-ca76-000000000569 | CHANGED | Get the command line args of the node | standalone 2026-01-22 12:39:46.652263 | fa163e0d-6f45-64a1-ca76-000000000569 | TIMING | tripleo_kernel : Get the command line args of the node | standalone | 0:00:40.617277 | 0.24s 2026-01-22 12:39:46.673082 | fa163e0d-6f45-64a1-ca76-00000000056b | TASK | Check if node has a nova.conf 2026-01-22 12:39:46.859220 | fa163e0d-6f45-64a1-ca76-00000000056b | OK | Check if node has a nova.conf | standalone 2026-01-22 12:39:46.860509 | fa163e0d-6f45-64a1-ca76-00000000056b | TIMING | tripleo_kernel : Check if node has a nova.conf | standalone | 0:00:40.825518 | 0.19s 2026-01-22 12:39:46.886709 | fa163e0d-6f45-64a1-ca76-00000000056d | TASK | Warn operator about workload protection 2026-01-22 12:39:46.925827 | fa163e0d-6f45-64a1-ca76-00000000056d | SKIPPED | Warn operator about workload protection | standalone 2026-01-22 12:39:46.927070 | fa163e0d-6f45-64a1-ca76-00000000056d | TIMING | tripleo_kernel : Warn operator about workload protection | standalone | 0:00:40.892078 | 0.04s 2026-01-22 12:39:46.949771 | fa163e0d-6f45-64a1-ca76-00000000056e | TASK | Setting defer reboot fact 2026-01-22 12:39:46.988553 | fa163e0d-6f45-64a1-ca76-00000000056e | SKIPPED | Setting defer reboot fact | standalone 2026-01-22 12:39:46.989707 | fa163e0d-6f45-64a1-ca76-00000000056e | TIMING | tripleo_kernel : Setting defer reboot fact | standalone | 0:00:40.954721 | 0.04s 2026-01-22 12:39:47.012261 | fa163e0d-6f45-64a1-ca76-000000000570 | TASK | Rewrite tripleo_kernel_hugepages to use integers as keys 2026-01-22 12:39:47.041311 | fa163e0d-6f45-64a1-ca76-000000000570 | TIMING | tripleo_kernel : Rewrite tripleo_kernel_hugepages to use integers as keys | standalone | 0:00:41.006314 | 0.03s 2026-01-22 12:39:47.064082 | fa163e0d-6f45-64a1-ca76-000000000575 | TASK | Find hugepages in cmdline 2026-01-22 12:39:47.106577 | fa163e0d-6f45-64a1-ca76-000000000575 | SKIPPED | Find hugepages in cmdline | standalone 2026-01-22 12:39:47.107728 | fa163e0d-6f45-64a1-ca76-000000000575 | TIMING | tripleo_kernel : Find hugepages in cmdline | standalone | 0:00:41.072740 | 0.04s 2026-01-22 12:39:47.129769 | fa163e0d-6f45-64a1-ca76-000000000576 | TASK | Finding human-readable hugepages in cmdline 2026-01-22 12:39:47.177638 | fa163e0d-6f45-64a1-ca76-000000000576 | SKIPPED | Finding human-readable hugepages in cmdline | standalone 2026-01-22 12:39:47.178457 | fa163e0d-6f45-64a1-ca76-000000000576 | TIMING | tripleo_kernel : Finding human-readable hugepages in cmdline | standalone | 0:00:41.143474 | 0.05s 2026-01-22 12:39:47.199555 | fa163e0d-6f45-64a1-ca76-00000000057a | TASK | Find desired default hugepage size 2026-01-22 12:39:47.247195 | fa163e0d-6f45-64a1-ca76-00000000057a | SKIPPED | Find desired default hugepage size | standalone 2026-01-22 12:39:47.248011 | fa163e0d-6f45-64a1-ca76-00000000057a | TIMING | tripleo_kernel : Find desired default hugepage size | standalone | 0:00:41.213028 | 0.05s 2026-01-22 12:39:47.266209 | fa163e0d-6f45-64a1-ca76-00000000057c | TASK | Storing non-configured hugepages 2026-01-22 12:39:47.295312 | fa163e0d-6f45-64a1-ca76-00000000057c | TIMING | tripleo_kernel : Storing non-configured hugepages | standalone | 0:00:41.260320 | 0.03s 2026-01-22 12:39:47.324491 | fa163e0d-6f45-64a1-ca76-00000000057e | TASK | Validating against hugepages config received from TripleO 2026-01-22 12:39:47.376328 | 
fa163e0d-6f45-64a1-ca76-00000000057e | SKIPPED | Validating against hugepages config received from TripleO | standalone 2026-01-22 12:39:47.377582 | fa163e0d-6f45-64a1-ca76-00000000057e | TIMING | tripleo_kernel : Validating against hugepages config received from TripleO | standalone | 0:00:41.342584 | 0.05s 2026-01-22 12:39:47.404956 | fa163e0d-6f45-64a1-ca76-00000000057f | TASK | Determining the default hugepagesz 2026-01-22 12:39:47.444713 | fa163e0d-6f45-64a1-ca76-00000000057f | SKIPPED | Determining the default hugepagesz | standalone 2026-01-22 12:39:47.445886 | fa163e0d-6f45-64a1-ca76-00000000057f | TIMING | tripleo_kernel : Determining the default hugepagesz | standalone | 0:00:41.410894 | 0.04s 2026-01-22 12:39:47.471951 | fa163e0d-6f45-64a1-ca76-000000000580 | TASK | Determining if any hugepage configuration needs to be re-written 2026-01-22 12:39:47.522234 | fa163e0d-6f45-64a1-ca76-000000000580 | SKIPPED | Determining if any hugepage configuration needs to be re-written | standalone 2026-01-22 12:39:47.523674 | fa163e0d-6f45-64a1-ca76-000000000580 | TIMING | tripleo_kernel : Determining if any hugepage configuration needs to be re-written | standalone | 0:00:41.488654 | 0.05s 2026-01-22 12:39:47.551108 | fa163e0d-6f45-64a1-ca76-000000000581 | TASK | Making sure that we have a default hugepagesz 2026-01-22 12:39:47.580847 | fa163e0d-6f45-64a1-ca76-000000000581 | SKIPPED | Making sure that we have a default hugepagesz | standalone 2026-01-22 12:39:47.582173 | fa163e0d-6f45-64a1-ca76-000000000581 | TIMING | tripleo_kernel : Making sure that we have a default hugepagesz | standalone | 0:00:41.547180 | 0.03s 2026-01-22 12:39:47.608832 | fa163e0d-6f45-64a1-ca76-000000000583 | TASK | Warn about possible modifications 2026-01-22 12:39:47.639021 | fa163e0d-6f45-64a1-ca76-000000000583 | SKIPPED | Warn about possible modifications | standalone 2026-01-22 12:39:47.640184 | fa163e0d-6f45-64a1-ca76-000000000583 | TIMING | tripleo_kernel : Warn about possible modifications | standalone | 0:00:41.605192 | 0.03s 2026-01-22 12:39:47.666907 | fa163e0d-6f45-64a1-ca76-000000000584 | TASK | Remove hugepage settings from kernelargs 2026-01-22 12:39:47.717511 | fa163e0d-6f45-64a1-ca76-000000000584 | SKIPPED | Remove hugepage settings from kernelargs | standalone 2026-01-22 12:39:47.718975 | fa163e0d-6f45-64a1-ca76-000000000584 | TIMING | tripleo_kernel : Remove hugepage settings from kernelargs | standalone | 0:00:41.683978 | 0.05s 2026-01-22 12:39:47.745507 | fa163e0d-6f45-64a1-ca76-000000000585 | TASK | Cleaning up whitespaces 2026-01-22 12:39:47.796396 | fa163e0d-6f45-64a1-ca76-000000000585 | SKIPPED | Cleaning up whitespaces | standalone 2026-01-22 12:39:47.797545 | fa163e0d-6f45-64a1-ca76-000000000585 | TIMING | tripleo_kernel : Cleaning up whitespaces | standalone | 0:00:41.762553 | 0.05s 2026-01-22 12:39:47.825724 | fa163e0d-6f45-64a1-ca76-000000000587 | TASK | Check if the kernelargs entry is already present in the file 2026-01-22 12:39:48.198915 | fa163e0d-6f45-64a1-ca76-000000000587 | OK | Check if the kernelargs entry is already present in the file | standalone 2026-01-22 12:39:48.200322 | fa163e0d-6f45-64a1-ca76-000000000587 | TIMING | tripleo_kernel : Check if the kernelargs entry is already present in the file | standalone | 0:00:42.165330 | 0.37s 2026-01-22 12:39:48.227192 | fa163e0d-6f45-64a1-ca76-000000000589 | TASK | Delete older name TRIPLEO_HEAT_TEMPLATE_KERNEL_ARGS entries if present 2026-01-22 12:39:48.266499 | fa163e0d-6f45-64a1-ca76-000000000589 | SKIPPED | Delete older name 
TRIPLEO_HEAT_TEMPLATE_KERNEL_ARGS entries if present | standalone 2026-01-22 12:39:48.267754 | fa163e0d-6f45-64a1-ca76-000000000589 | TIMING | tripleo_kernel : Delete older name TRIPLEO_HEAT_TEMPLATE_KERNEL_ARGS entries if present | standalone | 0:00:42.232761 | 0.04s 2026-01-22 12:39:48.294476 | fa163e0d-6f45-64a1-ca76-00000000058a | TASK | Ensure the kernel args ( ) is present as GRUB_TRIPLEO_HEAT_TEMPLATE_KERNEL_ARGS 2026-01-22 12:39:48.354233 | fa163e0d-6f45-64a1-ca76-00000000058a | SKIPPED | Ensure the kernel args ( ) is present as GRUB_TRIPLEO_HEAT_TEMPLATE_KERNEL_ARGS | standalone 2026-01-22 12:39:48.355529 | fa163e0d-6f45-64a1-ca76-00000000058a | TIMING | tripleo_kernel : Ensure the kernel args ( ) is present as GRUB_TRIPLEO_HEAT_TEMPLATE_KERNEL_ARGS | standalone | 0:00:42.320535 | 0.06s 2026-01-22 12:39:48.380748 | fa163e0d-6f45-64a1-ca76-00000000058b | TASK | Add GRUB_TRIPLEO_HEAT_TEMPLATE_KERNEL_ARGS to the GRUB_CMDLINE_LINUX parameter 2026-01-22 12:39:48.443402 | fa163e0d-6f45-64a1-ca76-00000000058b | SKIPPED | Add GRUB_TRIPLEO_HEAT_TEMPLATE_KERNEL_ARGS to the GRUB_CMDLINE_LINUX parameter | standalone 2026-01-22 12:39:48.444467 | fa163e0d-6f45-64a1-ca76-00000000058b | TIMING | tripleo_kernel : Add GRUB_TRIPLEO_HEAT_TEMPLATE_KERNEL_ARGS to the GRUB_CMDLINE_LINUX parameter | standalone | 0:00:42.409474 | 0.06s 2026-01-22 12:39:48.467999 | fa163e0d-6f45-64a1-ca76-00000000058c | TASK | Generate grub config 2026-01-22 12:39:48.528260 | fa163e0d-6f45-64a1-ca76-00000000058c | SKIPPED | Generate grub config | standalone 2026-01-22 12:39:48.529269 | fa163e0d-6f45-64a1-ca76-00000000058c | TIMING | tripleo_kernel : Generate grub config | standalone | 0:00:42.494276 | 0.06s 2026-01-22 12:39:48.549544 | fa163e0d-6f45-64a1-ca76-000000000590 | TASK | Get the block device that /boot is on 2026-01-22 12:39:48.596903 | fa163e0d-6f45-64a1-ca76-000000000590 | SKIPPED | Get the block device that /boot is on | standalone 2026-01-22 12:39:48.597772 | fa163e0d-6f45-64a1-ca76-000000000590 | TIMING | tripleo_kernel : Get the block device that /boot is on | standalone | 0:00:42.562786 | 0.05s 2026-01-22 12:39:48.618497 | fa163e0d-6f45-64a1-ca76-000000000591 | TASK | Get the mount point that /boot is in 2026-01-22 12:39:48.688258 | fa163e0d-6f45-64a1-ca76-000000000591 | SKIPPED | Get the mount point that /boot is in | standalone 2026-01-22 12:39:48.689151 | fa163e0d-6f45-64a1-ca76-000000000591 | TIMING | tripleo_kernel : Get the mount point that /boot is in | standalone | 0:00:42.654166 | 0.07s 2026-01-22 12:39:48.709239 | fa163e0d-6f45-64a1-ca76-000000000592 | TASK | Set facts for EFI grub.cfg generation 2026-01-22 12:39:48.767407 | fa163e0d-6f45-64a1-ca76-000000000592 | SKIPPED | Set facts for EFI grub.cfg generation | standalone 2026-01-22 12:39:48.768266 | fa163e0d-6f45-64a1-ca76-000000000592 | TIMING | tripleo_kernel : Set facts for EFI grub.cfg generation | standalone | 0:00:42.733281 | 0.06s 2026-01-22 12:39:48.788384 | fa163e0d-6f45-64a1-ca76-000000000593 | TASK | Executed script to fix grub-menu 2026-01-22 12:39:48.850558 | fa163e0d-6f45-64a1-ca76-000000000593 | SKIPPED | Executed script to fix grub-menu | standalone 2026-01-22 12:39:48.851443 | fa163e0d-6f45-64a1-ca76-000000000593 | TIMING | tripleo_kernel : Executed script to fix grub-menu | standalone | 0:00:42.816458 | 0.06s 2026-01-22 12:39:48.871509 | fa163e0d-6f45-64a1-ca76-000000000594 | TASK | Check grub config paths 2026-01-22 12:39:48.941475 | fa163e0d-6f45-64a1-ca76-000000000594 | SKIPPED | Check grub config paths | standalone | 
item=/boot/efi/EFI/redhat 2026-01-22 12:39:48.958866 | fa163e0d-6f45-64a1-ca76-000000000594 | SKIPPED | Check grub config paths | standalone | item=/boot/efi/EFI/centos 2026-01-22 12:39:48.964878 | fa163e0d-6f45-64a1-ca76-000000000594 | TIMING | tripleo_kernel : Check grub config paths | standalone | 0:00:42.929893 | 0.09s 2026-01-22 12:39:48.985571 | fa163e0d-6f45-64a1-ca76-000000000595 | TASK | Write EFI grub.cfg 2026-01-22 12:39:49.059641 | fa163e0d-6f45-64a1-ca76-000000000595 | SKIPPED | Write EFI grub.cfg | standalone | item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'item': '/boot/efi/EFI/redhat', 'ansible_loop_var': 'item'} 2026-01-22 12:39:49.076689 | fa163e0d-6f45-64a1-ca76-000000000595 | SKIPPED | Write EFI grub.cfg | standalone | item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'item': '/boot/efi/EFI/centos', 'ansible_loop_var': 'item'} 2026-01-22 12:39:49.087910 | fa163e0d-6f45-64a1-ca76-000000000595 | TIMING | tripleo_kernel : Write EFI grub.cfg | standalone | 0:00:43.052924 | 0.10s 2026-01-22 12:39:49.108217 | fa163e0d-6f45-64a1-ca76-000000000596 | TASK | bz#2266025 modify GRUB_DEFAULT=saved in /etc/default/grub 2026-01-22 12:39:49.155774 | fa163e0d-6f45-64a1-ca76-000000000596 | SKIPPED | bz#2266025 modify GRUB_DEFAULT=saved in /etc/default/grub | standalone 2026-01-22 12:39:49.156585 | fa163e0d-6f45-64a1-ca76-000000000596 | TIMING | tripleo_kernel : bz#2266025 modify GRUB_DEFAULT=saved in /etc/default/grub | standalone | 0:00:43.121600 | 0.05s 2026-01-22 12:39:49.176935 | fa163e0d-6f45-64a1-ca76-000000000597 | TASK | OSPRH-16320 ensure that GRUB_TERMINAL_OUTPUT=console in /etc/default/grub 2026-01-22 12:39:49.235761 | fa163e0d-6f45-64a1-ca76-000000000597 | SKIPPED | OSPRH-16320 ensure that GRUB_TERMINAL_OUTPUT=console in /etc/default/grub | standalone 2026-01-22 12:39:49.236602 | fa163e0d-6f45-64a1-ca76-000000000597 | TIMING | tripleo_kernel : OSPRH-16320 ensure that GRUB_TERMINAL_OUTPUT=console in /etc/default/grub | standalone | 0:00:43.201617 | 0.06s 2026-01-22 12:39:49.256764 | fa163e0d-6f45-64a1-ca76-000000000598 | TASK | Execute grub2-mkconfig 2026-01-22 12:39:49.296865 | fa163e0d-6f45-64a1-ca76-000000000598 | SKIPPED | Execute grub2-mkconfig | standalone 2026-01-22 12:39:49.297696 | fa163e0d-6f45-64a1-ca76-000000000598 | TIMING | tripleo_kernel : Execute grub2-mkconfig | standalone | 0:00:43.262711 | 0.04s 2026-01-22 12:39:49.317725 | fa163e0d-6f45-64a1-ca76-000000000599 | TASK | Check for active tuned profile 2026-01-22 12:39:49.376332 | fa163e0d-6f45-64a1-ca76-000000000599 | SKIPPED | Check for active tuned profile | standalone 2026-01-22 12:39:49.377217 | fa163e0d-6f45-64a1-ca76-000000000599 | TIMING | tripleo_kernel : Check for active tuned profile | standalone | 0:00:43.342231 | 0.06s 2026-01-22 12:39:49.397458 | fa163e0d-6f45-64a1-ca76-00000000059a | TASK | Ensure $tuned_params is restored to /boot/loader/entries files 2026-01-22 12:39:49.455812 | fa163e0d-6f45-64a1-ca76-00000000059a | SKIPPED | Ensure $tuned_params is restored to /boot/loader/entries files | standalone 2026-01-22 12:39:49.456611 | fa163e0d-6f45-64a1-ca76-00000000059a | TIMING | tripleo_kernel : Ensure $tuned_params is restored to /boot/loader/entries files | standalone | 0:00:43.421627 | 0.06s 2026-01-22 12:39:49.473800 | fa163e0d-6f45-64a1-ca76-00000000059b | TASK | Set reboot required fact 2026-01-22 12:39:49.523329 | fa163e0d-6f45-64a1-ca76-00000000059b | SKIPPED | Set reboot required fact | standalone 2026-01-22 
12:39:49.523853 | fa163e0d-6f45-64a1-ca76-00000000059b | TIMING | tripleo_kernel : Set reboot required fact | standalone | 0:00:43.488872 | 0.05s 2026-01-22 12:39:49.540745 | fa163e0d-6f45-64a1-ca76-00000000059d | TASK | Apply DPDK workarounds 2026-01-22 12:39:49.579249 | fa163e0d-6f45-64a1-ca76-00000000059d | SKIPPED | Apply DPDK workarounds | standalone 2026-01-22 12:39:49.580328 | fa163e0d-6f45-64a1-ca76-00000000059d | TIMING | Apply DPDK workarounds | standalone | 0:00:43.545336 | 0.04s 2026-01-22 12:39:49.605293 | fa163e0d-6f45-64a1-ca76-00000000059f | TASK | Ensure /etc/modules-load.d exists 2026-01-22 12:39:49.634542 | fa163e0d-6f45-64a1-ca76-00000000059f | SKIPPED | Ensure /etc/modules-load.d exists | standalone 2026-01-22 12:39:49.635701 | fa163e0d-6f45-64a1-ca76-00000000059f | TIMING | tripleo_kernel : Ensure /etc/modules-load.d exists | standalone | 0:00:43.600668 | 0.03s 2026-01-22 12:39:49.660952 | fa163e0d-6f45-64a1-ca76-0000000005a0 | TASK | Load modules 2026-01-22 12:39:49.706743 | fa163e0d-6f45-64a1-ca76-0000000005a0 | SKIPPED | Load modules | standalone | item=vfio_iommu_type1 2026-01-22 12:39:49.712708 | fa163e0d-6f45-64a1-ca76-0000000005a0 | TIMING | tripleo_kernel : Load modules | standalone | 0:00:43.677710 | 0.05s 2026-01-22 12:39:49.737675 | fa163e0d-6f45-64a1-ca76-0000000005a2 | TASK | Persist modules via modules-load.d 2026-01-22 12:39:49.782229 | fa163e0d-6f45-64a1-ca76-0000000005a2 | SKIPPED | Persist modules via modules-load.d | standalone | item=vfio_iommu_type1 2026-01-22 12:39:49.788139 | fa163e0d-6f45-64a1-ca76-0000000005a2 | TIMING | tripleo_kernel : Persist modules via modules-load.d | standalone | 0:00:43.753147 | 0.05s 2026-01-22 12:39:49.813256 | fa163e0d-6f45-64a1-ca76-0000000005a4 | TASK | Drop module persistence 2026-01-22 12:39:49.857691 | fa163e0d-6f45-64a1-ca76-0000000005a4 | SKIPPED | Drop module persistence | standalone | item=vfio_iommu_type1 2026-01-22 12:39:49.863884 | fa163e0d-6f45-64a1-ca76-0000000005a4 | TIMING | tripleo_kernel : Drop module persistence | standalone | 0:00:43.828891 | 0.05s 2026-01-22 12:39:49.889493 | fa163e0d-6f45-64a1-ca76-0000000005a6 | TASK | Set modules persistence via /etc/modules 2026-01-22 12:39:49.933589 | fa163e0d-6f45-64a1-ca76-0000000005a6 | SKIPPED | Set modules persistence via /etc/modules | standalone | item=vfio_iommu_type1 2026-01-22 12:39:49.939501 | fa163e0d-6f45-64a1-ca76-0000000005a6 | TIMING | tripleo_kernel : Set modules persistence via /etc/modules | standalone | 0:00:43.904509 | 0.05s 2026-01-22 12:39:49.965593 | fa163e0d-6f45-64a1-ca76-0000000005a8 | TASK | Modules reload 2026-01-22 12:39:50.004190 | fa163e0d-6f45-64a1-ca76-0000000005a8 | SKIPPED | Modules reload | standalone 2026-01-22 12:39:50.005344 | fa163e0d-6f45-64a1-ca76-0000000005a8 | TIMING | tripleo_kernel : Modules reload | standalone | 0:00:43.970352 | 0.04s 2026-01-22 12:39:50.030714 | fa163e0d-6f45-64a1-ca76-0000000005aa | TASK | Reboot tasks 2026-01-22 12:39:50.081523 | fa163e0d-6f45-64a1-ca76-0000000005aa | SKIPPED | Reboot tasks | standalone 2026-01-22 12:39:50.082852 | fa163e0d-6f45-64a1-ca76-0000000005aa | TIMING | tripleo_kernel : Reboot tasks | standalone | 0:00:44.047857 | 0.05s 2026-01-22 12:39:50.109921 | fa163e0d-6f45-64a1-ca76-0000000005ab | TASK | Skipping reboot for deployed node 2026-01-22 12:39:50.161755 | fa163e0d-6f45-64a1-ca76-0000000005ab | SKIPPED | Skipping reboot for deployed node | standalone 2026-01-22 12:39:50.162979 | fa163e0d-6f45-64a1-ca76-0000000005ab | TIMING | tripleo_kernel : Skipping reboot for 
deployed node | standalone | 0:00:44.127987 | 0.05s PLAY [Server pre network steps] ************************************************ 2026-01-22 12:39:50.341212 | fa163e0d-6f45-64a1-ca76-00000000003f | TASK | Server pre-network deployments 2026-01-22 12:39:50.366164 | fa163e0d-6f45-64a1-ca76-00000000003f | OK | Server pre-network deployments | standalone -> localhost | result={ "changed": false, "msg": "Use --start-at-task 'Server pre-network deployments' to resume from this task" } [WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-00000000003f') missing from stats 2026-01-22 12:39:50.386019 | fa163e0d-6f45-64a1-ca76-000000000043 | TASK | Hiera config 2026-01-22 12:39:50.411316 | fa163e0d-6f45-64a1-ca76-000000000043 | TIMING | Hiera config | standalone | 0:00:44.376328 | 0.02s 2026-01-22 12:39:50.458268 | fa163e0d-6f45-64a1-ca76-000000000721 | TASK | Create /etc/puppet/hieradata 2026-01-22 12:39:50.685788 | fa163e0d-6f45-64a1-ca76-000000000721 | CHANGED | Create /etc/puppet/hieradata | standalone 2026-01-22 12:39:50.687225 | fa163e0d-6f45-64a1-ca76-000000000721 | TIMING | tripleo_hieradata : Create /etc/puppet/hieradata | standalone | 0:00:44.652227 | 0.23s 2026-01-22 12:39:50.712579 | fa163e0d-6f45-64a1-ca76-000000000722 | TASK | Write hiera config 2026-01-22 12:39:51.178036 | fa163e0d-6f45-64a1-ca76-000000000722 | CHANGED | Write hiera config | standalone 2026-01-22 12:39:51.179420 | fa163e0d-6f45-64a1-ca76-000000000722 | TIMING | tripleo_hieradata : Write hiera config | standalone | 0:00:45.144421 | 0.47s 2026-01-22 12:39:51.229629 | fa163e0d-6f45-64a1-ca76-000000000044 | TASK | Hiera symlink 2026-01-22 12:39:51.460451 | fa163e0d-6f45-64a1-ca76-000000000044 | CHANGED | Hiera symlink | standalone 2026-01-22 12:39:51.461976 | fa163e0d-6f45-64a1-ca76-000000000044 | TIMING | Hiera symlink | standalone | 0:00:45.426984 | 0.23s 2026-01-22 12:39:51.488103 | fa163e0d-6f45-64a1-ca76-000000000045 | TASK | Hieradata from vars 2026-01-22 12:39:51.510232 | fa163e0d-6f45-64a1-ca76-000000000045 | TIMING | Hieradata from vars | standalone | 0:00:45.475227 | 0.02s 2026-01-22 12:39:51.573551 | fa163e0d-6f45-64a1-ca76-00000000077c | TASK | Copy overcloud.json to all_nodes.json 2026-01-22 12:39:52.003488 | fa163e0d-6f45-64a1-ca76-00000000077c | CHANGED | Copy overcloud.json to all_nodes.json | standalone 2026-01-22 12:39:52.004969 | fa163e0d-6f45-64a1-ca76-00000000077c | TIMING | tripleo_hieradata : Copy overcloud.json to all_nodes.json | standalone | 0:00:45.969977 | 0.43s 2026-01-22 12:39:52.031026 | fa163e0d-6f45-64a1-ca76-00000000077d | TASK | Render hieradata from template 2026-01-22 12:39:52.549595 | fa163e0d-6f45-64a1-ca76-00000000077d | CHANGED | Render hieradata from template | standalone | item=bootstrap_node 2026-01-22 12:39:52.551262 | fa163e0d-6f45-64a1-ca76-00000000077d | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:00:46.516265 | 0.52s 2026-01-22 12:39:53.079880 | fa163e0d-6f45-64a1-ca76-00000000077d | CHANGED | Render hieradata from template | standalone | item=vip_data 2026-01-22 12:39:53.081427 | fa163e0d-6f45-64a1-ca76-00000000077d | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:00:47.046444 | 1.05s 2026-01-22 12:39:53.693824 | fa163e0d-6f45-64a1-ca76-00000000077d | CHANGED | Render hieradata from template | standalone | item=net_ip_map 2026-01-22 12:39:53.696291 | fa163e0d-6f45-64a1-ca76-00000000077d | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:00:47.661296 | 1.66s 2026-01-22 
12:39:54.178957 | fa163e0d-6f45-64a1-ca76-00000000077d | CHANGED | Render hieradata from template | standalone | item=cloud_domain 2026-01-22 12:39:54.179812 | fa163e0d-6f45-64a1-ca76-00000000077d | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:00:48.144827 | 2.15s 2026-01-22 12:39:54.697510 | fa163e0d-6f45-64a1-ca76-00000000077d | CHANGED | Render hieradata from template | standalone | item=fqdn 2026-01-22 12:39:54.698244 | fa163e0d-6f45-64a1-ca76-00000000077d | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:00:48.663260 | 2.67s 2026-01-22 12:39:55.186977 | fa163e0d-6f45-64a1-ca76-00000000077d | CHANGED | Render hieradata from template | standalone | item=service_names 2026-01-22 12:39:55.188040 | fa163e0d-6f45-64a1-ca76-00000000077d | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:00:49.153051 | 3.16s 2026-01-22 12:39:55.596081 | fa163e0d-6f45-64a1-ca76-00000000077d | CHANGED | Render hieradata from template | standalone | item=service_configs 2026-01-22 12:39:55.598133 | fa163e0d-6f45-64a1-ca76-00000000077d | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:00:49.563142 | 3.57s 2026-01-22 12:39:56.065998 | fa163e0d-6f45-64a1-ca76-00000000077d | CHANGED | Render hieradata from template | standalone | item=extraconfig 2026-01-22 12:39:56.067997 | fa163e0d-6f45-64a1-ca76-00000000077d | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:00:50.033006 | 4.04s 2026-01-22 12:39:56.519226 | fa163e0d-6f45-64a1-ca76-00000000077d | CHANGED | Render hieradata from template | standalone | item=role_extraconfig 2026-01-22 12:39:56.519946 | fa163e0d-6f45-64a1-ca76-00000000077d | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:00:50.484963 | 4.49s 2026-01-22 12:39:56.987672 | fa163e0d-6f45-64a1-ca76-00000000077d | CHANGED | Render hieradata from template | standalone | item=ovn_chassis_mac_map 2026-01-22 12:39:56.988731 | fa163e0d-6f45-64a1-ca76-00000000077d | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:00:50.953742 | 4.96s 2026-01-22 12:39:57.003143 | fa163e0d-6f45-64a1-ca76-00000000077d | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:00:50.968146 | 4.97s 2026-01-22 12:39:57.053523 | fa163e0d-6f45-64a1-ca76-000000000046 | TASK | Ensure ansible_managed hieradata file exists 2026-01-22 12:39:57.082501 | fa163e0d-6f45-64a1-ca76-000000000046 | TIMING | Ensure ansible_managed hieradata file exists | standalone | 0:00:51.047496 | 0.03s 2026-01-22 12:39:57.153934 | fa163e0d-6f45-64a1-ca76-000000000821 | TASK | Check for hieradata file 2026-01-22 12:39:57.367098 | fa163e0d-6f45-64a1-ca76-000000000821 | OK | Check for hieradata file | standalone 2026-01-22 12:39:57.368364 | fa163e0d-6f45-64a1-ca76-000000000821 | TIMING | tripleo_hieradata : Check for hieradata file | standalone | 0:00:51.333372 | 0.21s 2026-01-22 12:39:57.393666 | fa163e0d-6f45-64a1-ca76-000000000823 | TASK | Get existing data 2026-01-22 12:39:57.431843 | fa163e0d-6f45-64a1-ca76-000000000823 | SKIPPED | Get existing data | standalone 2026-01-22 12:39:57.433084 | fa163e0d-6f45-64a1-ca76-000000000823 | TIMING | tripleo_hieradata : Get existing data | standalone | 0:00:51.398093 | 0.04s 2026-01-22 12:39:57.457957 | fa163e0d-6f45-64a1-ca76-000000000824 | TASK | Set data fact 2026-01-22 12:39:57.507529 | fa163e0d-6f45-64a1-ca76-000000000824 | SKIPPED | Set data fact | standalone 2026-01-22 12:39:57.508788 | 
fa163e0d-6f45-64a1-ca76-000000000824 | TIMING | tripleo_hieradata : Set data fact | standalone | 0:00:51.473796 | 0.05s 2026-01-22 12:39:57.534079 | fa163e0d-6f45-64a1-ca76-000000000826 | TASK | Write ansible hieradata file 2026-01-22 12:39:58.023668 | fa163e0d-6f45-64a1-ca76-000000000826 | CHANGED | Write ansible hieradata file | standalone 2026-01-22 12:39:58.024904 | fa163e0d-6f45-64a1-ca76-000000000826 | TIMING | tripleo_hieradata : Write ansible hieradata file | standalone | 0:00:51.989913 | 0.49s 2026-01-22 12:39:58.124992 | fa163e0d-6f45-64a1-ca76-000000000047 | TIMING | include_tasks | standalone | 0:00:52.089988 | 0.05s PLAY [Server network deployments] ********************************************** 2026-01-22 12:39:58.286735 | fa163e0d-6f45-64a1-ca76-00000000004b | TASK | Network Configuration 2026-01-22 12:39:58.312779 | fa163e0d-6f45-64a1-ca76-00000000004b | OK | Network Configuration | standalone -> localhost | result={ "changed": false, "msg": "Use --start-at-task 'Network Configuration' to resume from this task" } [WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-00000000004b') missing from stats 2026-01-22 12:39:58.336131 | fa163e0d-6f45-64a1-ca76-00000000004c | TASK | Check NetworkConfig script existence 2026-01-22 12:39:58.361921 | fa163e0d-6f45-64a1-ca76-00000000004c | SKIPPED | Check NetworkConfig script existence | standalone 2026-01-22 12:39:58.362782 | fa163e0d-6f45-64a1-ca76-00000000004c | TIMING | Check NetworkConfig script existence | standalone | 0:00:52.327796 | 0.03s 2026-01-22 12:39:58.382635 | fa163e0d-6f45-64a1-ca76-000000000052 | TASK | Gather SELinux fact if needed 2026-01-22 12:39:58.411362 | fa163e0d-6f45-64a1-ca76-000000000052 | SKIPPED | Gather SELinux fact if needed | standalone 2026-01-22 12:39:58.412441 | fa163e0d-6f45-64a1-ca76-000000000052 | TIMING | tripleo_network_config : Gather SELinux fact if needed | standalone | 0:00:52.377448 | 0.03s 2026-01-22 12:39:58.432412 | fa163e0d-6f45-64a1-ca76-000000000053 | TASK | Create fcontext entry for tripleoconfig 2026-01-22 12:39:59.083441 | fa163e0d-6f45-64a1-ca76-000000000053 | OK | Create fcontext entry for tripleoconfig | standalone 2026-01-22 12:39:59.084788 | fa163e0d-6f45-64a1-ca76-000000000053 | TIMING | tripleo_network_config : Create fcontext entry for tripleoconfig | standalone | 0:00:53.049793 | 0.65s 2026-01-22 12:39:59.107476 | fa163e0d-6f45-64a1-ca76-000000000054 | TASK | Ensure requirements are satisfied 2026-01-22 12:39:59.134044 | fa163e0d-6f45-64a1-ca76-000000000054 | TIMING | Ensure requirements are satisfied | standalone | 0:00:53.099048 | 0.03s 2026-01-22 12:39:59.191776 | fa163e0d-6f45-64a1-ca76-0000000008bf | TASK | Gather facts if they don't exist 2026-01-22 12:39:59.239506 | fa163e0d-6f45-64a1-ca76-0000000008bf | SKIPPED | Gather facts if they don't exist | standalone 2026-01-22 12:39:59.240521 | fa163e0d-6f45-64a1-ca76-0000000008bf | TIMING | tripleo_bootstrap : Gather facts if they don't exist | standalone | 0:00:53.205529 | 0.05s 2026-01-22 12:39:59.265118 | fa163e0d-6f45-64a1-ca76-0000000008c0 | TASK | Gather variables for each operating system 2026-01-22 12:39:59.379135 | fa163e0d-6f45-64a1-ca76-0000000008c0 | SKIPPED | Gather variables for each operating system | standalone | item=/usr/share/ansible/roles/tripleo_bootstrap/vars/redhat-9.yml 2026-01-22 12:39:59.391157 | fa163e0d-6f45-64a1-ca76-0000000008c0 | TIMING | tripleo_bootstrap : Gather variables for each operating system | standalone | 0:00:53.356164 | 0.12s 2026-01-22 12:39:59.416574 | 
fa163e0d-6f45-64a1-ca76-0000000008c2 | TASK | Check required packages to bootstrap TripleO is installed 2026-01-22 12:39:59.694458 | fa163e0d-6f45-64a1-ca76-0000000008c2 | CHANGED | Check required packages to bootstrap TripleO is installed | standalone 2026-01-22 12:39:59.695754 | fa163e0d-6f45-64a1-ca76-0000000008c2 | TIMING | tripleo_bootstrap : Check required packages to bootstrap TripleO is installed | standalone | 0:00:53.660764 | 0.28s 2026-01-22 12:39:59.720553 | fa163e0d-6f45-64a1-ca76-0000000008c3 | TASK | Deploy required packages to bootstrap TripleO 2026-01-22 12:39:59.753041 | fa163e0d-6f45-64a1-ca76-0000000008c3 | SKIPPED | Deploy required packages to bootstrap TripleO | standalone 2026-01-22 12:39:59.754097 | fa163e0d-6f45-64a1-ca76-0000000008c3 | TIMING | tripleo_bootstrap : Deploy required packages to bootstrap TripleO | standalone | 0:00:53.719106 | 0.03s 2026-01-22 12:39:59.779062 | fa163e0d-6f45-64a1-ca76-0000000008c4 | TASK | Ensure packages are actually well installed 2026-01-22 12:40:00.742091 | fa163e0d-6f45-64a1-ca76-0000000008c4 | CHANGED | Ensure packages are actually well installed | standalone 2026-01-22 12:40:00.743348 | fa163e0d-6f45-64a1-ca76-0000000008c4 | TIMING | tripleo_bootstrap : Ensure packages are actually well installed | standalone | 0:00:54.708357 | 0.96s 2026-01-22 12:40:00.794496 | fa163e0d-6f45-64a1-ca76-000000000056 | TASK | Set 'no-auto-default=*' in /etc/NetworkManager/NetworkManager.conf 2026-01-22 12:40:01.038773 | fa163e0d-6f45-64a1-ca76-000000000056 | CHANGED | Set 'no-auto-default=*' in /etc/NetworkManager/NetworkManager.conf | standalone 2026-01-22 12:40:01.040086 | fa163e0d-6f45-64a1-ca76-000000000056 | TIMING | tripleo_network_config : Set 'no-auto-default=*' in /etc/NetworkManager/NetworkManager.conf | standalone | 0:00:55.005095 | 0.24s 2026-01-22 12:40:01.065024 | fa163e0d-6f45-64a1-ca76-000000000057 | TASK | Reload NetworkManager 2026-01-22 12:40:01.705842 | fa163e0d-6f45-64a1-ca76-000000000057 | CHANGED | Reload NetworkManager | standalone 2026-01-22 12:40:01.707622 | fa163e0d-6f45-64a1-ca76-000000000057 | TIMING | tripleo_network_config : Reload NetworkManager | standalone | 0:00:55.672631 | 0.64s 2026-01-22 12:40:01.734934 | fa163e0d-6f45-64a1-ca76-000000000059 | TASK | Ensure /var/lib/tripleo-config directory exists 2026-01-22 12:40:01.966546 | fa163e0d-6f45-64a1-ca76-000000000059 | CHANGED | Ensure /var/lib/tripleo-config directory exists | standalone 2026-01-22 12:40:01.967820 | fa163e0d-6f45-64a1-ca76-000000000059 | TIMING | tripleo_network_config : Ensure /var/lib/tripleo-config directory exists | standalone | 0:00:55.932830 | 0.23s 2026-01-22 12:40:01.994599 | fa163e0d-6f45-64a1-ca76-00000000005a | TASK | Check for previous run of NetworkConfig 2026-01-22 12:40:02.213436 | fa163e0d-6f45-64a1-ca76-00000000005a | OK | Check for previous run of NetworkConfig | standalone 2026-01-22 12:40:02.214801 | fa163e0d-6f45-64a1-ca76-00000000005a | TIMING | tripleo_network_config : Check for previous run of NetworkConfig | standalone | 0:00:56.179808 | 0.22s 2026-01-22 12:40:02.239859 | fa163e0d-6f45-64a1-ca76-00000000005b | TASK | Check result of previous run of NetworkConfig 2026-01-22 12:40:02.267273 | fa163e0d-6f45-64a1-ca76-00000000005b | SKIPPED | Check result of previous run of NetworkConfig | standalone 2026-01-22 12:40:02.268517 | fa163e0d-6f45-64a1-ca76-00000000005b | TIMING | tripleo_network_config : Check result of previous run of NetworkConfig | standalone | 0:00:56.233523 | 0.03s 2026-01-22 12:40:02.293996 | 
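Illustrative sketch (not from this job's playbooks): the two entries above, "Set 'no-auto-default=*' in /etc/NetworkManager/NetworkManager.conf" and "Reload NetworkManager", amount to an INI edit followed by a service reload. A minimal version of that pattern is sketched below; placing the option in the [main] section is an assumption, the file path comes from the log.

# Sketch: stop NetworkManager from auto-creating default profiles,
# then reload it so the change takes effect.
- name: Set 'no-auto-default=*' in NetworkManager.conf
  community.general.ini_file:
    path: /etc/NetworkManager/NetworkManager.conf
    section: main                  # assumed section for this option
    option: no-auto-default
    value: "*"
    mode: "0644"

- name: Reload NetworkManager
  ansible.builtin.systemd:
    name: NetworkManager
    state: reloaded
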
fa163e0d-6f45-64a1-ca76-00000000005c | TASK | Ensure we get cloud-init 2026-01-22 12:40:02.503696 | fa163e0d-6f45-64a1-ca76-00000000005c | OK | Ensure we get cloud-init | standalone 2026-01-22 12:40:02.504458 | fa163e0d-6f45-64a1-ca76-00000000005c | TIMING | tripleo_network_config : Ensure we get cloud-init | standalone | 0:00:56.469474 | 0.21s 2026-01-22 12:40:02.524178 | fa163e0d-6f45-64a1-ca76-00000000005e | TASK | Create /etc/os-net-config directory 2026-01-22 12:40:02.761195 | fa163e0d-6f45-64a1-ca76-00000000005e | OK | Create /etc/os-net-config directory | standalone 2026-01-22 12:40:02.762381 | fa163e0d-6f45-64a1-ca76-00000000005e | TIMING | tripleo_network_config : Create /etc/os-net-config directory | standalone | 0:00:56.727391 | 0.24s 2026-01-22 12:40:02.780098 | fa163e0d-6f45-64a1-ca76-00000000005f | TASK | Create os-net-config mappings from lookup data 2026-01-22 12:40:03.134362 | fa163e0d-6f45-64a1-ca76-00000000005f | OK | Create os-net-config mappings from lookup data | standalone 2026-01-22 12:40:03.135580 | fa163e0d-6f45-64a1-ca76-00000000005f | TIMING | tripleo_network_config : Create os-net-config mappings from lookup data | standalone | 0:00:57.100589 | 0.35s 2026-01-22 12:40:03.160290 | fa163e0d-6f45-64a1-ca76-000000000060 | TASK | Write os-net-config mappings file /etc/os-net-config/mapping.yaml 2026-01-22 12:40:03.241378 | fa163e0d-6f45-64a1-ca76-000000000060 | SKIPPED | Write os-net-config mappings file /etc/os-net-config/mapping.yaml | standalone 2026-01-22 12:40:03.242608 | fa163e0d-6f45-64a1-ca76-000000000060 | TIMING | tripleo_network_config : Write os-net-config mappings file /etc/os-net-config/mapping.yaml | standalone | 0:00:57.207615 | 0.08s 2026-01-22 12:40:03.268010 | fa163e0d-6f45-64a1-ca76-000000000062 | TASK | Remove /var/lib/tripleo-config/scripts directory 2026-01-22 12:40:03.546349 | fa163e0d-6f45-64a1-ca76-000000000062 | OK | Remove /var/lib/tripleo-config/scripts directory | standalone 2026-01-22 12:40:03.547606 | fa163e0d-6f45-64a1-ca76-000000000062 | TIMING | tripleo_network_config : Remove /var/lib/tripleo-config/scripts directory | standalone | 0:00:57.512613 | 0.28s 2026-01-22 12:40:03.572999 | fa163e0d-6f45-64a1-ca76-000000000063 | TASK | Run NetworkConfig with tripleo_os_net_config 2026-01-22 12:40:03.642533 | fa163e0d-6f45-64a1-ca76-000000000063 | TIMING | tripleo_network_config : Run NetworkConfig with tripleo_os_net_config | standalone | 0:00:57.607534 | 0.07s 2026-01-22 12:40:03.668307 | c7999c0d-13ca-4e2c-a297-e2ea8d948da7 | INCLUDED | /usr/share/ansible/roles/tripleo_network_config/tasks/os_net_config.yml | standalone 2026-01-22 12:40:03.700589 | fa163e0d-6f45-64a1-ca76-000000000909 | TASK | Set nic_config_file fact 2026-01-22 12:40:03.782725 | fa163e0d-6f45-64a1-ca76-000000000909 | OK | Set nic_config_file fact | standalone 2026-01-22 12:40:03.783812 | fa163e0d-6f45-64a1-ca76-000000000909 | TIMING | tripleo_network_config : Set nic_config_file fact | standalone | 0:00:57.748822 | 0.08s 2026-01-22 12:40:03.809202 | fa163e0d-6f45-64a1-ca76-00000000090a | TASK | Render overidden network config 2026-01-22 12:40:03.890682 | fa163e0d-6f45-64a1-ca76-00000000090a | SKIPPED | Render overidden network config | standalone 2026-01-22 12:40:03.891942 | fa163e0d-6f45-64a1-ca76-00000000090a | TIMING | tripleo_network_config : Render overidden network config | standalone | 0:00:57.856949 | 0.08s 2026-01-22 12:40:03.918014 | fa163e0d-6f45-64a1-ca76-00000000090b | TASK | Render network_config 2026-01-22 12:40:03.979939 | 
fa163e0d-6f45-64a1-ca76-00000000090b | SKIPPED | Render network_config | standalone 2026-01-22 12:40:03.981151 | fa163e0d-6f45-64a1-ca76-00000000090b | TIMING | tripleo_network_config : Render network_config | standalone | 0:00:57.946158 | 0.06s 2026-01-22 12:40:04.006867 | fa163e0d-6f45-64a1-ca76-00000000090c | TASK | Render network_config from template 2026-01-22 12:40:04.567246 | fa163e0d-6f45-64a1-ca76-00000000090c | CHANGED | Render network_config from template | standalone 2026-01-22 12:40:04.568600 | fa163e0d-6f45-64a1-ca76-00000000090c | TIMING | tripleo_network_config : Render network_config from template | standalone | 0:00:58.533607 | 0.56s 2026-01-22 12:40:04.594716 | fa163e0d-6f45-64a1-ca76-00000000090d | TASK | Run tripleo_os_net_config_module with network_config ASYNC OK on standalone: jid=810433345804.47404 2026-01-22 12:40:08.662848 | fa163e0d-6f45-64a1-ca76-00000000090d | OK | Run tripleo_os_net_config_module with network_config | standalone 2026-01-22 12:40:08.664838 | fa163e0d-6f45-64a1-ca76-00000000090d | TIMING | tripleo_network_config : Run tripleo_os_net_config_module with network_config | standalone | 0:01:02.629846 | 4.07s 2026-01-22 12:40:08.693131 | fa163e0d-6f45-64a1-ca76-000000000064 | TASK | Write rc of NetworkConfig script 2026-01-22 12:40:09.222710 | fa163e0d-6f45-64a1-ca76-000000000064 | CHANGED | Write rc of NetworkConfig script | standalone 2026-01-22 12:40:09.224178 | fa163e0d-6f45-64a1-ca76-000000000064 | TIMING | tripleo_network_config : Write rc of NetworkConfig script | standalone | 0:01:03.189187 | 0.53s 2026-01-22 12:40:09.249740 | fa163e0d-6f45-64a1-ca76-000000000065 | TASK | Disable cloud-init network config 2026-01-22 12:40:09.755541 | fa163e0d-6f45-64a1-ca76-000000000065 | CHANGED | Disable cloud-init network config | standalone 2026-01-22 12:40:09.756556 | fa163e0d-6f45-64a1-ca76-000000000065 | TIMING | tripleo_network_config : Disable cloud-init network config | standalone | 0:01:03.721571 | 0.51s 2026-01-22 12:40:09.776689 | fa163e0d-6f45-64a1-ca76-000000000066 | TASK | Ensure network service is enabled 2026-01-22 12:40:10.153261 | fa163e0d-6f45-64a1-ca76-000000000066 | OK | Ensure network service is enabled | standalone 2026-01-22 12:40:10.155079 | fa163e0d-6f45-64a1-ca76-000000000066 | TIMING | tripleo_network_config : Ensure network service is enabled | standalone | 0:01:04.120089 | 0.38s PLAY [Server network validation] *********************************************** 2026-01-22 12:40:10.324111 | fa163e0d-6f45-64a1-ca76-000000000096 | TASK | Basic Network Validation 2026-01-22 12:40:10.349713 | fa163e0d-6f45-64a1-ca76-000000000096 | TIMING | Basic Network Validation | standalone | 0:01:04.314723 | 0.02s 2026-01-22 12:40:10.393978 | fa163e0d-6f45-64a1-ca76-000000000945 | TASK | Collect default network fact 2026-01-22 12:40:10.959597 | fa163e0d-6f45-64a1-ca76-000000000945 | OK | Collect default network fact | standalone 2026-01-22 12:40:10.960945 | fa163e0d-6f45-64a1-ca76-000000000945 | TIMING | tripleo_nodes_validation : Collect default network fact | standalone | 0:01:04.925953 | 0.57s 2026-01-22 12:40:10.986549 | fa163e0d-6f45-64a1-ca76-000000000946 | TASK | Check Default IPv4 Gateway availability 2026-01-22 12:40:11.014412 | fa163e0d-6f45-64a1-ca76-000000000946 | SKIPPED | Check Default IPv4 Gateway availability | standalone 2026-01-22 12:40:11.015613 | fa163e0d-6f45-64a1-ca76-000000000946 | TIMING | tripleo_nodes_validation : Check Default IPv4 Gateway availability | standalone | 0:01:04.980621 | 0.03s 2026-01-22 12:40:11.041547 | 
fa163e0d-6f45-64a1-ca76-000000000947 | TASK | Check all networks Gateway availability 2026-01-22 12:40:11.069996 | fa163e0d-6f45-64a1-ca76-000000000947 | TIMING | tripleo_nodes_validation : Check all networks Gateway availability | standalone | 0:01:05.034999 | 0.03s 2026-01-22 12:40:11.095558 | fa163e0d-6f45-64a1-ca76-000000000949 | TASK | Check Controllers availability 2026-01-22 12:40:11.160215 | fa163e0d-6f45-64a1-ca76-000000000949 | SKIPPED | Check Controllers availability | standalone | item=192.168.122.100 2026-01-22 12:40:11.170098 | fa163e0d-6f45-64a1-ca76-000000000949 | SKIPPED | Check Controllers availability | standalone | item=172.18.0.100 2026-01-22 12:40:11.180622 | fa163e0d-6f45-64a1-ca76-000000000949 | SKIPPED | Check Controllers availability | standalone | item=172.20.0.100 2026-01-22 12:40:11.191282 | fa163e0d-6f45-64a1-ca76-000000000949 | SKIPPED | Check Controllers availability | standalone | item=172.17.0.100 2026-01-22 12:40:11.200246 | fa163e0d-6f45-64a1-ca76-000000000949 | SKIPPED | Check Controllers availability | standalone | item=172.19.0.100 2026-01-22 12:40:11.205423 | fa163e0d-6f45-64a1-ca76-000000000949 | SKIPPED | Check Controllers availability | standalone | item=172.21.0.100 2026-01-22 12:40:11.217682 | fa163e0d-6f45-64a1-ca76-000000000949 | TIMING | tripleo_nodes_validation : Check Controllers availability | standalone | 0:01:05.182649 | 0.12s 2026-01-22 12:40:11.243630 | fa163e0d-6f45-64a1-ca76-00000000094b | TASK | Verify the configured FQDN vs /etc/hosts 2026-01-22 12:40:11.272882 | fa163e0d-6f45-64a1-ca76-00000000094b | SKIPPED | Verify the configured FQDN vs /etc/hosts | standalone 2026-01-22 12:40:11.274222 | fa163e0d-6f45-64a1-ca76-00000000094b | TIMING | tripleo_nodes_validation : Verify the configured FQDN vs /etc/hosts | standalone | 0:01:05.239225 | 0.03s PLAY [Server pre deployment steps] ********************************************* 2026-01-22 12:40:11.449338 | fa163e0d-6f45-64a1-ca76-00000000009a | TASK | Server pre deployments 2026-01-22 12:40:11.466165 | fa163e0d-6f45-64a1-ca76-00000000009a | OK | Server pre deployments | standalone -> localhost | result={ "changed": false, "msg": "Use --start-at-task 'Server pre deployments' to resume from this task" } [WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-00000000009a') missing from stats 2026-01-22 12:40:11.486621 | fa163e0d-6f45-64a1-ca76-00000000009e | TASK | Hiera config 2026-01-22 12:40:11.512217 | fa163e0d-6f45-64a1-ca76-00000000009e | TIMING | Hiera config | standalone | 0:01:05.477228 | 0.02s 2026-01-22 12:40:11.551219 | fa163e0d-6f45-64a1-ca76-00000000099e | TASK | Create /etc/puppet/hieradata 2026-01-22 12:40:11.782232 | fa163e0d-6f45-64a1-ca76-00000000099e | OK | Create /etc/puppet/hieradata | standalone 2026-01-22 12:40:11.783620 | fa163e0d-6f45-64a1-ca76-00000000099e | TIMING | tripleo_hieradata : Create /etc/puppet/hieradata | standalone | 0:01:05.748628 | 0.23s 2026-01-22 12:40:11.809097 | fa163e0d-6f45-64a1-ca76-00000000099f | TASK | Write hiera config 2026-01-22 12:40:12.331123 | fa163e0d-6f45-64a1-ca76-00000000099f | OK | Write hiera config | standalone 2026-01-22 12:40:12.332536 | fa163e0d-6f45-64a1-ca76-00000000099f | TIMING | tripleo_hieradata : Write hiera config | standalone | 0:01:06.297544 | 0.52s 2026-01-22 12:40:12.379876 | fa163e0d-6f45-64a1-ca76-00000000009f | TASK | Hiera symlink 2026-01-22 12:40:12.621163 | fa163e0d-6f45-64a1-ca76-00000000009f | OK | Hiera symlink | standalone 2026-01-22 12:40:12.622475 | fa163e0d-6f45-64a1-ca76-00000000009f | 
TIMING | Hiera symlink | standalone | 0:01:06.587483 | 0.24s 2026-01-22 12:40:12.647001 | fa163e0d-6f45-64a1-ca76-0000000000a0 | TASK | Hieradata from vars 2026-01-22 12:40:12.675374 | fa163e0d-6f45-64a1-ca76-0000000000a0 | TIMING | Hieradata from vars | standalone | 0:01:06.640379 | 0.03s 2026-01-22 12:40:12.738225 | fa163e0d-6f45-64a1-ca76-0000000009f9 | TASK | Copy overcloud.json to all_nodes.json 2026-01-22 12:40:13.196288 | fa163e0d-6f45-64a1-ca76-0000000009f9 | OK | Copy overcloud.json to all_nodes.json | standalone 2026-01-22 12:40:13.197758 | fa163e0d-6f45-64a1-ca76-0000000009f9 | TIMING | tripleo_hieradata : Copy overcloud.json to all_nodes.json | standalone | 0:01:07.162766 | 0.46s 2026-01-22 12:40:13.222981 | fa163e0d-6f45-64a1-ca76-0000000009fa | TASK | Render hieradata from template 2026-01-22 12:40:13.699450 | fa163e0d-6f45-64a1-ca76-0000000009fa | OK | Render hieradata from template | standalone | item=bootstrap_node 2026-01-22 12:40:13.702057 | fa163e0d-6f45-64a1-ca76-0000000009fa | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:01:07.667058 | 0.48s 2026-01-22 12:40:14.245171 | fa163e0d-6f45-64a1-ca76-0000000009fa | OK | Render hieradata from template | standalone | item=vip_data 2026-01-22 12:40:14.247210 | fa163e0d-6f45-64a1-ca76-0000000009fa | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:01:08.212220 | 1.02s 2026-01-22 12:40:14.804354 | fa163e0d-6f45-64a1-ca76-0000000009fa | OK | Render hieradata from template | standalone | item=net_ip_map 2026-01-22 12:40:14.805550 | fa163e0d-6f45-64a1-ca76-0000000009fa | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:01:08.770558 | 1.58s 2026-01-22 12:40:15.276885 | fa163e0d-6f45-64a1-ca76-0000000009fa | OK | Render hieradata from template | standalone | item=cloud_domain 2026-01-22 12:40:15.278948 | fa163e0d-6f45-64a1-ca76-0000000009fa | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:01:09.243958 | 2.05s 2026-01-22 12:40:15.794763 | fa163e0d-6f45-64a1-ca76-0000000009fa | OK | Render hieradata from template | standalone | item=fqdn 2026-01-22 12:40:15.795605 | fa163e0d-6f45-64a1-ca76-0000000009fa | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:01:09.760622 | 2.57s 2026-01-22 12:40:16.259813 | fa163e0d-6f45-64a1-ca76-0000000009fa | OK | Render hieradata from template | standalone | item=service_names 2026-01-22 12:40:16.261987 | fa163e0d-6f45-64a1-ca76-0000000009fa | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:01:10.226997 | 3.04s 2026-01-22 12:40:16.719726 | fa163e0d-6f45-64a1-ca76-0000000009fa | OK | Render hieradata from template | standalone | item=service_configs 2026-01-22 12:40:16.721950 | fa163e0d-6f45-64a1-ca76-0000000009fa | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:01:10.686958 | 3.50s 2026-01-22 12:40:17.215735 | fa163e0d-6f45-64a1-ca76-0000000009fa | OK | Render hieradata from template | standalone | item=extraconfig 2026-01-22 12:40:17.217868 | fa163e0d-6f45-64a1-ca76-0000000009fa | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:01:11.182878 | 3.99s 2026-01-22 12:40:17.688731 | fa163e0d-6f45-64a1-ca76-0000000009fa | OK | Render hieradata from template | standalone | item=role_extraconfig 2026-01-22 12:40:17.691136 | fa163e0d-6f45-64a1-ca76-0000000009fa | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:01:11.656142 | 4.47s 2026-01-22 
12:40:18.112491 | fa163e0d-6f45-64a1-ca76-0000000009fa | OK | Render hieradata from template | standalone | item=ovn_chassis_mac_map 2026-01-22 12:40:18.114496 | fa163e0d-6f45-64a1-ca76-0000000009fa | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:01:12.079506 | 4.89s 2026-01-22 12:40:18.127493 | fa163e0d-6f45-64a1-ca76-0000000009fa | TIMING | tripleo_hieradata : Render hieradata from template | standalone | 0:01:12.092495 | 4.90s 2026-01-22 12:40:18.177961 | fa163e0d-6f45-64a1-ca76-0000000000a1 | TASK | Ensure ansible_managed hieradata file exists 2026-01-22 12:40:18.206435 | fa163e0d-6f45-64a1-ca76-0000000000a1 | TIMING | Ensure ansible_managed hieradata file exists | standalone | 0:01:12.171435 | 0.03s 2026-01-22 12:40:18.275012 | fa163e0d-6f45-64a1-ca76-000000000a9e | TASK | Check for hieradata file 2026-01-22 12:40:18.495213 | fa163e0d-6f45-64a1-ca76-000000000a9e | OK | Check for hieradata file | standalone 2026-01-22 12:40:18.496561 | fa163e0d-6f45-64a1-ca76-000000000a9e | TIMING | tripleo_hieradata : Check for hieradata file | standalone | 0:01:12.461569 | 0.22s 2026-01-22 12:40:18.521916 | fa163e0d-6f45-64a1-ca76-000000000aa0 | TASK | Get existing data 2026-01-22 12:40:18.571442 | fa163e0d-6f45-64a1-ca76-000000000aa0 | SKIPPED | Get existing data | standalone 2026-01-22 12:40:18.572570 | fa163e0d-6f45-64a1-ca76-000000000aa0 | TIMING | tripleo_hieradata : Get existing data | standalone | 0:01:12.537578 | 0.05s 2026-01-22 12:40:18.598054 | fa163e0d-6f45-64a1-ca76-000000000aa1 | TASK | Set data fact 2026-01-22 12:40:18.648432 | fa163e0d-6f45-64a1-ca76-000000000aa1 | SKIPPED | Set data fact | standalone 2026-01-22 12:40:18.649542 | fa163e0d-6f45-64a1-ca76-000000000aa1 | TIMING | tripleo_hieradata : Set data fact | standalone | 0:01:12.614551 | 0.05s 2026-01-22 12:40:18.675335 | fa163e0d-6f45-64a1-ca76-000000000aa3 | TASK | Write ansible hieradata file 2026-01-22 12:40:19.188115 | fa163e0d-6f45-64a1-ca76-000000000aa3 | OK | Write ansible hieradata file | standalone 2026-01-22 12:40:19.189445 | fa163e0d-6f45-64a1-ca76-000000000aa3 | TIMING | tripleo_hieradata : Write ansible hieradata file | standalone | 0:01:13.154455 | 0.51s 2026-01-22 12:40:19.287584 | fa163e0d-6f45-64a1-ca76-0000000000a2 | TIMING | include_tasks | standalone | 0:01:13.252582 | 0.05s PLAY [Host prep steps] ********************************************************* 2026-01-22 12:40:19.427025 | fa163e0d-6f45-64a1-ca76-0000000000a6 | TASK | Host prep steps 2026-01-22 12:40:19.453956 | fa163e0d-6f45-64a1-ca76-0000000000a6 | OK | Host prep steps | standalone -> localhost | result={ "changed": false, "msg": "Use --start-at-task 'Host prep steps' to resume from this task" } [WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-0000000000a6') missing from stats 2026-01-22 12:40:19.528263 | fa163e0d-6f45-64a1-ca76-0000000000a7 | TASK | Deploy Artifacts 2026-01-22 12:40:19.632392 | fa163e0d-6f45-64a1-ca76-0000000000a7 | SKIPPED | Deploy Artifacts | standalone 2026-01-22 12:40:19.633478 | fa163e0d-6f45-64a1-ca76-0000000000a7 | TIMING | Deploy Artifacts | standalone | 0:01:13.598486 | 0.10s [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. 
Found: '{{ playbook_dir }}/{{ _task_file_path }}' is exists 2026-01-22 12:40:19.695857 | fa163e0d-6f45-64a1-ca76-0000000000a8 | TIMING | include_tasks | standalone | 0:01:13.660866 | 0.04s 2026-01-22 12:40:19.889206 | 58b3d77c-b830-4b28-99de-326591293312 | INCLUDED | /root/standalone-ansible-mz1ymllk/Standalone/host_prep_tasks.yaml | standalone 2026-01-22 12:40:19.942545 | fa163e0d-6f45-64a1-ca76-000000000b1e | TASK | Run firewall role 2026-01-22 12:40:19.967020 | fa163e0d-6f45-64a1-ca76-000000000b1e | TIMING | Run firewall role | standalone | 0:01:13.932030 | 0.02s 2026-01-22 12:40:20.029857 | fa163e0d-6f45-64a1-ca76-000000000c1d | TASK | Gather variables for each operating system 2026-01-22 12:40:20.129376 | fa163e0d-6f45-64a1-ca76-000000000c1d | OK | Gather variables for each operating system | standalone | item=/usr/share/ansible/roles/tripleo_firewall/vars/redhat.yml 2026-01-22 12:40:20.131579 | fa163e0d-6f45-64a1-ca76-000000000c1d | TIMING | tripleo_firewall : Gather variables for each operating system | standalone | 0:01:14.096586 | 0.10s 2026-01-22 12:40:20.142270 | fa163e0d-6f45-64a1-ca76-000000000c1d | TIMING | tripleo_firewall : Gather variables for each operating system | standalone | 0:01:14.107271 | 0.11s 2026-01-22 12:40:20.169003 | fa163e0d-6f45-64a1-ca76-000000000c1e | TASK | Uninstall firewalld to prevent nftables conflicts 2026-01-22 12:40:23.023772 | fa163e0d-6f45-64a1-ca76-000000000c1e | OK | Uninstall firewalld to prevent nftables conflicts | standalone 2026-01-22 12:40:23.025181 | fa163e0d-6f45-64a1-ca76-000000000c1e | TIMING | tripleo_firewall : Uninstall firewalld to prevent nftables conflicts | standalone | 0:01:16.990190 | 2.85s 2026-01-22 12:40:23.051004 | fa163e0d-6f45-64a1-ca76-000000000c22 | TASK | Create empty var for masquerade rules 2026-01-22 12:40:23.090195 | fa163e0d-6f45-64a1-ca76-000000000c22 | OK | Create empty var for masquerade rules | standalone 2026-01-22 12:40:23.091568 | fa163e0d-6f45-64a1-ca76-000000000c22 | TIMING | tripleo_firewall : Create empty var for masquerade rules | standalone | 0:01:17.056575 | 0.04s 2026-01-22 12:40:23.117737 | fa163e0d-6f45-64a1-ca76-000000000c23 | TASK | Enable masquerade 2026-01-22 12:40:23.169535 | fa163e0d-6f45-64a1-ca76-000000000c23 | SKIPPED | Enable masquerade | standalone 2026-01-22 12:40:23.170931 | fa163e0d-6f45-64a1-ca76-000000000c23 | TIMING | tripleo_firewall : Enable masquerade | standalone | 0:01:17.135933 | 0.05s 2026-01-22 12:40:23.200243 | fa163e0d-6f45-64a1-ca76-000000000c24 | TASK | Set the default frontend firewall rules 2026-01-22 12:40:23.233422 | fa163e0d-6f45-64a1-ca76-000000000c24 | OK | Set the default frontend firewall rules | standalone 2026-01-22 12:40:23.234555 | fa163e0d-6f45-64a1-ca76-000000000c24 | TIMING | tripleo_firewall : Set the default frontend firewall rules | standalone | 0:01:17.199563 | 0.03s 2026-01-22 12:40:23.259776 | fa163e0d-6f45-64a1-ca76-000000000c27 | TASK | Set frontend rule fact (non-ssl rules) 2026-01-22 12:40:23.311632 | fa163e0d-6f45-64a1-ca76-000000000c27 | OK | Set frontend rule fact (non-ssl rules) | standalone 2026-01-22 12:40:23.313350 | fa163e0d-6f45-64a1-ca76-000000000c27 | TIMING | tripleo_firewall : Set frontend rule fact (non-ssl rules) | standalone | 0:01:17.278359 | 0.05s 2026-01-22 12:40:23.340490 | fa163e0d-6f45-64a1-ca76-000000000c28 | TASK | Set frontend rule fact (ssl rules) 2026-01-22 12:40:23.396094 | fa163e0d-6f45-64a1-ca76-000000000c28 | OK | Set frontend rule fact (ssl rules) | standalone 2026-01-22 12:40:23.397834 | 
fa163e0d-6f45-64a1-ca76-000000000c28 | TIMING | tripleo_firewall : Set frontend rule fact (ssl rules) | standalone | 0:01:17.362843 | 0.06s 2026-01-22 12:40:23.423729 | fa163e0d-6f45-64a1-ca76-000000000c2a | TASK | Set frontend rule fact (non-ssl rules) 2026-01-22 12:40:23.468496 | fa163e0d-6f45-64a1-ca76-000000000c2a | SKIPPED | Set frontend rule fact (non-ssl rules) | standalone 2026-01-22 12:40:23.469821 | fa163e0d-6f45-64a1-ca76-000000000c2a | TIMING | tripleo_firewall : Set frontend rule fact (non-ssl rules) | standalone | 0:01:17.434828 | 0.04s 2026-01-22 12:40:23.498105 | fa163e0d-6f45-64a1-ca76-000000000c2b | TASK | Set frontend rule fact (ssl rules) 2026-01-22 12:40:23.531182 | fa163e0d-6f45-64a1-ca76-000000000c2b | SKIPPED | Set frontend rule fact (ssl rules) | standalone 2026-01-22 12:40:23.532291 | fa163e0d-6f45-64a1-ca76-000000000c2b | TIMING | tripleo_firewall : Set frontend rule fact (ssl rules) | standalone | 0:01:17.497299 | 0.03s 2026-01-22 12:40:23.557265 | fa163e0d-6f45-64a1-ca76-000000000c2d | TASK | Enable LOG rules 2026-01-22 12:40:23.587367 | fa163e0d-6f45-64a1-ca76-000000000c2d | SKIPPED | Enable LOG rules | standalone 2026-01-22 12:40:23.588558 | fa163e0d-6f45-64a1-ca76-000000000c2d | TIMING | tripleo_firewall : Enable LOG rules | standalone | 0:01:17.553566 | 0.03s 2026-01-22 12:40:23.612585 | fa163e0d-6f45-64a1-ca76-000000000c2e | TASK | Set rule fact 2026-01-22 12:40:23.707975 | fa163e0d-6f45-64a1-ca76-000000000c2e | OK | Set rule fact | standalone 2026-01-22 12:40:23.711267 | fa163e0d-6f45-64a1-ca76-000000000c2e | TIMING | tripleo_firewall : Set rule fact | standalone | 0:01:17.676273 | 0.10s 2026-01-22 12:40:23.740335 | fa163e0d-6f45-64a1-ca76-000000000c2f | TASK | Manage rules via nftables 2026-01-22 12:40:23.769744 | fa163e0d-6f45-64a1-ca76-000000000c2f | TIMING | Manage rules via nftables | standalone | 0:01:17.734747 | 0.03s 2026-01-22 12:40:23.853883 | fa163e0d-6f45-64a1-ca76-000000000cc9 | TIMING | tripleo_nftables : ansible.builtin.include_tasks | standalone | 0:01:17.818886 | 0.03s 2026-01-22 12:40:23.875682 | 299f52d6-9e3b-471d-acfe-f89f30ba7c0a | INCLUDED | /usr/share/ansible/roles/tripleo_nftables/tasks/service-bootstrap.yml | standalone 2026-01-22 12:40:23.900691 | fa163e0d-6f45-64a1-ca76-000000000cf2 | TASK | Ensure legacy iptables services are off 2026-01-22 12:40:24.304630 | fa163e0d-6f45-64a1-ca76-000000000cf2 | OK | Ensure legacy iptables services are off | standalone | item=iptables.service 2026-01-22 12:40:24.308137 | fa163e0d-6f45-64a1-ca76-000000000cf2 | TIMING | tripleo_nftables : Ensure legacy iptables services are off | standalone | 0:01:18.273133 | 0.41s 2026-01-22 12:40:24.638848 | fa163e0d-6f45-64a1-ca76-000000000cf2 | OK | Ensure legacy iptables services are off | standalone | item=ip6tables.service 2026-01-22 12:40:24.641327 | fa163e0d-6f45-64a1-ca76-000000000cf2 | TIMING | tripleo_nftables : Ensure legacy iptables services are off | standalone | 0:01:18.606333 | 0.74s 2026-01-22 12:40:24.653377 | fa163e0d-6f45-64a1-ca76-000000000cf2 | TIMING | tripleo_nftables : Ensure legacy iptables services are off | standalone | 0:01:18.618374 | 0.75s 2026-01-22 12:40:24.683700 | fa163e0d-6f45-64a1-ca76-000000000cf3 | TASK | Ensure nftables service is enabled and running 2026-01-22 12:40:25.366801 | fa163e0d-6f45-64a1-ca76-000000000cf3 | CHANGED | Ensure nftables service is enabled and running | standalone 2026-01-22 12:40:25.368686 | fa163e0d-6f45-64a1-ca76-000000000cf3 | TIMING | tripleo_nftables : Ensure nftables service is enabled and 
running | standalone | 0:01:19.333669 | 0.68s 2026-01-22 12:40:25.395869 | fa163e0d-6f45-64a1-ca76-000000000cf4 | TASK | Empty nftables from anything that may lay around 2026-01-22 12:40:25.436578 | fa163e0d-6f45-64a1-ca76-000000000cf4 | SKIPPED | Empty nftables from anything that may lay around | standalone 2026-01-22 12:40:25.437680 | fa163e0d-6f45-64a1-ca76-000000000cf4 | TIMING | tripleo_nftables : Empty nftables from anything that may lay around | standalone | 0:01:19.402664 | 0.04s 2026-01-22 12:40:25.491531 | fa163e0d-6f45-64a1-ca76-000000000cca | TIMING | tripleo_nftables : ansible.builtin.include_tasks | standalone | 0:01:19.456537 | 0.03s 2026-01-22 12:40:25.530413 | 8636b3a7-31c4-418e-9339-6280a2229bc1 | INCLUDED | /usr/share/ansible/roles/tripleo_nftables/tasks/configure.yml | standalone 2026-01-22 12:40:25.565945 | fa163e0d-6f45-64a1-ca76-000000000d23 | TASK | Push empty ruleset 2026-01-22 12:40:25.978385 | fa163e0d-6f45-64a1-ca76-000000000d23 | CHANGED | Push empty ruleset | standalone 2026-01-22 12:40:25.979824 | fa163e0d-6f45-64a1-ca76-000000000d23 | TIMING | tripleo_nftables : Push empty ruleset | standalone | 0:01:19.944831 | 0.41s 2026-01-22 12:40:26.005577 | fa163e0d-6f45-64a1-ca76-000000000d24 | TASK | Load empty ruleset 2026-01-22 12:40:26.250247 | fa163e0d-6f45-64a1-ca76-000000000d24 | CHANGED | Load empty ruleset | standalone 2026-01-22 12:40:26.251613 | fa163e0d-6f45-64a1-ca76-000000000d24 | TIMING | tripleo_nftables : Load empty ruleset | standalone | 0:01:20.216622 | 0.24s 2026-01-22 12:40:26.278302 | fa163e0d-6f45-64a1-ca76-000000000d26 | TASK | Use default rules 2026-01-22 12:40:26.333058 | fa163e0d-6f45-64a1-ca76-000000000d26 | OK | Use default rules | standalone 2026-01-22 12:40:26.336503 | fa163e0d-6f45-64a1-ca76-000000000d26 | TIMING | tripleo_nftables : Use default rules | standalone | 0:01:20.301509 | 0.06s 2026-01-22 12:40:26.367590 | fa163e0d-6f45-64a1-ca76-000000000d27 | TASK | Enable LOG rules 2026-01-22 12:40:26.462772 | fa163e0d-6f45-64a1-ca76-000000000d27 | OK | Enable LOG rules | standalone 2026-01-22 12:40:26.466198 | fa163e0d-6f45-64a1-ca76-000000000d27 | TIMING | tripleo_nftables : Enable LOG rules | standalone | 0:01:20.431204 | 0.10s 2026-01-22 12:40:26.496999 | fa163e0d-6f45-64a1-ca76-000000000d29 | TASK | Get current nftables content 2026-01-22 12:40:26.740319 | fa163e0d-6f45-64a1-ca76-000000000d29 | CHANGED | Get current nftables content | standalone 2026-01-22 12:40:26.741716 | fa163e0d-6f45-64a1-ca76-000000000d29 | TIMING | tripleo_nftables : Get current nftables content | standalone | 0:01:20.706724 | 0.24s 2026-01-22 12:40:26.767994 | fa163e0d-6f45-64a1-ca76-000000000d2b | TASK | Generate chain jumps 2026-01-22 12:40:27.229908 | fa163e0d-6f45-64a1-ca76-000000000d2b | CHANGED | Generate chain jumps | standalone 2026-01-22 12:40:27.232395 | fa163e0d-6f45-64a1-ca76-000000000d2b | TIMING | tripleo_nftables : Generate chain jumps | standalone | 0:01:21.197402 | 0.46s 2026-01-22 12:40:27.259401 | fa163e0d-6f45-64a1-ca76-000000000d2c | TASK | Generate chain jumps 2026-01-22 12:40:27.806631 | fa163e0d-6f45-64a1-ca76-000000000d2c | CHANGED | Generate chain jumps | standalone 2026-01-22 12:40:27.808142 | fa163e0d-6f45-64a1-ca76-000000000d2c | TIMING | tripleo_nftables : Generate chain jumps | standalone | 0:01:21.773147 | 0.55s 2026-01-22 12:40:27.834591 | fa163e0d-6f45-64a1-ca76-000000000d2d | TASK | Generate nft flushes 2026-01-22 12:40:28.304327 | fa163e0d-6f45-64a1-ca76-000000000d2d | CHANGED | Generate nft flushes | standalone 2026-01-22 
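Illustrative sketch (not the tripleo_nftables role's actual tasks): around this point the role generates nftables rule files and, as the entries just below show, validates them before loading. The check-then-load pattern can be expressed with nft's -c (check-only) mode; the ruleset path here is hypothetical.

# Sketch: syntax-check a generated ruleset before loading it, so a
# broken rule never reaches the live firewall.
- name: Validate generated nftables ruleset
  ansible.builtin.command:
    cmd: nft -c -f /etc/nftables/tripleo-rules.nft   # hypothetical path
  changed_when: false

- name: Load nftables ruleset
  ansible.builtin.command:
    cmd: nft -f /etc/nftables/tripleo-rules.nft

Running the check first mirrors the "Validate all of the generated content before loading" task recorded just after this point.
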
12:40:28.305965 | fa163e0d-6f45-64a1-ca76-000000000d2d | TIMING | tripleo_nftables : Generate nft flushes | standalone | 0:01:22.270972 | 0.47s 2026-01-22 12:40:28.332701 | fa163e0d-6f45-64a1-ca76-000000000d2e | TASK | Generate nft tripleo chains 2026-01-22 12:40:28.836742 | fa163e0d-6f45-64a1-ca76-000000000d2e | CHANGED | Generate nft tripleo chains | standalone 2026-01-22 12:40:28.838148 | fa163e0d-6f45-64a1-ca76-000000000d2e | TIMING | tripleo_nftables : Generate nft tripleo chains | standalone | 0:01:22.803155 | 0.50s 2026-01-22 12:40:28.864523 | fa163e0d-6f45-64a1-ca76-000000000d2f | TASK | Generate nft ruleset in static file 2026-01-22 12:40:30.576578 | fa163e0d-6f45-64a1-ca76-000000000d2f | CHANGED | Generate nft ruleset in static file | standalone 2026-01-22 12:40:30.578093 | fa163e0d-6f45-64a1-ca76-000000000d2f | TIMING | tripleo_nftables : Generate nft ruleset in static file | standalone | 0:01:24.543100 | 1.71s 2026-01-22 12:40:30.605451 | fa163e0d-6f45-64a1-ca76-000000000d31 | TASK | Validate all of the generated content before loading 2026-01-22 12:40:31.045389 | fa163e0d-6f45-64a1-ca76-000000000d31 | CHANGED | Validate all of the generated content before loading | standalone 2026-01-22 12:40:31.046883 | fa163e0d-6f45-64a1-ca76-000000000d31 | TIMING | tripleo_nftables : Validate all of the generated content before loading | standalone | 0:01:25.011887 | 0.44s 2026-01-22 12:40:31.075246 | fa163e0d-6f45-64a1-ca76-000000000d32 | TASK | Ensure we load our different nft rulesets on boot 2026-01-22 12:40:31.338809 | fa163e0d-6f45-64a1-ca76-000000000d32 | CHANGED | Ensure we load our different nft rulesets on boot | standalone 2026-01-22 12:40:31.340199 | fa163e0d-6f45-64a1-ca76-000000000d32 | TIMING | tripleo_nftables : Ensure we load our different nft rulesets on boot | standalone | 0:01:25.305207 | 0.26s 2026-01-22 12:40:31.394796 | fa163e0d-6f45-64a1-ca76-000000000ccb | TIMING | tripleo_nftables : ansible.builtin.include_tasks | standalone | 0:01:25.359795 | 0.03s 2026-01-22 12:40:31.414210 | b1fb42a5-281b-43bf-b505-2b59df942423 | INCLUDED | /usr/share/ansible/roles/tripleo_nftables/tasks/run.yml | standalone 2026-01-22 12:40:31.466274 | fa163e0d-6f45-64a1-ca76-000000000d9a | TASK | Inject our custom chains in nftables 2026-01-22 12:40:31.706253 | fa163e0d-6f45-64a1-ca76-000000000d9a | CHANGED | Inject our custom chains in nftables | standalone 2026-01-22 12:40:31.707592 | fa163e0d-6f45-64a1-ca76-000000000d9a | TIMING | tripleo_nftables : Inject our custom chains in nftables | standalone | 0:01:25.672600 | 0.24s 2026-01-22 12:40:31.732955 | fa163e0d-6f45-64a1-ca76-000000000d9b | TASK | Reload custom nftables ruleset files 2026-01-22 12:40:32.036267 | fa163e0d-6f45-64a1-ca76-000000000d9b | CHANGED | Reload custom nftables ruleset files | standalone 2026-01-22 12:40:32.037716 | fa163e0d-6f45-64a1-ca76-000000000d9b | TIMING | tripleo_nftables : Reload custom nftables ruleset files | standalone | 0:01:26.002718 | 0.30s 2026-01-22 12:40:32.109076 | fa163e0d-6f45-64a1-ca76-000000000b1f | TASK | create persistent logs directory 2026-01-22 12:40:32.371812 | fa163e0d-6f45-64a1-ca76-000000000b1f | CHANGED | create persistent logs directory | standalone | item={'mode': '0750', 'path': '/var/log/containers/aodh', 'setype': 'container_file_t'} 2026-01-22 12:40:32.375142 | fa163e0d-6f45-64a1-ca76-000000000b1f | TIMING | create persistent logs directory | standalone | 0:01:26.340139 | 0.26s 2026-01-22 12:40:32.597861 | fa163e0d-6f45-64a1-ca76-000000000b1f | CHANGED | create persistent logs 
directory | standalone | item={'mode': '0750', 'path': '/var/log/containers/httpd/aodh-api', 'setype': 'container_file_t'} 2026-01-22 12:40:32.600100 | fa163e0d-6f45-64a1-ca76-000000000b1f | TIMING | create persistent logs directory | standalone | 0:01:26.565111 | 0.49s 2026-01-22 12:40:32.610984 | fa163e0d-6f45-64a1-ca76-000000000b1f | TIMING | create persistent logs directory | standalone | 0:01:26.575986 | 0.50s 2026-01-22 12:40:32.635983 | fa163e0d-6f45-64a1-ca76-000000000b20 | TASK | create persistent directories 2026-01-22 12:40:32.874059 | fa163e0d-6f45-64a1-ca76-000000000b20 | OK | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/aodh', 'setype': 'container_file_t'} 2026-01-22 12:40:32.875474 | fa163e0d-6f45-64a1-ca76-000000000b20 | TIMING | create persistent directories | standalone | 0:01:26.840458 | 0.24s 2026-01-22 12:40:32.887534 | fa163e0d-6f45-64a1-ca76-000000000b20 | TIMING | create persistent directories | standalone | 0:01:26.852538 | 0.25s 2026-01-22 12:40:32.912813 | fa163e0d-6f45-64a1-ca76-000000000b21 | TASK | create persistent directories 2026-01-22 12:40:33.153417 | fa163e0d-6f45-64a1-ca76-000000000b21 | OK | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/aodh', 'setype': 'container_file_t'} 2026-01-22 12:40:33.155434 | fa163e0d-6f45-64a1-ca76-000000000b21 | TIMING | create persistent directories | standalone | 0:01:27.120432 | 0.24s 2026-01-22 12:40:33.376820 | fa163e0d-6f45-64a1-ca76-000000000b21 | CHANGED | create persistent directories | standalone | item={'path': '/var/log/aodh', 'setype': 'container_file_t'} 2026-01-22 12:40:33.378557 | fa163e0d-6f45-64a1-ca76-000000000b21 | TIMING | create persistent directories | standalone | 0:01:27.343566 | 0.46s 2026-01-22 12:40:33.389355 | fa163e0d-6f45-64a1-ca76-000000000b21 | TIMING | create persistent directories | standalone | 0:01:27.354361 | 0.48s 2026-01-22 12:40:33.409230 | fa163e0d-6f45-64a1-ca76-000000000b22 | TASK | create persistent directories 2026-01-22 12:40:33.622720 | fa163e0d-6f45-64a1-ca76-000000000b22 | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/ceilometer', 'setype': 'container_file_t'} 2026-01-22 12:40:33.624180 | fa163e0d-6f45-64a1-ca76-000000000b22 | TIMING | create persistent directories | standalone | 0:01:27.589185 | 0.21s 2026-01-22 12:40:33.636278 | fa163e0d-6f45-64a1-ca76-000000000b22 | TIMING | create persistent directories | standalone | 0:01:27.601283 | 0.23s 2026-01-22 12:40:33.661857 | fa163e0d-6f45-64a1-ca76-000000000b23 | TASK | enable virt_sandbox_use_netlink for healthcheck 2026-01-22 12:40:34.767373 | fa163e0d-6f45-64a1-ca76-000000000b23 | CHANGED | enable virt_sandbox_use_netlink for healthcheck | standalone 2026-01-22 12:40:34.768961 | fa163e0d-6f45-64a1-ca76-000000000b23 | TIMING | enable virt_sandbox_use_netlink for healthcheck | standalone | 0:01:28.733969 | 1.10s 2026-01-22 12:40:34.793358 | fa163e0d-6f45-64a1-ca76-000000000b24 | TASK | create persistent directories 2026-01-22 12:40:35.079608 | fa163e0d-6f45-64a1-ca76-000000000b24 | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/cinder', 'setype': 'container_file_t'} 2026-01-22 12:40:35.081937 | fa163e0d-6f45-64a1-ca76-000000000b24 | TIMING | create persistent directories | standalone | 0:01:29.046915 | 0.29s 2026-01-22 12:40:35.294376 | fa163e0d-6f45-64a1-ca76-000000000b24 | CHANGED | create persistent 
directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/httpd/cinder-api', 'setype': 'container_file_t'} 2026-01-22 12:40:35.295976 | fa163e0d-6f45-64a1-ca76-000000000b24 | TIMING | create persistent directories | standalone | 0:01:29.260987 | 0.50s 2026-01-22 12:40:35.306362 | fa163e0d-6f45-64a1-ca76-000000000b24 | TIMING | create persistent directories | standalone | 0:01:29.271374 | 0.51s 2026-01-22 12:40:35.331766 | fa163e0d-6f45-64a1-ca76-000000000b25 | TASK | create fcontext entry for cinder data 2026-01-22 12:40:36.647320 | fa163e0d-6f45-64a1-ca76-000000000b25 | CHANGED | create fcontext entry for cinder data | standalone 2026-01-22 12:40:36.649017 | fa163e0d-6f45-64a1-ca76-000000000b25 | TIMING | create fcontext entry for cinder data | standalone | 0:01:30.614025 | 1.32s 2026-01-22 12:40:36.673469 | fa163e0d-6f45-64a1-ca76-000000000b26 | TASK | create persistent directories 2026-01-22 12:40:36.948862 | fa163e0d-6f45-64a1-ca76-000000000b26 | OK | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/cinder', 'setype': 'container_file_t'} 2026-01-22 12:40:36.951237 | fa163e0d-6f45-64a1-ca76-000000000b26 | TIMING | create persistent directories | standalone | 0:01:30.916237 | 0.28s 2026-01-22 12:40:37.191144 | fa163e0d-6f45-64a1-ca76-000000000b26 | CHANGED | create persistent directories | standalone | item={'path': '/var/lib/cinder', 'setype': 'container_file_t'} 2026-01-22 12:40:37.192050 | fa163e0d-6f45-64a1-ca76-000000000b26 | TIMING | create persistent directories | standalone | 0:01:31.157064 | 0.52s 2026-01-22 12:40:37.197759 | fa163e0d-6f45-64a1-ca76-000000000b26 | TIMING | create persistent directories | standalone | 0:01:31.162760 | 0.52s 2026-01-22 12:40:37.223396 | fa163e0d-6f45-64a1-ca76-000000000b27 | TASK | ensure ceph configurations exist 2026-01-22 12:40:37.475595 | fa163e0d-6f45-64a1-ca76-000000000b27 | CHANGED | ensure ceph configurations exist | standalone 2026-01-22 12:40:37.476996 | fa163e0d-6f45-64a1-ca76-000000000b27 | TIMING | ensure ceph configurations exist | standalone | 0:01:31.442005 | 0.25s 2026-01-22 12:40:37.501457 | fa163e0d-6f45-64a1-ca76-000000000b28 | TASK | create persistent directories 2026-01-22 12:40:37.756695 | fa163e0d-6f45-64a1-ca76-000000000b28 | OK | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/cinder', 'setype': 'container_file_t'} 2026-01-22 12:40:37.758361 | fa163e0d-6f45-64a1-ca76-000000000b28 | TIMING | create persistent directories | standalone | 0:01:31.723362 | 0.26s 2026-01-22 12:40:37.765991 | fa163e0d-6f45-64a1-ca76-000000000b28 | TIMING | create persistent directories | standalone | 0:01:31.730991 | 0.26s 2026-01-22 12:40:37.791344 | fa163e0d-6f45-64a1-ca76-000000000b2a | TASK | Create cinder image conversion directory 2026-01-22 12:40:37.820812 | fa163e0d-6f45-64a1-ca76-000000000b2a | SKIPPED | Create cinder image conversion directory | standalone 2026-01-22 12:40:37.822082 | fa163e0d-6f45-64a1-ca76-000000000b2a | TIMING | Create cinder image conversion directory | standalone | 0:01:31.787087 | 0.03s 2026-01-22 12:40:37.846393 | fa163e0d-6f45-64a1-ca76-000000000b2b | TASK | Mount cinder's image conversion NFS share 2026-01-22 12:40:37.877774 | fa163e0d-6f45-64a1-ca76-000000000b2b | SKIPPED | Mount cinder's image conversion NFS share | standalone 2026-01-22 12:40:37.879017 | fa163e0d-6f45-64a1-ca76-000000000b2b | TIMING | Mount cinder's image conversion NFS share | standalone | 0:01:31.844022 | 0.03s 2026-01-22 
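Illustrative sketch (not from this job's host_prep_tasks): the recurring "create persistent directories" entries all follow one pattern, create a host directory with a mode and an SELinux type so containers can write to it. The paths, modes and setype values below are taken from the log items; the loop structure itself is an assumption.

# Sketch: create container-writable directories with the
# container_file_t SELinux type, as the loop items above show.
- name: Create persistent directories
  ansible.builtin.file:
    path: "{{ item.path }}"
    state: directory
    mode: "{{ item.mode | default(omit) }}"
    setype: "{{ item.setype }}"
  loop:
    - { path: /var/log/containers/cinder, mode: "0750", setype: container_file_t }
    - { path: /var/lib/cinder, setype: container_file_t }
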
12:40:37.902811 | fa163e0d-6f45-64a1-ca76-000000000b2d | TASK | cinder_configure_lvm fact 2026-01-22 12:40:37.932879 | fa163e0d-6f45-64a1-ca76-000000000b2d | OK | cinder_configure_lvm fact | standalone 2026-01-22 12:40:37.934092 | fa163e0d-6f45-64a1-ca76-000000000b2d | TIMING | cinder_configure_lvm fact | standalone | 0:01:31.899101 | 0.03s 2026-01-22 12:40:37.958156 | fa163e0d-6f45-64a1-ca76-000000000b2f | TASK | ensure LVM rpm dependencies are installed 2026-01-22 12:40:40.953292 | fa163e0d-6f45-64a1-ca76-000000000b2f | OK | ensure LVM rpm dependencies are installed | standalone 2026-01-22 12:40:40.954596 | fa163e0d-6f45-64a1-ca76-000000000b2f | TIMING | ensure LVM rpm dependencies are installed | standalone | 0:01:34.919605 | 3.00s 2026-01-22 12:40:40.978535 | fa163e0d-6f45-64a1-ca76-000000000b30 | TASK | cinder create LVM volume group dd 2026-01-22 12:40:41.210384 | fa163e0d-6f45-64a1-ca76-000000000b30 | CHANGED | cinder create LVM volume group dd | standalone 2026-01-22 12:40:41.211953 | fa163e0d-6f45-64a1-ca76-000000000b30 | TIMING | cinder create LVM volume group dd | standalone | 0:01:35.176959 | 0.23s 2026-01-22 12:40:41.237691 | fa163e0d-6f45-64a1-ca76-000000000b31 | TASK | Get or create LVM loopback device 2026-01-22 12:40:41.498857 | fa163e0d-6f45-64a1-ca76-000000000b31 | CHANGED | Get or create LVM loopback device | standalone 2026-01-22 12:40:41.499873 | fa163e0d-6f45-64a1-ca76-000000000b31 | TIMING | Get or create LVM loopback device | standalone | 0:01:35.464887 | 0.26s 2026-01-22 12:40:41.519566 | fa163e0d-6f45-64a1-ca76-000000000b32 | TASK | Create LVM volume group 2026-01-22 12:40:42.076110 | fa163e0d-6f45-64a1-ca76-000000000b32 | CHANGED | Create LVM volume group | standalone 2026-01-22 12:40:42.079166 | fa163e0d-6f45-64a1-ca76-000000000b32 | TIMING | Create LVM volume group | standalone | 0:01:36.044170 | 0.56s 2026-01-22 12:40:42.104537 | fa163e0d-6f45-64a1-ca76-000000000b33 | TASK | cinder create service to run losetup for LVM on startup 2026-01-22 12:40:42.527261 | fa163e0d-6f45-64a1-ca76-000000000b33 | CHANGED | cinder create service to run losetup for LVM on startup | standalone 2026-01-22 12:40:42.528703 | fa163e0d-6f45-64a1-ca76-000000000b33 | TIMING | cinder create service to run losetup for LVM on startup | standalone | 0:01:36.493708 | 0.42s 2026-01-22 12:40:42.553475 | fa163e0d-6f45-64a1-ca76-000000000b34 | TASK | cinder enable the LVM losetup service 2026-01-22 12:40:43.319056 | fa163e0d-6f45-64a1-ca76-000000000b34 | CHANGED | cinder enable the LVM losetup service | standalone 2026-01-22 12:40:43.321187 | fa163e0d-6f45-64a1-ca76-000000000b34 | TIMING | cinder enable the LVM losetup service | standalone | 0:01:37.286191 | 0.77s 2026-01-22 12:40:43.349195 | fa163e0d-6f45-64a1-ca76-000000000b36 | TASK | allow logrotate to read inside containers 2026-01-22 12:40:44.303117 | fa163e0d-6f45-64a1-ca76-000000000b36 | CHANGED | allow logrotate to read inside containers | standalone 2026-01-22 12:40:44.304477 | fa163e0d-6f45-64a1-ca76-000000000b36 | TIMING | allow logrotate to read inside containers | standalone | 0:01:38.269492 | 0.95s 2026-01-22 12:40:44.324134 | fa163e0d-6f45-64a1-ca76-000000000b37 | TASK | authorize httpd to listen on registry ports 2026-01-22 12:40:46.600312 | fa163e0d-6f45-64a1-ca76-000000000b37 | CHANGED | authorize httpd to listen on registry ports | standalone 2026-01-22 12:40:46.601944 | fa163e0d-6f45-64a1-ca76-000000000b37 | TIMING | authorize httpd to listen on registry ports | standalone | 0:01:40.566953 | 2.28s 2026-01-22 12:40:46.662085 
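Illustrative sketch (not the actual cinder host-prep tasks): the cinder entries above build a loopback-backed LVM volume group, a backing file, a loop device, the volume group itself, plus a systemd unit to re-run losetup on boot. A rough sketch of the first three steps follows; the 10G size and the volume-group name cinder-volumes are assumptions.

# Sketch: file-backed LVM for cinder. Re-attaching the loop device on
# boot is handled separately (the log shows a dedicated losetup
# systemd service being created for that).
- name: Create backing file for the cinder volume group
  ansible.builtin.command:
    cmd: truncate -s 10G /var/lib/cinder/cinder-volumes.img
    creates: /var/lib/cinder/cinder-volumes.img

- name: Attach loopback device
  ansible.builtin.command:
    cmd: losetup --find --show /var/lib/cinder/cinder-volumes.img
  register: cinder_loop

- name: Create LVM volume group on the loop device
  community.general.lvg:
    vg: cinder-volumes              # assumed volume-group name
    pvs: "{{ cinder_loop.stdout }}"
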
| fa163e0d-6f45-64a1-ca76-000000000b39 | OK | set_fact | standalone 2026-01-22 12:40:46.663347 | fa163e0d-6f45-64a1-ca76-000000000b39 | TIMING | set_fact | standalone | 0:01:40.628356 | 0.03s 2026-01-22 12:40:46.702959 | fa163e0d-6f45-64a1-ca76-000000000b3a | TIMING | include_role : tripleo_image_serve | standalone | 0:01:40.667960 | 0.02s 2026-01-22 12:40:46.758322 | fa163e0d-6f45-64a1-ca76-000000000ee1 | TASK | ensure apache is installed 2026-01-22 12:40:52.987458 | fa163e0d-6f45-64a1-ca76-000000000ee1 | CHANGED | ensure apache is installed | standalone 2026-01-22 12:40:52.989180 | fa163e0d-6f45-64a1-ca76-000000000ee1 | TIMING | tripleo_image_serve : ensure apache is installed | standalone | 0:01:46.954187 | 6.23s 2026-01-22 12:40:53.015012 | fa163e0d-6f45-64a1-ca76-000000000ee2 | TASK | create image data directory 2026-01-22 12:40:53.272529 | fa163e0d-6f45-64a1-ca76-000000000ee2 | CHANGED | create image data directory | standalone 2026-01-22 12:40:53.273954 | fa163e0d-6f45-64a1-ca76-000000000ee2 | TIMING | tripleo_image_serve : create image data directory | standalone | 0:01:47.238961 | 0.26s 2026-01-22 12:40:53.299259 | fa163e0d-6f45-64a1-ca76-000000000ee3 | TASK | create /v2/ response file 2026-01-22 12:40:53.732746 | fa163e0d-6f45-64a1-ca76-000000000ee3 | CHANGED | create /v2/ response file | standalone 2026-01-22 12:40:53.733678 | fa163e0d-6f45-64a1-ca76-000000000ee3 | TIMING | tripleo_image_serve : create /v2/ response file | standalone | 0:01:47.698693 | 0.43s 2026-01-22 12:40:53.760562 | fa163e0d-6f45-64a1-ca76-000000000ee4 | TASK | Add listen line 2026-01-22 12:40:54.132476 | fa163e0d-6f45-64a1-ca76-000000000ee4 | CHANGED | Add listen line | standalone 2026-01-22 12:40:54.133847 | fa163e0d-6f45-64a1-ca76-000000000ee4 | TIMING | tripleo_image_serve : Add listen line | standalone | 0:01:48.098854 | 0.37s 2026-01-22 12:40:54.160596 | fa163e0d-6f45-64a1-ca76-000000000ee5 | TASK | manage /etc/httpd/conf.d/image-serve.conf 2026-01-22 12:40:54.627895 | fa163e0d-6f45-64a1-ca76-000000000ee5 | CHANGED | manage /etc/httpd/conf.d/image-serve.conf | standalone 2026-01-22 12:40:54.629182 | fa163e0d-6f45-64a1-ca76-000000000ee5 | TIMING | tripleo_image_serve : manage /etc/httpd/conf.d/image-serve.conf | standalone | 0:01:48.594191 | 0.47s 2026-01-22 12:40:54.654420 | fa163e0d-6f45-64a1-ca76-000000000ee6 | TASK | Image-Serve | restart httpd 2026-01-22 12:40:55.614070 | fa163e0d-6f45-64a1-ca76-000000000ee6 | CHANGED | Image-Serve | restart httpd | standalone 2026-01-22 12:40:55.616384 | fa163e0d-6f45-64a1-ca76-000000000ee6 | TIMING | tripleo_image_serve : Image-Serve | restart httpd | standalone | 0:01:49.581391 | 0.96s 2026-01-22 12:40:55.666303 | fa163e0d-6f45-64a1-ca76-000000000b3c | TASK | create persistent logs directory 2026-01-22 12:40:55.926805 | fa163e0d-6f45-64a1-ca76-000000000b3c | CHANGED | create persistent logs directory | standalone | item={'mode': '0750', 'path': '/var/log/containers/glance', 'setype': 'container_file_t'} 2026-01-22 12:40:55.928382 | fa163e0d-6f45-64a1-ca76-000000000b3c | TIMING | create persistent logs directory | standalone | 0:01:49.893383 | 0.26s 2026-01-22 12:40:56.109849 | fa163e0d-6f45-64a1-ca76-000000000b3c | CHANGED | create persistent logs directory | standalone | item={'mode': '0750', 'path': '/var/log/containers/httpd/glance', 'setype': 'container_file_t'} 2026-01-22 12:40:56.111353 | fa163e0d-6f45-64a1-ca76-000000000b3c | TIMING | create persistent logs directory | standalone | 0:01:50.076365 | 0.44s 2026-01-22 12:40:56.122285 | 
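Illustrative sketch (not the tripleo_image_serve role's actual tasks): the image-serve entries above boil down to installing Apache, creating an image data directory, dropping a vhost config, and restarting httpd. A stripped-down version is sketched below; the Listen port and document root are assumptions, while /etc/httpd/conf.d/image-serve.conf comes from the log.

# Sketch: serve image content over plain httpd. Port 8787 and the
# /var/lib/image-serve docroot are assumptions for illustration.
- name: Ensure apache is installed
  ansible.builtin.package:
    name: httpd
    state: present

- name: Create image data directory
  ansible.builtin.file:
    path: /var/lib/image-serve
    state: directory
    setype: httpd_sys_content_t

- name: Manage /etc/httpd/conf.d/image-serve.conf
  ansible.builtin.copy:
    dest: /etc/httpd/conf.d/image-serve.conf
    mode: "0644"
    content: |
      Listen 8787
      <VirtualHost *:8787>
        DocumentRoot /var/lib/image-serve
      </VirtualHost>

- name: Restart httpd
  ansible.builtin.systemd:
    name: httpd
    state: restarted
    enabled: true
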
fa163e0d-6f45-64a1-ca76-000000000b3c | TIMING | create persistent logs directory | standalone | 0:01:50.087286 | 0.45s 2026-01-22 12:40:56.148154 | fa163e0d-6f45-64a1-ca76-000000000b3d | TASK | Mount NFS on host 2026-01-22 12:40:56.176631 | fa163e0d-6f45-64a1-ca76-000000000b3d | SKIPPED | Mount NFS on host | standalone 2026-01-22 12:40:56.177838 | fa163e0d-6f45-64a1-ca76-000000000b3d | TIMING | Mount NFS on host | standalone | 0:01:50.142845 | 0.03s 2026-01-22 12:40:56.202206 | fa163e0d-6f45-64a1-ca76-000000000b3e | TASK | Mount Node Staging Location 2026-01-22 12:40:56.225151 | fa163e0d-6f45-64a1-ca76-000000000b3e | SKIPPED | Mount Node Staging Location | standalone 2026-01-22 12:40:56.226540 | fa163e0d-6f45-64a1-ca76-000000000b3e | TIMING | Mount Node Staging Location | standalone | 0:01:50.191539 | 0.02s 2026-01-22 12:40:56.251355 | fa163e0d-6f45-64a1-ca76-000000000b3f | TASK | ensure /var/lib/glance exists 2026-01-22 12:40:56.482440 | fa163e0d-6f45-64a1-ca76-000000000b3f | CHANGED | ensure /var/lib/glance exists | standalone 2026-01-22 12:40:56.483819 | fa163e0d-6f45-64a1-ca76-000000000b3f | TIMING | ensure /var/lib/glance exists | standalone | 0:01:50.448827 | 0.23s 2026-01-22 12:40:56.508059 | fa163e0d-6f45-64a1-ca76-000000000b40 | TASK | create logs directory 2026-01-22 12:40:56.747913 | fa163e0d-6f45-64a1-ca76-000000000b40 | CHANGED | create logs directory | standalone | item={'mode': '0750', 'path': '/var/log/containers/gnocchi', 'setype': 'container_file_t'} 2026-01-22 12:40:56.750117 | fa163e0d-6f45-64a1-ca76-000000000b40 | TIMING | create logs directory | standalone | 0:01:50.715119 | 0.24s 2026-01-22 12:40:56.968617 | fa163e0d-6f45-64a1-ca76-000000000b40 | CHANGED | create logs directory | standalone | item={'mode': '0750', 'path': '/var/log/containers/httpd/gnocchi-api', 'setype': 'container_file_t'} 2026-01-22 12:40:56.969676 | fa163e0d-6f45-64a1-ca76-000000000b40 | TIMING | create logs directory | standalone | 0:01:50.934694 | 0.46s 2026-01-22 12:40:56.979950 | fa163e0d-6f45-64a1-ca76-000000000b40 | TIMING | create logs directory | standalone | 0:01:50.944962 | 0.47s 2026-01-22 12:40:57.002599 | fa163e0d-6f45-64a1-ca76-000000000b41 | TASK | Mount Gnocchi NFS on host 2026-01-22 12:40:57.028778 | fa163e0d-6f45-64a1-ca76-000000000b41 | SKIPPED | Mount Gnocchi NFS on host | standalone 2026-01-22 12:40:57.029987 | fa163e0d-6f45-64a1-ca76-000000000b41 | TIMING | Mount Gnocchi NFS on host | standalone | 0:01:50.994996 | 0.03s 2026-01-22 12:40:57.053091 | fa163e0d-6f45-64a1-ca76-000000000b42 | TASK | ensure GnocchiFileBasePath exists 2026-01-22 12:40:57.284821 | fa163e0d-6f45-64a1-ca76-000000000b42 | CHANGED | ensure GnocchiFileBasePath exists | standalone 2026-01-22 12:40:57.285884 | fa163e0d-6f45-64a1-ca76-000000000b42 | TIMING | ensure GnocchiFileBasePath exists | standalone | 0:01:51.250896 | 0.23s 2026-01-22 12:40:57.305874 | fa163e0d-6f45-64a1-ca76-000000000b43 | TASK | create persistent directories 2026-01-22 12:40:57.544721 | fa163e0d-6f45-64a1-ca76-000000000b43 | OK | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/gnocchi', 'setype': 'container_file_t'} 2026-01-22 12:40:57.546148 | fa163e0d-6f45-64a1-ca76-000000000b43 | TIMING | create persistent directories | standalone | 0:01:51.511156 | 0.24s 2026-01-22 12:40:57.551606 | fa163e0d-6f45-64a1-ca76-000000000b43 | TIMING | create persistent directories | standalone | 0:01:51.516628 | 0.24s 2026-01-22 12:40:57.568413 | fa163e0d-6f45-64a1-ca76-000000000b44 | TASK | create 
persistent data directory 2026-01-22 12:40:57.792540 | fa163e0d-6f45-64a1-ca76-000000000b44 | OK | create persistent data directory | standalone 2026-01-22 12:40:57.793492 | fa163e0d-6f45-64a1-ca76-000000000b44 | TIMING | create persistent data directory | standalone | 0:01:51.758507 | 0.22s 2026-01-22 12:40:57.817923 | fa163e0d-6f45-64a1-ca76-000000000b45 | TASK | create persistent directories 2026-01-22 12:40:58.069971 | fa163e0d-6f45-64a1-ca76-000000000b45 | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/haproxy', 'setype': 'var_log_t'} 2026-01-22 12:40:58.072071 | fa163e0d-6f45-64a1-ca76-000000000b45 | TIMING | create persistent directories | standalone | 0:01:52.037073 | 0.25s 2026-01-22 12:40:58.295358 | fa163e0d-6f45-64a1-ca76-000000000b45 | CHANGED | create persistent directories | standalone | item={'path': '/var/lib/haproxy', 'setype': 'container_file_t'} 2026-01-22 12:40:58.296764 | fa163e0d-6f45-64a1-ca76-000000000b45 | TIMING | create persistent directories | standalone | 0:01:52.261775 | 0.48s 2026-01-22 12:40:58.306466 | fa163e0d-6f45-64a1-ca76-000000000b45 | TIMING | create persistent directories | standalone | 0:01:52.271488 | 0.49s 2026-01-22 12:40:58.325848 | fa163e0d-6f45-64a1-ca76-000000000b46 | TASK | Run puppet on the host to apply IPtables rules 2026-01-22 12:41:06.199352 | fa163e0d-6f45-64a1-ca76-000000000b46 | OK | Run puppet on the host to apply IPtables rules | standalone 2026-01-22 12:41:06.200699 | fa163e0d-6f45-64a1-ca76-000000000b46 | TIMING | Run puppet on the host to apply IPtables rules | standalone | 0:02:00.165706 | 7.87s 2026-01-22 12:41:06.223369 | fa163e0d-6f45-64a1-ca76-000000000b47 | TASK | Debug output for task: Run puppet on the host to apply IPtables rules 2026-01-22 12:41:06.300563 | fa163e0d-6f45-64a1-ca76-000000000b47 | OK | Debug output for task: Run puppet on the host to apply IPtables rules | standalone | result={ "changed": false, "failed_when_result": false, "puppet_host_outputs.stdout_lines | default([]) | union(puppet_host_outputs.stderr_lines | default([]))": [ "Notice: Compiled catalog for standalone.ooo.test in environment production in 0.64 seconds", "Notice: Applied catalog in 0.24 seconds", "Application:", " Initial environment: production", " Converged environment: production", " Run mode: user", "Changes:", "Events:", "Resources:", " Skipped: 56", " Total: 56", "Time:", " Transaction evaluation: 0.23", " Catalog application: 0.24", " Config retrieval: 0.75", " Last run: 1769085665", " Total: 0.24", "Version:", " Config: 1769085664", " Puppet: 7.10.0", "Warning: The function 'hiera' is deprecated in favor of using 'lookup'. See https://puppet.com/docs/puppet/7.10/deprecated_language.html", " (file & line not available)", "Warning: /etc/puppet/hiera.yaml: Use of 'hiera.yaml' version 3 is deprecated. It should be converted to version 5", " (file: /etc/puppet/hiera.yaml)", "Warning: Undefined variable '::deploy_config_name'; ", "Warning: Scope(Haproxy::Config[haproxy]): haproxy: The $merge_options parameter will default to true in the next major release. Please review the documentation regarding the implications." 
] } 2026-01-22 12:41:06.302548 | fa163e0d-6f45-64a1-ca76-000000000b47 | TIMING | Debug output for task: Run puppet on the host to apply IPtables rules | standalone | 0:02:00.267551 | 0.08s 2026-01-22 12:41:06.330384 | fa163e0d-6f45-64a1-ca76-000000000b48 | TASK | create persistent directories 2026-01-22 12:41:06.593138 | fa163e0d-6f45-64a1-ca76-000000000b48 | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/heat', 'setype': 'container_file_t'} 2026-01-22 12:41:06.594878 | fa163e0d-6f45-64a1-ca76-000000000b48 | TIMING | create persistent directories | standalone | 0:02:00.559879 | 0.26s 2026-01-22 12:41:06.786591 | fa163e0d-6f45-64a1-ca76-000000000b48 | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/httpd/heat-api', 'setype': 'container_file_t'} 2026-01-22 12:41:06.787810 | fa163e0d-6f45-64a1-ca76-000000000b48 | TIMING | create persistent directories | standalone | 0:02:00.752820 | 0.46s 2026-01-22 12:41:06.800271 | fa163e0d-6f45-64a1-ca76-000000000b48 | TIMING | create persistent directories | standalone | 0:02:00.765264 | 0.47s 2026-01-22 12:41:06.827571 | fa163e0d-6f45-64a1-ca76-000000000b49 | TASK | create persistent directories 2026-01-22 12:41:07.085296 | fa163e0d-6f45-64a1-ca76-000000000b49 | OK | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/heat', 'setype': 'container_file_t'} 2026-01-22 12:41:07.087836 | fa163e0d-6f45-64a1-ca76-000000000b49 | TIMING | create persistent directories | standalone | 0:02:01.052837 | 0.26s 2026-01-22 12:41:07.267820 | fa163e0d-6f45-64a1-ca76-000000000b49 | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/httpd/heat-api-cfn', 'setype': 'container_file_t'} 2026-01-22 12:41:07.269643 | fa163e0d-6f45-64a1-ca76-000000000b49 | TIMING | create persistent directories | standalone | 0:02:01.234653 | 0.44s 2026-01-22 12:41:07.280537 | fa163e0d-6f45-64a1-ca76-000000000b49 | TIMING | create persistent directories | standalone | 0:02:01.245525 | 0.45s 2026-01-22 12:41:07.307193 | fa163e0d-6f45-64a1-ca76-000000000b4a | TASK | create persistent directories 2026-01-22 12:41:07.552305 | fa163e0d-6f45-64a1-ca76-000000000b4a | OK | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/heat', 'setype': 'container_file_t'} 2026-01-22 12:41:07.554486 | fa163e0d-6f45-64a1-ca76-000000000b4a | TIMING | create persistent directories | standalone | 0:02:01.519490 | 0.25s 2026-01-22 12:41:07.565355 | fa163e0d-6f45-64a1-ca76-000000000b4a | TIMING | create persistent directories | standalone | 0:02:01.530334 | 0.26s 2026-01-22 12:41:07.592220 | fa163e0d-6f45-64a1-ca76-000000000b4b | TASK | create persistent directories 2026-01-22 12:41:07.858044 | fa163e0d-6f45-64a1-ca76-000000000b4b | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/horizon', 'setype': 'container_file_t'} 2026-01-22 12:41:07.860554 | fa163e0d-6f45-64a1-ca76-000000000b4b | TIMING | create persistent directories | standalone | 0:02:01.825555 | 0.27s 2026-01-22 12:41:08.079951 | fa163e0d-6f45-64a1-ca76-000000000b4b | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/httpd/horizon', 'setype': 'container_file_t'} 2026-01-22 12:41:08.081126 | fa163e0d-6f45-64a1-ca76-000000000b4b | TIMING | create persistent directories | standalone | 0:02:02.046134 
| 0.49s 2026-01-22 12:41:08.279328 | fa163e0d-6f45-64a1-ca76-000000000b4b | CHANGED | create persistent directories | standalone | item={'path': '/var/www', 'setype': 'container_file_t'} 2026-01-22 12:41:08.280511 | fa163e0d-6f45-64a1-ca76-000000000b4b | TIMING | create persistent directories | standalone | 0:02:02.245520 | 0.69s 2026-01-22 12:41:08.453589 | fa163e0d-6f45-64a1-ca76-000000000b4b | CHANGED | create persistent directories | standalone | item={'mode': '01777', 'path': '/var/tmp/horizon', 'setype': 'container_file_t'} 2026-01-22 12:41:08.455433 | fa163e0d-6f45-64a1-ca76-000000000b4b | TIMING | create persistent directories | standalone | 0:02:02.420442 | 0.86s 2026-01-22 12:41:08.467248 | fa163e0d-6f45-64a1-ca76-000000000b4b | TIMING | create persistent directories | standalone | 0:02:02.432235 | 0.87s 2026-01-22 12:41:08.496189 | fa163e0d-6f45-64a1-ca76-000000000b4c | TASK | ensure /var/tmp/horizon exists on boot 2026-01-22 12:41:08.952363 | fa163e0d-6f45-64a1-ca76-000000000b4c | CHANGED | ensure /var/tmp/horizon exists on boot | standalone 2026-01-22 12:41:08.954121 | fa163e0d-6f45-64a1-ca76-000000000b4c | TIMING | ensure /var/tmp/horizon exists on boot | standalone | 0:02:02.919126 | 0.46s 2026-01-22 12:41:08.975831 | fa163e0d-6f45-64a1-ca76-000000000b4d | TASK | create fcontext entry for iscsi 2026-01-22 12:41:10.327638 | fa163e0d-6f45-64a1-ca76-000000000b4d | CHANGED | create fcontext entry for iscsi | standalone | item={'path': '/etc/iscsi', 'setype': 'container_file_t'} 2026-01-22 12:41:10.329916 | fa163e0d-6f45-64a1-ca76-000000000b4d | TIMING | create fcontext entry for iscsi | standalone | 0:02:04.294904 | 1.35s 2026-01-22 12:41:11.568546 | fa163e0d-6f45-64a1-ca76-000000000b4d | CHANGED | create fcontext entry for iscsi | standalone | item={'path': '/etc/target', 'setype': 'container_file_t'} 2026-01-22 12:41:11.570813 | fa163e0d-6f45-64a1-ca76-000000000b4d | TIMING | create fcontext entry for iscsi | standalone | 0:02:05.535813 | 2.59s 2026-01-22 12:41:12.847756 | fa163e0d-6f45-64a1-ca76-000000000b4d | CHANGED | create fcontext entry for iscsi | standalone | item={'path': '/var/lib/iscsi', 'setype': 'container_file_t'} 2026-01-22 12:41:12.851251 | fa163e0d-6f45-64a1-ca76-000000000b4d | TIMING | create fcontext entry for iscsi | standalone | 0:02:06.816232 | 3.87s 2026-01-22 12:41:12.863728 | fa163e0d-6f45-64a1-ca76-000000000b4d | TIMING | create fcontext entry for iscsi | standalone | 0:02:06.828728 | 3.89s 2026-01-22 12:41:12.889391 | fa163e0d-6f45-64a1-ca76-000000000b4e | TASK | create persistent directories 2026-01-22 12:41:13.139805 | fa163e0d-6f45-64a1-ca76-000000000b4e | CHANGED | create persistent directories | standalone | item={'path': '/etc/iscsi', 'setype': 'container_file_t'} 2026-01-22 12:41:13.141576 | fa163e0d-6f45-64a1-ca76-000000000b4e | TIMING | create persistent directories | standalone | 0:02:07.106575 | 0.25s 2026-01-22 12:41:13.353801 | fa163e0d-6f45-64a1-ca76-000000000b4e | CHANGED | create persistent directories | standalone | item={'path': '/etc/target', 'setype': 'container_file_t'} 2026-01-22 12:41:13.355039 | fa163e0d-6f45-64a1-ca76-000000000b4e | TIMING | create persistent directories | standalone | 0:02:07.320051 | 0.46s 2026-01-22 12:41:13.506876 | fa163e0d-6f45-64a1-ca76-000000000b4e | CHANGED | create persistent directories | standalone | item={'path': '/var/lib/iscsi', 'setype': 'container_file_t'} 2026-01-22 12:41:13.508725 | fa163e0d-6f45-64a1-ca76-000000000b4e | TIMING | create persistent directories | standalone | 0:02:07.473733 | 
0.62s 2026-01-22 12:41:13.519734 | fa163e0d-6f45-64a1-ca76-000000000b4e | TIMING | create persistent directories | standalone | 0:02:07.484738 | 0.63s 2026-01-22 12:41:13.545373 | fa163e0d-6f45-64a1-ca76-000000000b4f | TASK | stat /lib/systemd/system/iscsid.socket 2026-01-22 12:41:13.757403 | fa163e0d-6f45-64a1-ca76-000000000b4f | OK | stat /lib/systemd/system/iscsid.socket | standalone 2026-01-22 12:41:13.758622 | fa163e0d-6f45-64a1-ca76-000000000b4f | TIMING | stat /lib/systemd/system/iscsid.socket | standalone | 0:02:07.723629 | 0.21s 2026-01-22 12:41:13.782791 | fa163e0d-6f45-64a1-ca76-000000000b50 | TASK | Stop and disable iscsid.socket service 2026-01-22 12:41:13.811746 | fa163e0d-6f45-64a1-ca76-000000000b50 | SKIPPED | Stop and disable iscsid.socket service | standalone 2026-01-22 12:41:13.812816 | fa163e0d-6f45-64a1-ca76-000000000b50 | TIMING | Stop and disable iscsid.socket service | standalone | 0:02:07.777825 | 0.03s 2026-01-22 12:41:13.836678 | fa163e0d-6f45-64a1-ca76-000000000b51 | TASK | Check if iscsi.service is enabled 2026-01-22 12:41:14.077368 | fa163e0d-6f45-64a1-ca76-000000000b51 | CHANGED | Check if iscsi.service is enabled | standalone 2026-01-22 12:41:14.078676 | fa163e0d-6f45-64a1-ca76-000000000b51 | TIMING | Check if iscsi.service is enabled | standalone | 0:02:08.043658 | 0.24s 2026-01-22 12:41:14.102723 | fa163e0d-6f45-64a1-ca76-000000000b52 | TASK | Stop iscsi.service 2026-01-22 12:41:14.142536 | fa163e0d-6f45-64a1-ca76-000000000b52 | SKIPPED | Stop iscsi.service | standalone 2026-01-22 12:41:14.143588 | fa163e0d-6f45-64a1-ca76-000000000b52 | TIMING | Stop iscsi.service | standalone | 0:02:08.108598 | 0.04s 2026-01-22 12:41:14.196520 | fa163e0d-6f45-64a1-ca76-000000000b53 | TIMING | include_role : tripleo_kernel | standalone | 0:02:08.161523 | 0.03s 2026-01-22 12:41:14.293355 | fa163e0d-6f45-64a1-ca76-00000000104c | TASK | Install additional packages 2026-01-22 12:41:14.319137 | fa163e0d-6f45-64a1-ca76-00000000104c | TIMING | tripleo_kernel : Install additional packages | standalone | 0:02:08.284154 | 0.03s 2026-01-22 12:41:14.335957 | fa163e0d-6f45-64a1-ca76-00000000104d | TASK | Remove dracut-config-generic 2026-01-22 12:41:18.155180 | fa163e0d-6f45-64a1-ca76-00000000104d | CHANGED | Remove dracut-config-generic | standalone 2026-01-22 12:41:18.156799 | fa163e0d-6f45-64a1-ca76-00000000104d | TIMING | tripleo_kernel : Remove dracut-config-generic | standalone | 0:02:12.121805 | 3.82s 2026-01-22 12:41:18.185830 | fa163e0d-6f45-64a1-ca76-00000000104e | TASK | Ensure the /etc/modules-load.d/ directory exists 2026-01-22 12:41:18.420303 | fa163e0d-6f45-64a1-ca76-00000000104e | OK | Ensure the /etc/modules-load.d/ directory exists | standalone 2026-01-22 12:41:18.421831 | fa163e0d-6f45-64a1-ca76-00000000104e | TIMING | tripleo_kernel : Ensure the /etc/modules-load.d/ directory exists | standalone | 0:02:12.386831 | 0.23s 2026-01-22 12:41:18.447388 | fa163e0d-6f45-64a1-ca76-00000000104f | TASK | Write list of modules to load at boot 2026-01-22 12:41:18.934817 | fa163e0d-6f45-64a1-ca76-00000000104f | CHANGED | Write list of modules to load at boot | standalone 2026-01-22 12:41:18.935614 | fa163e0d-6f45-64a1-ca76-00000000104f | TIMING | tripleo_kernel : Write list of modules to load at boot | standalone | 0:02:12.900632 | 0.49s 2026-01-22 12:41:18.955148 | fa163e0d-6f45-64a1-ca76-000000001050 | TASK | Modules reload 2026-01-22 12:41:19.371765 | fa163e0d-6f45-64a1-ca76-000000001050 | CHANGED | Modules reload | standalone 2026-01-22 12:41:19.373608 | 
fa163e0d-6f45-64a1-ca76-000000001050 | TIMING | tripleo_kernel : Modules reload | standalone | 0:02:13.338612 | 0.42s 2026-01-22 12:41:19.400590 | fa163e0d-6f45-64a1-ca76-000000001051 | TASK | Set default sysctl options 2026-01-22 12:41:19.892747 | fa163e0d-6f45-64a1-ca76-000000001051 | CHANGED | Set default sysctl options | standalone 2026-01-22 12:41:19.894122 | fa163e0d-6f45-64a1-ca76-000000001051 | TIMING | tripleo_kernel : Set default sysctl options | standalone | 0:02:13.859130 | 0.49s 2026-01-22 12:41:19.921903 | fa163e0d-6f45-64a1-ca76-000000001052 | TASK | Set extra sysctl options 2026-01-22 12:41:20.350385 | fa163e0d-6f45-64a1-ca76-000000001052 | CHANGED | Set extra sysctl options | standalone | item=fs.inotify.max_user_instances 2026-01-22 12:41:20.351887 | fa163e0d-6f45-64a1-ca76-000000001052 | TIMING | tripleo_kernel : Set extra sysctl options | standalone | 0:02:14.316891 | 0.43s 2026-01-22 12:41:20.594185 | fa163e0d-6f45-64a1-ca76-000000001052 | CHANGED | Set extra sysctl options | standalone | item=kernel.pid_max 2026-01-22 12:41:20.595181 | fa163e0d-6f45-64a1-ca76-000000001052 | TIMING | tripleo_kernel : Set extra sysctl options | standalone | 0:02:14.560192 | 0.67s 2026-01-22 12:41:20.805751 | fa163e0d-6f45-64a1-ca76-000000001052 | CHANGED | Set extra sysctl options | standalone | item=net.bridge.bridge-nf-call-arptables 2026-01-22 12:41:20.806742 | fa163e0d-6f45-64a1-ca76-000000001052 | TIMING | tripleo_kernel : Set extra sysctl options | standalone | 0:02:14.771751 | 0.88s 2026-01-22 12:41:20.997422 | fa163e0d-6f45-64a1-ca76-000000001052 | CHANGED | Set extra sysctl options | standalone | item=net.bridge.bridge-nf-call-ip6tables 2026-01-22 12:41:20.998384 | fa163e0d-6f45-64a1-ca76-000000001052 | TIMING | tripleo_kernel : Set extra sysctl options | standalone | 0:02:14.963395 | 1.07s 2026-01-22 12:41:21.240154 | fa163e0d-6f45-64a1-ca76-000000001052 | CHANGED | Set extra sysctl options | standalone | item=net.bridge.bridge-nf-call-iptables 2026-01-22 12:41:21.241631 | fa163e0d-6f45-64a1-ca76-000000001052 | TIMING | tripleo_kernel : Set extra sysctl options | standalone | 0:02:15.206642 | 1.32s 2026-01-22 12:41:21.486540 | fa163e0d-6f45-64a1-ca76-000000001052 | CHANGED | Set extra sysctl options | standalone | item=net.ipv4.conf.all.rp_filter 2026-01-22 12:41:21.488796 | fa163e0d-6f45-64a1-ca76-000000001052 | TIMING | tripleo_kernel : Set extra sysctl options | standalone | 0:02:15.453799 | 1.56s 2026-01-22 12:41:21.699374 | fa163e0d-6f45-64a1-ca76-000000001052 | CHANGED | Set extra sysctl options | standalone | item=net.ipv4.ip_forward 2026-01-22 12:41:21.700484 | fa163e0d-6f45-64a1-ca76-000000001052 | TIMING | tripleo_kernel : Set extra sysctl options | standalone | 0:02:15.665494 | 1.78s 2026-01-22 12:41:21.946815 | fa163e0d-6f45-64a1-ca76-000000001052 | CHANGED | Set extra sysctl options | standalone | item=net.ipv4.ip_local_reserved_ports 2026-01-22 12:41:21.948503 | fa163e0d-6f45-64a1-ca76-000000001052 | TIMING | tripleo_kernel : Set extra sysctl options | standalone | 0:02:15.913512 | 2.02s 2026-01-22 12:41:22.162407 | fa163e0d-6f45-64a1-ca76-000000001052 | CHANGED | Set extra sysctl options | standalone | item=net.ipv4.ip_nonlocal_bind 2026-01-22 12:41:22.163771 | fa163e0d-6f45-64a1-ca76-000000001052 | TIMING | tripleo_kernel : Set extra sysctl options | standalone | 0:02:16.128788 | 2.24s 2026-01-22 12:41:22.378482 | fa163e0d-6f45-64a1-ca76-000000001052 | CHANGED | Set extra sysctl options | standalone | item=net.ipv4.neigh.default.gc_thresh1 2026-01-22 
12:41:22.379115 | fa163e0d-6f45-64a1-ca76-000000001052 | TIMING | tripleo_kernel : Set extra sysctl options | standalone | 0:02:16.344133 | 2.46s 2026-01-22 12:41:22.624628 | fa163e0d-6f45-64a1-ca76-000000001052 | CHANGED | Set extra sysctl options | standalone | item=net.ipv4.neigh.default.gc_thresh2 2026-01-22 12:41:22.625688 | fa163e0d-6f45-64a1-ca76-000000001052 | TIMING | tripleo_kernel : Set extra sysctl options | standalone | 0:02:16.590662 | 2.70s 2026-01-22 12:41:22.889376 | fa163e0d-6f45-64a1-ca76-000000001052 | CHANGED | Set extra sysctl options | standalone | item=net.ipv4.neigh.default.gc_thresh3 2026-01-22 12:41:22.891395 | fa163e0d-6f45-64a1-ca76-000000001052 | TIMING | tripleo_kernel : Set extra sysctl options | standalone | 0:02:16.856404 | 2.97s 2026-01-22 12:41:23.123392 | fa163e0d-6f45-64a1-ca76-000000001052 | CHANGED | Set extra sysctl options | standalone | item=net.ipv6.conf.all.disable_ipv6 2026-01-22 12:41:23.124935 | fa163e0d-6f45-64a1-ca76-000000001052 | TIMING | tripleo_kernel : Set extra sysctl options | standalone | 0:02:17.089944 | 3.20s 2026-01-22 12:41:23.362373 | fa163e0d-6f45-64a1-ca76-000000001052 | CHANGED | Set extra sysctl options | standalone | item=net.ipv6.conf.all.forwarding 2026-01-22 12:41:23.363385 | fa163e0d-6f45-64a1-ca76-000000001052 | TIMING | tripleo_kernel : Set extra sysctl options | standalone | 0:02:17.328395 | 3.44s 2026-01-22 12:41:23.575560 | fa163e0d-6f45-64a1-ca76-000000001052 | CHANGED | Set extra sysctl options | standalone | item=net.ipv6.conf.default.disable_ipv6 2026-01-22 12:41:23.576682 | fa163e0d-6f45-64a1-ca76-000000001052 | TIMING | tripleo_kernel : Set extra sysctl options | standalone | 0:02:17.541661 | 3.65s 2026-01-22 12:41:23.790705 | fa163e0d-6f45-64a1-ca76-000000001052 | CHANGED | Set extra sysctl options | standalone | item=net.ipv6.conf.lo.disable_ipv6 2026-01-22 12:41:23.792479 | fa163e0d-6f45-64a1-ca76-000000001052 | TIMING | tripleo_kernel : Set extra sysctl options | standalone | 0:02:17.757488 | 3.87s 2026-01-22 12:41:24.073164 | fa163e0d-6f45-64a1-ca76-000000001052 | CHANGED | Set extra sysctl options | standalone | item=net.ipv6.ip_nonlocal_bind 2026-01-22 12:41:24.074190 | fa163e0d-6f45-64a1-ca76-000000001052 | TIMING | tripleo_kernel : Set extra sysctl options | standalone | 0:02:18.039197 | 4.15s 2026-01-22 12:41:24.082458 | fa163e0d-6f45-64a1-ca76-000000001052 | TIMING | tripleo_kernel : Set extra sysctl options | standalone | 0:02:18.047459 | 4.16s 2026-01-22 12:41:24.111998 | fa163e0d-6f45-64a1-ca76-000000001054 | TASK | Sysctl reload 2026-01-22 12:41:24.537550 | fa163e0d-6f45-64a1-ca76-000000001054 | CHANGED | Sysctl reload | standalone 2026-01-22 12:41:24.539377 | fa163e0d-6f45-64a1-ca76-000000001054 | TIMING | tripleo_kernel : Sysctl reload | standalone | 0:02:18.504385 | 0.43s 2026-01-22 12:41:24.589019 | fa163e0d-6f45-64a1-ca76-000000000b54 | TASK | create persistent directories 2026-01-22 12:41:24.845283 | fa163e0d-6f45-64a1-ca76-000000000b54 | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/keystone', 'setype': 'container_file_t'} 2026-01-22 12:41:24.846931 | fa163e0d-6f45-64a1-ca76-000000000b54 | TIMING | create persistent directories | standalone | 0:02:18.811932 | 0.26s 2026-01-22 12:41:25.007389 | fa163e0d-6f45-64a1-ca76-000000000b54 | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/httpd/keystone', 'setype': 'container_file_t'} 2026-01-22 12:41:25.008416 | 
fa163e0d-6f45-64a1-ca76-000000000b54 | TIMING | create persistent directories | standalone | 0:02:18.973426 | 0.42s 2026-01-22 12:41:25.020774 | fa163e0d-6f45-64a1-ca76-000000000b54 | TIMING | create persistent directories | standalone | 0:02:18.985778 | 0.43s 2026-01-22 12:41:25.046111 | fa163e0d-6f45-64a1-ca76-000000000b55 | TASK | Check if file certs_valid exist 2026-01-22 12:41:25.260145 | fa163e0d-6f45-64a1-ca76-000000000b55 | OK | Check if file certs_valid exist | standalone 2026-01-22 12:41:25.261571 | fa163e0d-6f45-64a1-ca76-000000000b55 | TIMING | Check if file certs_valid exist | standalone | 0:02:19.226573 | 0.21s 2026-01-22 12:41:25.286791 | fa163e0d-6f45-64a1-ca76-000000000b56 | TASK | Check if file cert9.db exist 2026-01-22 12:41:25.514175 | fa163e0d-6f45-64a1-ca76-000000000b56 | OK | Check if file cert9.db exist | standalone 2026-01-22 12:41:25.515457 | fa163e0d-6f45-64a1-ca76-000000000b56 | TIMING | Check if file cert9.db exist | standalone | 0:02:19.480464 | 0.23s 2026-01-22 12:41:25.540442 | fa163e0d-6f45-64a1-ca76-000000000b57 | TASK | Check if file key4.db exist 2026-01-22 12:41:25.778849 | fa163e0d-6f45-64a1-ca76-000000000b57 | OK | Check if file key4.db exist | standalone 2026-01-22 12:41:25.780174 | fa163e0d-6f45-64a1-ca76-000000000b57 | TIMING | Check if file key4.db exist | standalone | 0:02:19.745181 | 0.24s 2026-01-22 12:41:25.834521 | fa163e0d-6f45-64a1-ca76-000000000b58 | SKIPPED | fail | standalone 2026-01-22 12:41:25.835639 | fa163e0d-6f45-64a1-ca76-000000000b58 | TIMING | fail | standalone | 0:02:19.800648 | 0.03s 2026-01-22 12:41:25.863783 | fa163e0d-6f45-64a1-ca76-000000000b59 | TASK | Create persistent directories 2026-01-22 12:41:26.133280 | fa163e0d-6f45-64a1-ca76-000000000b59 | CHANGED | Create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/manila', 'setype': 'container_file_t'} 2026-01-22 12:41:26.135736 | fa163e0d-6f45-64a1-ca76-000000000b59 | TIMING | Create persistent directories | standalone | 0:02:20.100734 | 0.27s 2026-01-22 12:41:26.365071 | fa163e0d-6f45-64a1-ca76-000000000b59 | CHANGED | Create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/httpd/manila-api', 'setype': 'container_file_t'} 2026-01-22 12:41:26.366152 | fa163e0d-6f45-64a1-ca76-000000000b59 | TIMING | Create persistent directories | standalone | 0:02:20.331163 | 0.50s 2026-01-22 12:41:26.378747 | fa163e0d-6f45-64a1-ca76-000000000b59 | TIMING | Create persistent directories | standalone | 0:02:20.343759 | 0.51s 2026-01-22 12:41:26.404083 | fa163e0d-6f45-64a1-ca76-000000000b5a | TASK | create persistent directories 2026-01-22 12:41:26.652896 | fa163e0d-6f45-64a1-ca76-000000000b5a | OK | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/manila', 'setype': 'container_file_t'} 2026-01-22 12:41:26.654429 | fa163e0d-6f45-64a1-ca76-000000000b5a | TIMING | create persistent directories | standalone | 0:02:20.619432 | 0.25s 2026-01-22 12:41:26.666145 | fa163e0d-6f45-64a1-ca76-000000000b5a | TIMING | create persistent directories | standalone | 0:02:20.631156 | 0.26s 2026-01-22 12:41:26.686508 | fa163e0d-6f45-64a1-ca76-000000000b5b | TASK | create persistent directories 2026-01-22 12:41:26.959988 | fa163e0d-6f45-64a1-ca76-000000000b5b | OK | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/manila', 'setype': 'container_file_t'} 2026-01-22 12:41:26.961594 | fa163e0d-6f45-64a1-ca76-000000000b5b | TIMING | create 
persistent directories | standalone | 0:02:20.926594 | 0.27s 2026-01-22 12:41:27.194734 | fa163e0d-6f45-64a1-ca76-000000000b5b | CHANGED | create persistent directories | standalone | item={'path': '/var/lib/manila', 'setype': 'container_file_t'} 2026-01-22 12:41:27.195890 | fa163e0d-6f45-64a1-ca76-000000000b5b | TIMING | create persistent directories | standalone | 0:02:21.160900 | 0.51s 2026-01-22 12:41:27.201640 | fa163e0d-6f45-64a1-ca76-000000000b5b | TIMING | create persistent directories | standalone | 0:02:21.166651 | 0.51s 2026-01-22 12:41:27.219173 | fa163e0d-6f45-64a1-ca76-000000000b5c | TASK | create persistent directories 2026-01-22 12:41:27.455759 | fa163e0d-6f45-64a1-ca76-000000000b5c | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/memcached', 'setype': 'container_file_t'} 2026-01-22 12:41:27.456738 | fa163e0d-6f45-64a1-ca76-000000000b5c | TIMING | create persistent directories | standalone | 0:02:21.421753 | 0.24s 2026-01-22 12:41:27.460440 | fa163e0d-6f45-64a1-ca76-000000000b5c | TIMING | create persistent directories | standalone | 0:02:21.425443 | 0.24s 2026-01-22 12:41:27.486987 | fa163e0d-6f45-64a1-ca76-000000000b5d | TASK | create persistent directories 2026-01-22 12:41:27.751526 | fa163e0d-6f45-64a1-ca76-000000000b5d | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/mysql', 'setype': 'container_file_t'} 2026-01-22 12:41:27.753863 | fa163e0d-6f45-64a1-ca76-000000000b5d | TIMING | create persistent directories | standalone | 0:02:21.718862 | 0.26s 2026-01-22 12:41:27.948815 | fa163e0d-6f45-64a1-ca76-000000000b5d | CHANGED | create persistent directories | standalone | item={'path': '/var/lib/mysql', 'setype': 'container_file_t'} 2026-01-22 12:41:27.949887 | fa163e0d-6f45-64a1-ca76-000000000b5d | TIMING | create persistent directories | standalone | 0:02:21.914898 | 0.46s 2026-01-22 12:41:28.172621 | fa163e0d-6f45-64a1-ca76-000000000b5d | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/mariadb', 'setype': 'container_file_t'} 2026-01-22 12:41:28.174111 | fa163e0d-6f45-64a1-ca76-000000000b5d | TIMING | create persistent directories | standalone | 0:02:22.139121 | 0.68s 2026-01-22 12:41:28.184182 | fa163e0d-6f45-64a1-ca76-000000000b5d | TIMING | create persistent directories | standalone | 0:02:22.149195 | 0.69s 2026-01-22 12:41:28.205147 | fa163e0d-6f45-64a1-ca76-000000000b5e | TASK | create persistent directories 2026-01-22 12:41:28.454371 | fa163e0d-6f45-64a1-ca76-000000000b5e | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/neutron', 'setype': 'container_file_t'} 2026-01-22 12:41:28.456604 | fa163e0d-6f45-64a1-ca76-000000000b5e | TIMING | create persistent directories | standalone | 0:02:22.421607 | 0.25s 2026-01-22 12:41:28.666933 | fa163e0d-6f45-64a1-ca76-000000000b5e | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/httpd/neutron-api', 'setype': 'container_file_t'} 2026-01-22 12:41:28.668368 | fa163e0d-6f45-64a1-ca76-000000000b5e | TIMING | create persistent directories | standalone | 0:02:22.633378 | 0.46s 2026-01-22 12:41:28.678255 | fa163e0d-6f45-64a1-ca76-000000000b5e | TIMING | create persistent directories | standalone | 0:02:22.643266 | 0.47s 2026-01-22 12:41:28.698349 | fa163e0d-6f45-64a1-ca76-000000000b5f | TASK | create persistent directories 2026-01-22 12:41:28.938964 | 
fa163e0d-6f45-64a1-ca76-000000000b5f | OK | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/neutron', 'setype': 'container_file_t'} 2026-01-22 12:41:28.941042 | fa163e0d-6f45-64a1-ca76-000000000b5f | TIMING | create persistent directories | standalone | 0:02:22.906048 | 0.24s 2026-01-22 12:41:28.951205 | fa163e0d-6f45-64a1-ca76-000000000b5f | TIMING | create persistent directories | standalone | 0:02:22.916208 | 0.25s 2026-01-22 12:41:28.976810 | fa163e0d-6f45-64a1-ca76-000000000b60 | TASK | creating directory 2026-01-22 12:41:29.204899 | fa163e0d-6f45-64a1-ca76-000000000b60 | CHANGED | creating directory | standalone 2026-01-22 12:41:29.206159 | fa163e0d-6f45-64a1-ca76-000000000b60 | TIMING | creating directory | standalone | 0:02:23.171173 | 0.23s 2026-01-22 12:41:29.225778 | fa163e0d-6f45-64a1-ca76-000000000b61 | TASK | derive pci passthrough whitelist 2026-01-22 12:41:29.647714 | fa163e0d-6f45-64a1-ca76-000000000b61 | CHANGED | derive pci passthrough whitelist | standalone 2026-01-22 12:41:29.648953 | fa163e0d-6f45-64a1-ca76-000000000b61 | TIMING | derive pci passthrough whitelist | standalone | 0:02:23.613963 | 0.42s 2026-01-22 12:41:29.672959 | fa163e0d-6f45-64a1-ca76-000000000b62 | TASK | run derive_pci_passthrough_whitelist.py 2026-01-22 12:41:30.506074 | fa163e0d-6f45-64a1-ca76-000000000b62 | CHANGED | run derive_pci_passthrough_whitelist.py | standalone 2026-01-22 12:41:30.507186 | fa163e0d-6f45-64a1-ca76-000000000b62 | TIMING | run derive_pci_passthrough_whitelist.py | standalone | 0:02:24.472200 | 0.83s 2026-01-22 12:41:30.527500 | fa163e0d-6f45-64a1-ca76-000000000b63 | TASK | create /run/netns with temp namespace 2026-01-22 12:41:30.712962 | fa163e0d-6f45-64a1-ca76-000000000b63 | CHANGED | create /run/netns with temp namespace | standalone 2026-01-22 12:41:30.714341 | fa163e0d-6f45-64a1-ca76-000000000b63 | TIMING | create /run/netns with temp namespace | standalone | 0:02:24.679347 | 0.19s 2026-01-22 12:41:30.740174 | fa163e0d-6f45-64a1-ca76-000000000b64 | TASK | remove temp namespace 2026-01-22 12:41:30.991846 | fa163e0d-6f45-64a1-ca76-000000000b64 | CHANGED | remove temp namespace | standalone 2026-01-22 12:41:30.993207 | fa163e0d-6f45-64a1-ca76-000000000b64 | TIMING | remove temp namespace | standalone | 0:02:24.958214 | 0.25s 2026-01-22 12:41:31.019237 | fa163e0d-6f45-64a1-ca76-000000000b65 | TASK | create /var/lib/neutron 2026-01-22 12:41:31.227750 | fa163e0d-6f45-64a1-ca76-000000000b65 | CHANGED | create /var/lib/neutron | standalone 2026-01-22 12:41:31.229097 | fa163e0d-6f45-64a1-ca76-000000000b65 | TIMING | create /var/lib/neutron | standalone | 0:02:25.194104 | 0.21s 2026-01-22 12:41:31.253539 | fa163e0d-6f45-64a1-ca76-000000000b66 | TASK | set conditions 2026-01-22 12:41:31.282072 | fa163e0d-6f45-64a1-ca76-000000000b66 | OK | set conditions | standalone 2026-01-22 12:41:31.283154 | fa163e0d-6f45-64a1-ca76-000000000b66 | TIMING | set conditions | standalone | 0:02:25.248163 | 0.03s 2026-01-22 12:41:31.307330 | fa163e0d-6f45-64a1-ca76-000000000b67 | TASK | create kill_scripts directory within /var/lib/neutron 2026-01-22 12:41:31.526831 | fa163e0d-6f45-64a1-ca76-000000000b67 | CHANGED | create kill_scripts directory within /var/lib/neutron | standalone 2026-01-22 12:41:31.528181 | fa163e0d-6f45-64a1-ca76-000000000b67 | TIMING | create kill_scripts directory within /var/lib/neutron | standalone | 0:02:25.493190 | 0.22s 2026-01-22 12:41:31.552391 | fa163e0d-6f45-64a1-ca76-000000000b68 | TASK | create dnsmasq dhcp kill script 
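The many "create persistent directories" tasks logged above (and the similar ones that follow for nova, libvirt, octavia and rabbitmq) all follow one pattern: loop over a list of items, each carrying a path plus an optional mode and SELinux type, and pass them to the file module so the directory exists with container_file_t before any container bind-mounts it. Below is a minimal sketch of that pattern, assuming the host/play names and item lists shown here for illustration only; it is not the actual tripleo-ansible task file.

# Illustrative sketch only -- not the real tripleo-ansible source.
- hosts: standalone
  become: true
  tasks:
    - name: create persistent directories (pattern used by the tasks logged above)
      ansible.builtin.file:
        path: "{{ item.path }}"
        state: directory
        # mode and setype are optional per item, exactly as in the logged loop items
        mode: "{{ item.mode | default(omit) }}"
        setype: "{{ item.setype | default(omit) }}"
      loop:
        - { mode: '0750', path: /var/log/containers/neutron, setype: container_file_t }
        - { mode: '0750', path: /var/log/containers/httpd/neutron-api, setype: container_file_t }
        - { path: /var/lib/neutron, setype: container_file_t }

The same loop-over-items style drives the related tasks in this section, such as the SELinux fcontext entries for iscsi and the per-service log directories; only the item lists change.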
2026-01-22 12:41:32.026079 | fa163e0d-6f45-64a1-ca76-000000000b68 | CHANGED | create dnsmasq dhcp kill script | standalone 2026-01-22 12:41:32.027486 | fa163e0d-6f45-64a1-ca76-000000000b68 | TIMING | create dnsmasq dhcp kill script | standalone | 0:02:25.992493 | 0.47s 2026-01-22 12:41:32.054143 | fa163e0d-6f45-64a1-ca76-000000000b69 | TASK | create haproxy kill script 2026-01-22 12:41:32.464519 | fa163e0d-6f45-64a1-ca76-000000000b69 | CHANGED | create haproxy kill script | standalone 2026-01-22 12:41:32.465887 | fa163e0d-6f45-64a1-ca76-000000000b69 | TIMING | create haproxy kill script | standalone | 0:02:26.430896 | 0.41s 2026-01-22 12:41:32.490350 | fa163e0d-6f45-64a1-ca76-000000000b6a | TASK | create persistent directories 2026-01-22 12:41:32.747935 | fa163e0d-6f45-64a1-ca76-000000000b6a | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/nova', 'setype': 'container_file_t'} 2026-01-22 12:41:32.750131 | fa163e0d-6f45-64a1-ca76-000000000b6a | TIMING | create persistent directories | standalone | 0:02:26.715122 | 0.26s 2026-01-22 12:41:32.917989 | fa163e0d-6f45-64a1-ca76-000000000b6a | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/httpd/nova-api', 'setype': 'container_file_t'} 2026-01-22 12:41:32.919591 | fa163e0d-6f45-64a1-ca76-000000000b6a | TIMING | create persistent directories | standalone | 0:02:26.884601 | 0.43s 2026-01-22 12:41:32.930428 | fa163e0d-6f45-64a1-ca76-000000000b6a | TIMING | create persistent directories | standalone | 0:02:26.895431 | 0.44s 2026-01-22 12:41:32.956731 | fa163e0d-6f45-64a1-ca76-000000000b6b | TASK | create persistent directories 2026-01-22 12:41:33.206211 | fa163e0d-6f45-64a1-ca76-000000000b6b | OK | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/nova', 'setype': 'container_file_t'} 2026-01-22 12:41:33.207498 | fa163e0d-6f45-64a1-ca76-000000000b6b | TIMING | create persistent directories | standalone | 0:02:27.172505 | 0.25s 2026-01-22 12:41:33.213746 | fa163e0d-6f45-64a1-ca76-000000000b6b | TIMING | create persistent directories | standalone | 0:02:27.178746 | 0.26s 2026-01-22 12:41:33.239874 | fa163e0d-6f45-64a1-ca76-000000000b6c | TASK | create persistent directories 2026-01-22 12:41:33.474188 | fa163e0d-6f45-64a1-ca76-000000000b6c | OK | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/nova', 'setype': 'container_file_t'} 2026-01-22 12:41:33.475454 | fa163e0d-6f45-64a1-ca76-000000000b6c | TIMING | create persistent directories | standalone | 0:02:27.440463 | 0.23s 2026-01-22 12:41:33.705330 | fa163e0d-6f45-64a1-ca76-000000000b6c | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/httpd/nova-metadata', 'setype': 'container_file_t'} 2026-01-22 12:41:33.707104 | fa163e0d-6f45-64a1-ca76-000000000b6c | TIMING | create persistent directories | standalone | 0:02:27.672114 | 0.47s 2026-01-22 12:41:33.717806 | fa163e0d-6f45-64a1-ca76-000000000b6c | TIMING | create persistent directories | standalone | 0:02:27.682808 | 0.48s 2026-01-22 12:41:33.744435 | fa163e0d-6f45-64a1-ca76-000000000b6d | TASK | Check for /var/lib/nova/instances directory exitance 2026-01-22 12:41:33.967103 | fa163e0d-6f45-64a1-ca76-000000000b6d | OK | Check for /var/lib/nova/instances directory exitance | standalone 2026-01-22 12:41:33.968407 | fa163e0d-6f45-64a1-ca76-000000000b6d | TIMING | Check for /var/lib/nova/instances 
directory exitance | standalone | 0:02:27.933414 | 0.22s 2026-01-22 12:41:33.994227 | fa163e0d-6f45-64a1-ca76-000000000b6e | TASK | create persistent directories 2026-01-22 12:41:34.255152 | fa163e0d-6f45-64a1-ca76-000000000b6e | CHANGED | create persistent directories | standalone | item={'path': '/var/lib/nova', 'setype': 'container_file_t'} 2026-01-22 12:41:34.256642 | fa163e0d-6f45-64a1-ca76-000000000b6e | TIMING | create persistent directories | standalone | 0:02:28.221644 | 0.26s 2026-01-22 12:41:34.499185 | fa163e0d-6f45-64a1-ca76-000000000b6e | CHANGED | create persistent directories | standalone | item={'path': '/var/lib/_nova_secontext', 'setype': 'container_file_t'} 2026-01-22 12:41:34.500686 | fa163e0d-6f45-64a1-ca76-000000000b6e | TIMING | create persistent directories | standalone | 0:02:28.465661 | 0.50s 2026-01-22 12:41:34.707430 | fa163e0d-6f45-64a1-ca76-000000000b6e | CHANGED | create persistent directories | standalone | item={'path': '/var/lib/nova/instances', 'setype': 'container_file_t'} 2026-01-22 12:41:34.709465 | fa163e0d-6f45-64a1-ca76-000000000b6e | TIMING | create persistent directories | standalone | 0:02:28.674468 | 0.71s 2026-01-22 12:41:34.933190 | fa163e0d-6f45-64a1-ca76-000000000b6e | CHANGED | create persistent directories | standalone | item={'path': '/var/lib/libvirt', 'setype': 'container_file_t'} 2026-01-22 12:41:34.935058 | fa163e0d-6f45-64a1-ca76-000000000b6e | TIMING | create persistent directories | standalone | 0:02:28.900070 | 0.94s 2026-01-22 12:41:35.168883 | fa163e0d-6f45-64a1-ca76-000000000b6e | OK | create persistent directories | standalone | item={'path': '/etc/tmpfiles.d'} 2026-01-22 12:41:35.170819 | fa163e0d-6f45-64a1-ca76-000000000b6e | TIMING | create persistent directories | standalone | 0:02:29.135829 | 1.17s 2026-01-22 12:41:35.182157 | fa163e0d-6f45-64a1-ca76-000000000b6e | TIMING | create persistent directories | standalone | 0:02:29.147168 | 1.19s 2026-01-22 12:41:35.209240 | fa163e0d-6f45-64a1-ca76-000000000b6f | TASK | ensure /run/nova is present upon reboot 2026-01-22 12:41:35.652257 | fa163e0d-6f45-64a1-ca76-000000000b6f | CHANGED | ensure /run/nova is present upon reboot | standalone 2026-01-22 12:41:35.653720 | fa163e0d-6f45-64a1-ca76-000000000b6f | TIMING | ensure /run/nova is present upon reboot | standalone | 0:02:29.618723 | 0.44s 2026-01-22 12:41:35.679363 | fa163e0d-6f45-64a1-ca76-000000000b70 | TASK | create tempfiles 2026-01-22 12:41:35.986104 | fa163e0d-6f45-64a1-ca76-000000000b70 | CHANGED | create tempfiles | standalone 2026-01-22 12:41:35.988036 | fa163e0d-6f45-64a1-ca76-000000000b70 | TIMING | create tempfiles | standalone | 0:02:29.953040 | 0.31s 2026-01-22 12:41:36.013578 | fa163e0d-6f45-64a1-ca76-000000000b71 | TASK | Mount Nova NFS Share 2026-01-22 12:41:36.043385 | fa163e0d-6f45-64a1-ca76-000000000b71 | SKIPPED | Mount Nova NFS Share | standalone 2026-01-22 12:41:36.044680 | fa163e0d-6f45-64a1-ca76-000000000b71 | TIMING | Mount Nova NFS Share | standalone | 0:02:30.009653 | 0.03s 2026-01-22 12:41:36.069342 | fa163e0d-6f45-64a1-ca76-000000000b73 | TASK | check systemd-container package installed or not 2026-01-22 12:41:36.097860 | fa163e0d-6f45-64a1-ca76-000000000b73 | SKIPPED | check systemd-container package installed or not | standalone 2026-01-22 12:41:36.099134 | fa163e0d-6f45-64a1-ca76-000000000b73 | TIMING | check systemd-container package installed or not | standalone | 0:02:30.064141 | 0.03s 2026-01-22 12:41:36.123561 | fa163e0d-6f45-64a1-ca76-000000000b74 | TASK | get latest package from 
downloaded package versions 2026-01-22 12:41:36.150437 | fa163e0d-6f45-64a1-ca76-000000000b74 | SKIPPED | get latest package from downloaded package versions | standalone 2026-01-22 12:41:36.151801 | fa163e0d-6f45-64a1-ca76-000000000b74 | TIMING | get latest package from downloaded package versions | standalone | 0:02:30.116808 | 0.03s 2026-01-22 12:41:36.178524 | fa163e0d-6f45-64a1-ca76-000000000b75 | TASK | Install systemd-container package on boot 2026-01-22 12:41:36.207579 | fa163e0d-6f45-64a1-ca76-000000000b75 | SKIPPED | Install systemd-container package on boot | standalone 2026-01-22 12:41:36.208736 | fa163e0d-6f45-64a1-ca76-000000000b75 | TIMING | Install systemd-container package on boot | standalone | 0:02:30.173743 | 0.03s 2026-01-22 12:41:36.233032 | fa163e0d-6f45-64a1-ca76-000000000b76 | TASK | enable install-systemd-container on compute boot 2026-01-22 12:41:36.262774 | fa163e0d-6f45-64a1-ca76-000000000b76 | SKIPPED | enable install-systemd-container on compute boot | standalone 2026-01-22 12:41:36.263822 | fa163e0d-6f45-64a1-ca76-000000000b76 | TIMING | enable install-systemd-container on compute boot | standalone | 0:02:30.228830 | 0.03s 2026-01-22 12:41:36.287178 | fa163e0d-6f45-64a1-ca76-000000000b77 | TASK | check if install-systemd-container service exists 2026-01-22 12:41:36.315468 | fa163e0d-6f45-64a1-ca76-000000000b77 | SKIPPED | check if install-systemd-container service exists | standalone 2026-01-22 12:41:36.316503 | fa163e0d-6f45-64a1-ca76-000000000b77 | TIMING | check if install-systemd-container service exists | standalone | 0:02:30.281512 | 0.03s 2026-01-22 12:41:36.340602 | fa163e0d-6f45-64a1-ca76-000000000b78 | TASK | disable and mask install-systemd-container on compute boot 2026-01-22 12:41:36.368951 | fa163e0d-6f45-64a1-ca76-000000000b78 | SKIPPED | disable and mask install-systemd-container on compute boot | standalone 2026-01-22 12:41:36.370036 | fa163e0d-6f45-64a1-ca76-000000000b78 | TIMING | disable and mask install-systemd-container on compute boot | standalone | 0:02:30.335045 | 0.03s 2026-01-22 12:41:36.394274 | fa163e0d-6f45-64a1-ca76-000000000b7a | TASK | is Nova Resume Guests State On Host Boot enabled 2026-01-22 12:41:36.433469 | fa163e0d-6f45-64a1-ca76-000000000b7a | OK | is Nova Resume Guests State On Host Boot enabled | standalone 2026-01-22 12:41:36.434515 | fa163e0d-6f45-64a1-ca76-000000000b7a | TIMING | is Nova Resume Guests State On Host Boot enabled | standalone | 0:02:30.399526 | 0.04s 2026-01-22 12:41:36.459446 | fa163e0d-6f45-64a1-ca76-000000000b7c | TASK | libvirt-guests unit to stop nova_compute container before shutdown VMs 2026-01-22 12:41:36.490798 | fa163e0d-6f45-64a1-ca76-000000000b7c | SKIPPED | libvirt-guests unit to stop nova_compute container before shutdown VMs | standalone 2026-01-22 12:41:36.491882 | fa163e0d-6f45-64a1-ca76-000000000b7c | TIMING | libvirt-guests unit to stop nova_compute container before shutdown VMs | standalone | 0:02:30.456891 | 0.03s 2026-01-22 12:41:36.515832 | fa163e0d-6f45-64a1-ca76-000000000b7d | TASK | Making sure virt-guest-shutdown.target is present 2026-01-22 12:41:36.548040 | fa163e0d-6f45-64a1-ca76-000000000b7d | SKIPPED | Making sure virt-guest-shutdown.target is present | standalone 2026-01-22 12:41:36.549081 | fa163e0d-6f45-64a1-ca76-000000000b7d | TIMING | Making sure virt-guest-shutdown.target is present | standalone | 0:02:30.514090 | 0.03s 2026-01-22 12:41:36.573243 | fa163e0d-6f45-64a1-ca76-000000000b7e | TASK | libvirt-guests enable VM shutdown on compute reboot/shutdown 
2026-01-22 12:41:36.603358 | fa163e0d-6f45-64a1-ca76-000000000b7e | SKIPPED | libvirt-guests enable VM shutdown on compute reboot/shutdown | standalone 2026-01-22 12:41:36.604413 | fa163e0d-6f45-64a1-ca76-000000000b7e | TIMING | libvirt-guests enable VM shutdown on compute reboot/shutdown | standalone | 0:02:30.569422 | 0.03s 2026-01-22 12:41:36.629250 | fa163e0d-6f45-64a1-ca76-000000000b80 | TASK | libvirt-guests unit to stop nova_compute container before shutdown VMs (monolithic libvirt) 2026-01-22 12:41:36.660283 | fa163e0d-6f45-64a1-ca76-000000000b80 | SKIPPED | libvirt-guests unit to stop nova_compute container before shutdown VMs (monolithic libvirt) | standalone 2026-01-22 12:41:36.661433 | fa163e0d-6f45-64a1-ca76-000000000b80 | TIMING | libvirt-guests unit to stop nova_compute container before shutdown VMs (monolithic libvirt) | standalone | 0:02:30.626442 | 0.03s 2026-01-22 12:41:36.685964 | fa163e0d-6f45-64a1-ca76-000000000b81 | TASK | libvirt-guests unit to stop nova_compute container before shutdown VMs (modular libvirt) 2026-01-22 12:41:36.717317 | fa163e0d-6f45-64a1-ca76-000000000b81 | SKIPPED | libvirt-guests unit to stop nova_compute container before shutdown VMs (modular libvirt) | standalone 2026-01-22 12:41:36.718397 | fa163e0d-6f45-64a1-ca76-000000000b81 | TIMING | libvirt-guests unit to stop nova_compute container before shutdown VMs (modular libvirt) | standalone | 0:02:30.683405 | 0.03s 2026-01-22 12:41:36.742409 | fa163e0d-6f45-64a1-ca76-000000000b82 | TASK | Making sure virt-guest-shutdown.target is present 2026-01-22 12:41:36.774858 | fa163e0d-6f45-64a1-ca76-000000000b82 | SKIPPED | Making sure virt-guest-shutdown.target is present | standalone 2026-01-22 12:41:36.775944 | fa163e0d-6f45-64a1-ca76-000000000b82 | TIMING | Making sure virt-guest-shutdown.target is present | standalone | 0:02:30.740953 | 0.03s 2026-01-22 12:41:36.800030 | fa163e0d-6f45-64a1-ca76-000000000b83 | TASK | tripleo_nova_libvirt_guests enable VM shutdown on compute reboot/shutdown 2026-01-22 12:41:36.830261 | fa163e0d-6f45-64a1-ca76-000000000b83 | SKIPPED | tripleo_nova_libvirt_guests enable VM shutdown on compute reboot/shutdown | standalone 2026-01-22 12:41:36.831386 | fa163e0d-6f45-64a1-ca76-000000000b83 | TIMING | tripleo_nova_libvirt_guests enable VM shutdown on compute reboot/shutdown | standalone | 0:02:30.796393 | 0.03s 2026-01-22 12:41:36.855497 | fa163e0d-6f45-64a1-ca76-000000000b85 | TASK | Do we prepend nova startup with a delay 2026-01-22 12:41:36.884023 | fa163e0d-6f45-64a1-ca76-000000000b85 | OK | Do we prepend nova startup with a delay | standalone 2026-01-22 12:41:36.885272 | fa163e0d-6f45-64a1-ca76-000000000b85 | TIMING | Do we prepend nova startup with a delay | standalone | 0:02:30.850281 | 0.03s 2026-01-22 12:41:36.910199 | fa163e0d-6f45-64a1-ca76-000000000b86 | TASK | install nova-compute delay wrapper script 2026-01-22 12:41:36.959742 | fa163e0d-6f45-64a1-ca76-000000000b86 | SKIPPED | install nova-compute delay wrapper script | standalone 2026-01-22 12:41:36.960921 | fa163e0d-6f45-64a1-ca76-000000000b86 | TIMING | install nova-compute delay wrapper script | standalone | 0:02:30.925931 | 0.05s 2026-01-22 12:41:36.985457 | fa163e0d-6f45-64a1-ca76-000000000b87 | TASK | Is irqbalance enabled 2026-01-22 12:41:37.019554 | fa163e0d-6f45-64a1-ca76-000000000b87 | OK | Is irqbalance enabled | standalone 2026-01-22 12:41:37.020615 | fa163e0d-6f45-64a1-ca76-000000000b87 | TIMING | Is irqbalance enabled | standalone | 0:02:30.985625 | 0.03s 2026-01-22 12:41:37.044308 | 
fa163e0d-6f45-64a1-ca76-000000000b88 | TASK | disable irqbalance service on compute 2026-01-22 12:41:37.073825 | fa163e0d-6f45-64a1-ca76-000000000b88 | SKIPPED | disable irqbalance service on compute | standalone 2026-01-22 12:41:37.074880 | fa163e0d-6f45-64a1-ca76-000000000b88 | TIMING | disable irqbalance service on compute | standalone | 0:02:31.039890 | 0.03s 2026-01-22 12:41:37.099499 | fa163e0d-6f45-64a1-ca76-000000000b89 | TASK | create persistent directories 2026-01-22 12:41:37.366952 | fa163e0d-6f45-64a1-ca76-000000000b89 | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/libvirt', 'setype': 'container_file_t'} 2026-01-22 12:41:37.368519 | fa163e0d-6f45-64a1-ca76-000000000b89 | TIMING | create persistent directories | standalone | 0:02:31.333525 | 0.27s 2026-01-22 12:41:37.377832 | fa163e0d-6f45-64a1-ca76-000000000b89 | TIMING | create persistent directories | standalone | 0:02:31.342833 | 0.28s 2026-01-22 12:41:37.402282 | fa163e0d-6f45-64a1-ca76-000000000b8a | TASK | create libvirt persistent data directories 2026-01-22 12:41:37.686558 | fa163e0d-6f45-64a1-ca76-000000000b8a | CHANGED | create libvirt persistent data directories | standalone | item={'path': '/etc/libvirt', 'setype': 'container_file_t'} 2026-01-22 12:41:37.689083 | fa163e0d-6f45-64a1-ca76-000000000b8a | TIMING | create libvirt persistent data directories | standalone | 0:02:31.654081 | 0.29s 2026-01-22 12:41:37.924193 | fa163e0d-6f45-64a1-ca76-000000000b8a | CHANGED | create libvirt persistent data directories | standalone | item={'path': '/etc/libvirt/secrets', 'setype': 'container_file_t'} 2026-01-22 12:41:37.925312 | fa163e0d-6f45-64a1-ca76-000000000b8a | TIMING | create libvirt persistent data directories | standalone | 0:02:31.890323 | 0.52s 2026-01-22 12:41:38.156198 | fa163e0d-6f45-64a1-ca76-000000000b8a | CHANGED | create libvirt persistent data directories | standalone | item={'path': '/etc/libvirt/qemu', 'setype': 'container_file_t'} 2026-01-22 12:41:38.158105 | fa163e0d-6f45-64a1-ca76-000000000b8a | TIMING | create libvirt persistent data directories | standalone | 0:02:32.123114 | 0.75s 2026-01-22 12:41:38.380316 | fa163e0d-6f45-64a1-ca76-000000000b8a | OK | create libvirt persistent data directories | standalone | item={'path': '/var/lib/libvirt', 'setype': 'container_file_t'} 2026-01-22 12:41:38.382316 | fa163e0d-6f45-64a1-ca76-000000000b8a | TIMING | create libvirt persistent data directories | standalone | 0:02:32.347326 | 0.98s 2026-01-22 12:41:38.598810 | fa163e0d-6f45-64a1-ca76-000000000b8a | CHANGED | create libvirt persistent data directories | standalone | item={'path': '/var/cache/libvirt'} 2026-01-22 12:41:38.600871 | fa163e0d-6f45-64a1-ca76-000000000b8a | TIMING | create libvirt persistent data directories | standalone | 0:02:32.565878 | 1.20s 2026-01-22 12:41:38.822714 | fa163e0d-6f45-64a1-ca76-000000000b8a | OK | create libvirt persistent data directories | standalone | item={'path': '/var/lib/nova', 'setype': 'container_file_t'} 2026-01-22 12:41:38.824572 | fa163e0d-6f45-64a1-ca76-000000000b8a | TIMING | create libvirt persistent data directories | standalone | 0:02:32.789580 | 1.42s 2026-01-22 12:41:39.074556 | fa163e0d-6f45-64a1-ca76-000000000b8a | CHANGED | create libvirt persistent data directories | standalone | item={'path': '/run/libvirt'} 2026-01-22 12:41:39.075769 | fa163e0d-6f45-64a1-ca76-000000000b8a | TIMING | create libvirt persistent data directories | standalone | 0:02:33.040779 | 1.67s 2026-01-22 12:41:39.326000 | 
fa163e0d-6f45-64a1-ca76-000000000b8a | CHANGED | create libvirt persistent data directories | standalone | item={'mode': '0770', 'path': '/var/log/containers/libvirt/swtpm', 'setype': 'container_file_t'} 2026-01-22 12:41:39.327961 | fa163e0d-6f45-64a1-ca76-000000000b8a | TIMING | create libvirt persistent data directories | standalone | 0:02:33.292969 | 1.92s 2026-01-22 12:41:39.340087 | fa163e0d-6f45-64a1-ca76-000000000b8a | TIMING | create libvirt persistent data directories | standalone | 0:02:33.305090 | 1.94s 2026-01-22 12:41:39.368305 | fa163e0d-6f45-64a1-ca76-000000000b8b | TASK | ensure qemu group is present on the host 2026-01-22 12:41:40.021560 | fa163e0d-6f45-64a1-ca76-000000000b8b | CHANGED | ensure qemu group is present on the host | standalone 2026-01-22 12:41:40.023188 | fa163e0d-6f45-64a1-ca76-000000000b8b | TIMING | ensure qemu group is present on the host | standalone | 0:02:33.988193 | 0.65s 2026-01-22 12:41:40.048451 | fa163e0d-6f45-64a1-ca76-000000000b8c | TASK | ensure qemu user is present on the host 2026-01-22 12:41:40.740031 | fa163e0d-6f45-64a1-ca76-000000000b8c | CHANGED | ensure qemu user is present on the host | standalone 2026-01-22 12:41:40.741562 | fa163e0d-6f45-64a1-ca76-000000000b8c | TIMING | ensure qemu user is present on the host | standalone | 0:02:34.706570 | 0.69s 2026-01-22 12:41:40.767823 | fa163e0d-6f45-64a1-ca76-000000000b8d | TASK | create directory for vhost-user sockets with qemu ownership 2026-01-22 12:41:41.015232 | fa163e0d-6f45-64a1-ca76-000000000b8d | CHANGED | create directory for vhost-user sockets with qemu ownership | standalone 2026-01-22 12:41:41.017055 | fa163e0d-6f45-64a1-ca76-000000000b8d | TIMING | create directory for vhost-user sockets with qemu ownership | standalone | 0:02:34.982059 | 0.25s 2026-01-22 12:41:41.042022 | fa163e0d-6f45-64a1-ca76-000000000b8e | TASK | check if libvirt is installed 2026-01-22 12:41:41.294536 | fa163e0d-6f45-64a1-ca76-000000000b8e | CHANGED | check if libvirt is installed | standalone 2026-01-22 12:41:41.296005 | fa163e0d-6f45-64a1-ca76-000000000b8e | TIMING | check if libvirt is installed | standalone | 0:02:35.261012 | 0.25s 2026-01-22 12:41:41.321345 | fa163e0d-6f45-64a1-ca76-000000000b8f | TASK | make sure libvirt services are disabled and masked 2026-01-22 12:41:41.354809 | fa163e0d-6f45-64a1-ca76-000000000b8f | SKIPPED | make sure libvirt services are disabled and masked | standalone | item=libvirtd.service 2026-01-22 12:41:41.361693 | fa163e0d-6f45-64a1-ca76-000000000b8f | SKIPPED | make sure libvirt services are disabled and masked | standalone | item=virtlogd.socket 2026-01-22 12:41:41.368163 | fa163e0d-6f45-64a1-ca76-000000000b8f | TIMING | make sure libvirt services are disabled and masked | standalone | 0:02:35.333174 | 0.05s 2026-01-22 12:41:41.387843 | fa163e0d-6f45-64a1-ca76-000000000b90 | TASK | ensure /run/libvirt is present upon reboot 2026-01-22 12:41:41.888883 | fa163e0d-6f45-64a1-ca76-000000000b90 | CHANGED | ensure /run/libvirt is present upon reboot | standalone 2026-01-22 12:41:41.890513 | fa163e0d-6f45-64a1-ca76-000000000b90 | TIMING | ensure /run/libvirt is present upon reboot | standalone | 0:02:35.855515 | 0.50s 2026-01-22 12:41:41.915222 | fa163e0d-6f45-64a1-ca76-000000000b91 | TASK | Enable os_enable_vtpm SELinux boolean for vTPM 2026-01-22 12:41:43.035773 | fa163e0d-6f45-64a1-ca76-000000000b91 | CHANGED | Enable os_enable_vtpm SELinux boolean for vTPM | standalone 2026-01-22 12:41:43.038248 | fa163e0d-6f45-64a1-ca76-000000000b91 | TIMING | Enable os_enable_vtpm 
SELinux boolean for vTPM | standalone | 0:02:37.003252 | 1.12s 2026-01-22 12:41:43.065385 | fa163e0d-6f45-64a1-ca76-000000000b92 | TASK | remove gnutls-qemu.config 2026-01-22 12:41:43.311163 | fa163e0d-6f45-64a1-ca76-000000000b92 | OK | remove gnutls-qemu.config | standalone 2026-01-22 12:41:43.312996 | fa163e0d-6f45-64a1-ca76-000000000b92 | TIMING | remove gnutls-qemu.config | standalone | 0:02:37.277991 | 0.25s 2026-01-22 12:41:43.343601 | fa163e0d-6f45-64a1-ca76-000000000b93 | TASK | update crypto policies 2026-01-22 12:41:43.375564 | fa163e0d-6f45-64a1-ca76-000000000b93 | SKIPPED | update crypto policies | standalone 2026-01-22 12:41:43.377238 | fa163e0d-6f45-64a1-ca76-000000000b93 | TIMING | update crypto policies | standalone | 0:02:37.342238 | 0.03s 2026-01-22 12:41:43.399134 | fa163e0d-6f45-64a1-ca76-000000000b94 | TASK | Create libvirt persistent data directories 2026-01-22 12:41:43.641711 | fa163e0d-6f45-64a1-ca76-000000000b94 | OK | Create libvirt persistent data directories | standalone | item={'path': '/run/libvirt', 'setype': 'virt_var_run_t'} 2026-01-22 12:41:43.644822 | fa163e0d-6f45-64a1-ca76-000000000b94 | TIMING | Create libvirt persistent data directories | standalone | 0:02:37.609826 | 0.24s 2026-01-22 12:41:43.657474 | fa163e0d-6f45-64a1-ca76-000000000b94 | TIMING | Create libvirt persistent data directories | standalone | 0:02:37.622474 | 0.26s 2026-01-22 12:41:43.683877 | fa163e0d-6f45-64a1-ca76-000000000b95 | TASK | create persistent directories 2026-01-22 12:41:43.953976 | fa163e0d-6f45-64a1-ca76-000000000b95 | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/openvswitch', 'setype': 'container_file_t'} 2026-01-22 12:41:43.957737 | fa163e0d-6f45-64a1-ca76-000000000b95 | TIMING | create persistent directories | standalone | 0:02:37.922673 | 0.27s 2026-01-22 12:41:44.151537 | fa163e0d-6f45-64a1-ca76-000000000b95 | CHANGED | create persistent directories | standalone | item={'path': '/var/lib/openvswitch/ovn', 'setype': 'container_file_t'} 2026-01-22 12:41:44.153200 | fa163e0d-6f45-64a1-ca76-000000000b95 | TIMING | create persistent directories | standalone | 0:02:38.118215 | 0.47s 2026-01-22 12:41:44.158466 | fa163e0d-6f45-64a1-ca76-000000000b95 | TIMING | create persistent directories | standalone | 0:02:38.123467 | 0.47s 2026-01-22 12:41:44.185081 | fa163e0d-6f45-64a1-ca76-000000000b96 | TASK | Copy in cleanup script 2026-01-22 12:41:44.638084 | fa163e0d-6f45-64a1-ca76-000000000b96 | CHANGED | Copy in cleanup script | standalone 2026-01-22 12:41:44.639132 | fa163e0d-6f45-64a1-ca76-000000000b96 | TIMING | Copy in cleanup script | standalone | 0:02:38.604143 | 0.45s 2026-01-22 12:41:44.663452 | fa163e0d-6f45-64a1-ca76-000000000b97 | TASK | Copy in cleanup service 2026-01-22 12:41:45.127699 | fa163e0d-6f45-64a1-ca76-000000000b97 | CHANGED | Copy in cleanup service | standalone 2026-01-22 12:41:45.129165 | fa163e0d-6f45-64a1-ca76-000000000b97 | TIMING | Copy in cleanup service | standalone | 0:02:39.094170 | 0.46s 2026-01-22 12:41:45.153322 | fa163e0d-6f45-64a1-ca76-000000000b98 | TASK | Enabling the cleanup service 2026-01-22 12:41:45.804573 | fa163e0d-6f45-64a1-ca76-000000000b98 | CHANGED | Enabling the cleanup service | standalone 2026-01-22 12:41:45.806365 | fa163e0d-6f45-64a1-ca76-000000000b98 | TIMING | Enabling the cleanup service | standalone | 0:02:39.771372 | 0.65s 2026-01-22 12:41:45.831971 | fa163e0d-6f45-64a1-ca76-000000000b99 | TASK | create persistent directories 2026-01-22 12:41:46.091925 | 
fa163e0d-6f45-64a1-ca76-000000000b99 | OK | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/openvswitch', 'setype': 'container_file_t'} 2026-01-22 12:41:46.095699 | fa163e0d-6f45-64a1-ca76-000000000b99 | TIMING | create persistent directories | standalone | 0:02:40.060700 | 0.26s 2026-01-22 12:41:46.302412 | fa163e0d-6f45-64a1-ca76-000000000b99 | OK | create persistent directories | standalone | item={'path': '/var/lib/openvswitch/ovn', 'setype': 'container_file_t'} 2026-01-22 12:41:46.305398 | fa163e0d-6f45-64a1-ca76-000000000b99 | TIMING | create persistent directories | standalone | 0:02:40.270406 | 0.47s 2026-01-22 12:41:46.314725 | fa163e0d-6f45-64a1-ca76-000000000b99 | TIMING | create persistent directories | standalone | 0:02:40.279724 | 0.48s 2026-01-22 12:41:46.339917 | fa163e0d-6f45-64a1-ca76-000000000b9a | TASK | set conditions 2026-01-22 12:41:46.379316 | fa163e0d-6f45-64a1-ca76-000000000b9a | OK | set conditions | standalone 2026-01-22 12:41:46.380793 | fa163e0d-6f45-64a1-ca76-000000000b9a | TIMING | set conditions | standalone | 0:02:40.345800 | 0.04s 2026-01-22 12:41:46.405303 | fa163e0d-6f45-64a1-ca76-000000000b9b | TASK | create persistent directories 2026-01-22 12:41:46.632032 | fa163e0d-6f45-64a1-ca76-000000000b9b | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/octavia', 'setype': 'container_file_t'} 2026-01-22 12:41:46.633595 | fa163e0d-6f45-64a1-ca76-000000000b9b | TIMING | create persistent directories | standalone | 0:02:40.598595 | 0.23s 2026-01-22 12:41:46.874453 | fa163e0d-6f45-64a1-ca76-000000000b9b | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/httpd/octavia-api', 'setype': 'container_file_t'} 2026-01-22 12:41:46.875557 | fa163e0d-6f45-64a1-ca76-000000000b9b | TIMING | create persistent directories | standalone | 0:02:40.840567 | 0.47s 2026-01-22 12:41:47.082213 | fa163e0d-6f45-64a1-ca76-000000000b9b | CHANGED | create persistent directories | standalone | item={'mode': '0755', 'path': '/run/octavia', 'setype': 'container_file_t'} 2026-01-22 12:41:47.083908 | fa163e0d-6f45-64a1-ca76-000000000b9b | TIMING | create persistent directories | standalone | 0:02:41.048916 | 0.68s 2026-01-22 12:41:47.095051 | fa163e0d-6f45-64a1-ca76-000000000b9b | TIMING | create persistent directories | standalone | 0:02:41.060051 | 0.69s 2026-01-22 12:41:47.120498 | fa163e0d-6f45-64a1-ca76-000000000b9c | TASK | ensure /run/octavia is present upon reboot 2026-01-22 12:41:47.591624 | fa163e0d-6f45-64a1-ca76-000000000b9c | CHANGED | ensure /run/octavia is present upon reboot | standalone 2026-01-22 12:41:47.592663 | fa163e0d-6f45-64a1-ca76-000000000b9c | TIMING | ensure /run/octavia is present upon reboot | standalone | 0:02:41.557653 | 0.47s 2026-01-22 12:41:47.612322 | fa163e0d-6f45-64a1-ca76-000000000b9d | TASK | create persistent directories 2026-01-22 12:41:47.829744 | fa163e0d-6f45-64a1-ca76-000000000b9d | OK | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/octavia', 'setype': 'container_file_t'} 2026-01-22 12:41:47.831087 | fa163e0d-6f45-64a1-ca76-000000000b9d | TIMING | create persistent directories | standalone | 0:02:41.796101 | 0.22s 2026-01-22 12:41:47.988776 | fa163e0d-6f45-64a1-ca76-000000000b9d | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/octavia-amphorae', 'setype': 'container_file_t'} 2026-01-22 
12:41:47.989929 | fa163e0d-6f45-64a1-ca76-000000000b9d | TIMING | create persistent directories | standalone | 0:02:41.954947 | 0.38s 2026-01-22 12:41:48.000795 | fa163e0d-6f45-64a1-ca76-000000000b9d | TIMING | create persistent directories | standalone | 0:02:41.965799 | 0.39s 2026-01-22 12:41:48.025937 | fa163e0d-6f45-64a1-ca76-000000000b9e | TASK | create empty log files 2026-01-22 12:41:48.277250 | fa163e0d-6f45-64a1-ca76-000000000b9e | CHANGED | create empty log files | standalone | item={'mode': '0644', 'path': '/var/log/containers/octavia-amphorae/octavia-tenant-traffic.log', 'setype': 'container_file_t'} 2026-01-22 12:41:48.279627 | fa163e0d-6f45-64a1-ca76-000000000b9e | TIMING | create empty log files | standalone | 0:02:42.244627 | 0.25s 2026-01-22 12:41:48.488688 | fa163e0d-6f45-64a1-ca76-000000000b9e | CHANGED | create empty log files | standalone | item={'mode': '0644', 'path': '/var/log/containers/octavia-amphorae/octavia-amphora.log', 'setype': 'container_file_t'} 2026-01-22 12:41:48.489418 | fa163e0d-6f45-64a1-ca76-000000000b9e | TIMING | create empty log files | standalone | 0:02:42.454437 | 0.46s 2026-01-22 12:41:48.494146 | fa163e0d-6f45-64a1-ca76-000000000b9e | TIMING | create empty log files | standalone | 0:02:42.459157 | 0.47s 2026-01-22 12:41:48.511498 | fa163e0d-6f45-64a1-ca76-000000000b9f | TASK | create persistent directories 2026-01-22 12:41:48.700424 | fa163e0d-6f45-64a1-ca76-000000000b9f | OK | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/octavia', 'setype': 'container_file_t'} 2026-01-22 12:41:48.701998 | fa163e0d-6f45-64a1-ca76-000000000b9f | TIMING | create persistent directories | standalone | 0:02:42.666998 | 0.19s 2026-01-22 12:41:48.711193 | fa163e0d-6f45-64a1-ca76-000000000b9f | TIMING | create persistent directories | standalone | 0:02:42.676197 | 0.20s 2026-01-22 12:41:48.735746 | fa163e0d-6f45-64a1-ca76-000000000ba1 | TASK | Ensure packages required for configuring octavia are present 2026-01-22 12:41:51.722251 | fa163e0d-6f45-64a1-ca76-000000000ba1 | OK | Ensure packages required for configuring octavia are present | standalone 2026-01-22 12:41:51.723011 | fa163e0d-6f45-64a1-ca76-000000000ba1 | TIMING | Ensure packages required for configuring octavia are present | standalone | 0:02:45.688029 | 2.99s 2026-01-22 12:41:51.765385 | fa163e0d-6f45-64a1-ca76-000000000ba3 | TIMING | include_role : tripleo_clients_install | standalone | 0:02:45.730394 | 0.02s 2026-01-22 12:41:51.806585 | fa163e0d-6f45-64a1-ca76-00000000129e | TASK | Gather variables for each operating system 2026-01-22 12:41:51.917400 | fa163e0d-6f45-64a1-ca76-00000000129e | TIMING | tripleo_clients_install : Gather variables for each operating system | standalone | 0:02:45.882401 | 0.11s 2026-01-22 12:41:51.941809 | fa163e0d-6f45-64a1-ca76-00000000129f | TASK | Set packages facts 2026-01-22 12:41:51.981326 | fa163e0d-6f45-64a1-ca76-00000000129f | OK | Set packages facts | standalone 2026-01-22 12:41:51.982637 | fa163e0d-6f45-64a1-ca76-00000000129f | TIMING | tripleo_clients_install : Set packages facts | standalone | 0:02:45.947645 | 0.04s 2026-01-22 12:41:52.008283 | fa163e0d-6f45-64a1-ca76-0000000012a0 | TASK | Manage package 2026-01-22 12:41:52.078588 | fa163e0d-6f45-64a1-ca76-0000000012a0 | SKIPPED | Manage package | standalone | item=[]: absent 2026-01-22 12:41:52.094238 | fa163e0d-6f45-64a1-ca76-0000000012a0 | SKIPPED | Manage package | standalone | item=[]: installed 2026-01-22 12:41:52.111804 | fa163e0d-6f45-64a1-ca76-0000000012a0 
| SKIPPED | Manage package | standalone | item=[]: latest 2026-01-22 12:41:56.701606 | fa163e0d-6f45-64a1-ca76-0000000012a0 | TIMING | tripleo_clients_install : Manage package | standalone | 0:02:50.666595 | 4.69s 2026-01-22 12:41:56.721527 | fa163e0d-6f45-64a1-ca76-0000000012a0 | SKIPPED | Manage package | standalone | item=[]: removed 2026-01-22 12:41:56.729949 | fa163e0d-6f45-64a1-ca76-0000000012a0 | TIMING | tripleo_clients_install : Manage package | standalone | 0:02:50.694923 | 4.72s 2026-01-22 12:41:56.770830 | fa163e0d-6f45-64a1-ca76-000000000ba4 | TASK | create fcontext for rabbitmq data 2026-01-22 12:41:58.139693 | fa163e0d-6f45-64a1-ca76-000000000ba4 | CHANGED | create fcontext for rabbitmq data | standalone 2026-01-22 12:41:58.142338 | fa163e0d-6f45-64a1-ca76-000000000ba4 | TIMING | create fcontext for rabbitmq data | standalone | 0:02:52.107332 | 1.37s 2026-01-22 12:41:58.169198 | fa163e0d-6f45-64a1-ca76-000000000ba5 | TASK | create persistent directories 2026-01-22 12:41:58.485304 | fa163e0d-6f45-64a1-ca76-000000000ba5 | CHANGED | create persistent directories | standalone | item={'path': '/var/lib/rabbitmq', 'setype': 'container_file_t'} 2026-01-22 12:41:58.488765 | fa163e0d-6f45-64a1-ca76-000000000ba5 | TIMING | create persistent directories | standalone | 0:02:52.453748 | 0.32s 2026-01-22 12:41:58.704479 | fa163e0d-6f45-64a1-ca76-000000000ba5 | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/rabbitmq', 'setype': 'container_file_t'} 2026-01-22 12:41:58.706219 | fa163e0d-6f45-64a1-ca76-000000000ba5 | TIMING | create persistent directories | standalone | 0:02:52.671218 | 0.54s 2026-01-22 12:41:58.716170 | fa163e0d-6f45-64a1-ca76-000000000ba5 | TIMING | create persistent directories | standalone | 0:02:52.681160 | 0.55s 2026-01-22 12:41:58.742456 | fa163e0d-6f45-64a1-ca76-000000000ba6 | TASK | stop the Erlang port mapper on the host and make sure it cannot bind to the port used by container 2026-01-22 12:41:59.022936 | fa163e0d-6f45-64a1-ca76-000000000ba6 | CHANGED | stop the Erlang port mapper on the host and make sure it cannot bind to the port used by container | standalone 2026-01-22 12:41:59.024778 | fa163e0d-6f45-64a1-ca76-000000000ba6 | TIMING | stop the Erlang port mapper on the host and make sure it cannot bind to the port used by container | standalone | 0:02:52.989780 | 0.28s 2026-01-22 12:41:59.048798 | fa163e0d-6f45-64a1-ca76-000000000ba7 | TASK | Make sure python3-novaclient is installed when IHA is enabled 2026-01-22 12:41:59.077771 | fa163e0d-6f45-64a1-ca76-000000000ba7 | SKIPPED | Make sure python3-novaclient is installed when IHA is enabled | standalone 2026-01-22 12:41:59.079359 | fa163e0d-6f45-64a1-ca76-000000000ba7 | TIMING | Make sure python3-novaclient is installed when IHA is enabled | standalone | 0:02:53.044364 | 0.03s 2026-01-22 12:41:59.103729 | fa163e0d-6f45-64a1-ca76-000000000ba8 | TASK | Remove existing entries from logind conf 2026-01-22 12:41:59.336191 | fa163e0d-6f45-64a1-ca76-000000000ba8 | CHANGED | Remove existing entries from logind conf | standalone 2026-01-22 12:41:59.337602 | fa163e0d-6f45-64a1-ca76-000000000ba8 | TIMING | Remove existing entries from logind conf | standalone | 0:02:53.302612 | 0.23s 2026-01-22 12:41:59.357119 | fa163e0d-6f45-64a1-ca76-000000000ba9 | TASK | Make sure systemd-logind ignores power off 2026-01-22 12:41:59.611274 | fa163e0d-6f45-64a1-ca76-000000000ba9 | CHANGED | Make sure systemd-logind ignores power off | standalone 2026-01-22 12:41:59.613361 | 
fa163e0d-6f45-64a1-ca76-000000000ba9 | TIMING | Make sure systemd-logind ignores power off | standalone | 0:02:53.578359 | 0.26s 2026-01-22 12:41:59.641212 | fa163e0d-6f45-64a1-ca76-000000000baa | TASK | Restart systemd-logind 2026-01-22 12:42:00.207742 | fa163e0d-6f45-64a1-ca76-000000000baa | CHANGED | Restart systemd-logind | standalone 2026-01-22 12:42:00.209977 | fa163e0d-6f45-64a1-ca76-000000000baa | TIMING | Restart systemd-logind | standalone | 0:02:54.174980 | 0.57s 2026-01-22 12:42:00.236462 | fa163e0d-6f45-64a1-ca76-000000000bab | TASK | Gather service_facts on pacemaker_bootstrap_node [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. Found: {{(pacemaker_short_bootstrap_node_name|lower == ansible_facts['hostname']|lower)|bool}} 2026-01-22 12:42:00.287855 | fa163e0d-6f45-64a1-ca76-000000000bab | SKIPPED | Gather service_facts on pacemaker_bootstrap_node | standalone 2026-01-22 12:42:00.288829 | fa163e0d-6f45-64a1-ca76-000000000bab | TIMING | Gather service_facts on pacemaker_bootstrap_node | standalone | 0:02:54.253832 | 0.05s 2026-01-22 12:42:00.318352 | fa163e0d-6f45-64a1-ca76-000000000bac | TASK | Check and eventually delete duplicate constraints (bootstrap node) [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. Found: {{(pacemaker_short_bootstrap_node_name|lower == ansible_facts['hostname']|lower)|bool}} 2026-01-22 12:42:00.366987 | fa163e0d-6f45-64a1-ca76-000000000bac | SKIPPED | Check and eventually delete duplicate constraints (bootstrap node) | standalone 2026-01-22 12:42:00.367667 | fa163e0d-6f45-64a1-ca76-000000000bac | TIMING | Check and eventually delete duplicate constraints (bootstrap node) | standalone | 0:02:54.332664 | 0.05s 2026-01-22 12:42:00.388784 | fa163e0d-6f45-64a1-ca76-000000000bad | TASK | create persistent logs directory 2026-01-22 12:42:01.319974 | fa163e0d-6f45-64a1-ca76-000000000bad | CHANGED | create persistent logs directory | standalone | item={'mode': '0750', 'path': '/var/log/containers/placement', 'setype': 'container_file_t'} 2026-01-22 12:42:01.321548 | fa163e0d-6f45-64a1-ca76-000000000bad | TIMING | create persistent logs directory | standalone | 0:02:55.286552 | 0.93s 2026-01-22 12:42:01.505708 | fa163e0d-6f45-64a1-ca76-000000000bad | CHANGED | create persistent logs directory | standalone | item={'mode': '0750', 'path': '/var/log/containers/httpd/placement', 'setype': 'container_file_t'} 2026-01-22 12:42:01.506794 | fa163e0d-6f45-64a1-ca76-000000000bad | TIMING | create persistent logs directory | standalone | 0:02:55.471805 | 1.12s 2026-01-22 12:42:01.511717 | fa163e0d-6f45-64a1-ca76-000000000bad | TIMING | create persistent logs directory | standalone | 0:02:55.476723 | 1.12s 2026-01-22 12:42:01.539791 | fa163e0d-6f45-64a1-ca76-000000000baf | TASK | Set login facts 2026-01-22 12:42:01.582264 | fa163e0d-6f45-64a1-ca76-000000000baf | OK | Set login facts | standalone 2026-01-22 12:42:01.583962 | fa163e0d-6f45-64a1-ca76-000000000baf | TIMING | Set login facts | standalone | 0:02:55.548964 | 0.04s 2026-01-22 12:42:01.610261 | fa163e0d-6f45-64a1-ca76-000000000bb0 | TASK | Convert logins json to dict 2026-01-22 12:42:01.660852 | fa163e0d-6f45-64a1-ca76-000000000bb0 | SKIPPED | Convert logins json to dict | standalone 2026-01-22 12:42:01.662251 | fa163e0d-6f45-64a1-ca76-000000000bb0 | TIMING | Convert logins json to dict | standalone | 0:02:55.627255 | 0.05s 2026-01-22 12:42:01.688318 | fa163e0d-6f45-64a1-ca76-000000000bb1 | TASK | Set 
registry logins 2026-01-22 12:42:01.738925 | fa163e0d-6f45-64a1-ca76-000000000bb1 | SKIPPED | Set registry logins | standalone 2026-01-22 12:42:01.740091 | fa163e0d-6f45-64a1-ca76-000000000bb1 | TIMING | Set registry logins | standalone | 0:02:55.705098 | 0.05s 2026-01-22 12:42:01.765093 | fa163e0d-6f45-64a1-ca76-000000000bb2 | TASK | Run podman install 2026-01-22 12:42:01.794855 | fa163e0d-6f45-64a1-ca76-000000000bb2 | TIMING | Run podman install | standalone | 0:02:55.759850 | 0.03s 2026-01-22 12:42:01.866258 | fa163e0d-6f45-64a1-ca76-000000001330 | TASK | Gather variables for each operating system 2026-01-22 12:42:02.025482 | fa163e0d-6f45-64a1-ca76-000000001330 | OK | Gather variables for each operating system | standalone | item=/usr/share/ansible/roles/tripleo_podman/vars/redhat-9.yml 2026-01-22 12:42:02.028230 | fa163e0d-6f45-64a1-ca76-000000001330 | TIMING | tripleo_podman : Gather variables for each operating system | standalone | 0:02:55.993232 | 0.16s 2026-01-22 12:42:02.037978 | fa163e0d-6f45-64a1-ca76-000000001330 | TIMING | tripleo_podman : Gather variables for each operating system | standalone | 0:02:56.002976 | 0.17s 2026-01-22 12:42:02.064423 | fa163e0d-6f45-64a1-ca76-000000001332 | TASK | ensure podman and deps are installed 2026-01-22 12:42:05.058339 | fa163e0d-6f45-64a1-ca76-000000001332 | OK | ensure podman and deps are installed | standalone 2026-01-22 12:42:05.059866 | fa163e0d-6f45-64a1-ca76-000000001332 | TIMING | tripleo_podman : ensure podman and deps are installed | standalone | 0:02:59.024875 | 2.99s 2026-01-22 12:42:05.085254 | fa163e0d-6f45-64a1-ca76-000000001333 | TASK | Ensure we get the ansible interfaces facts 2026-01-22 12:42:05.680132 | fa163e0d-6f45-64a1-ca76-000000001333 | OK | Ensure we get the ansible interfaces facts | standalone 2026-01-22 12:42:05.681359 | fa163e0d-6f45-64a1-ca76-000000001333 | TIMING | tripleo_podman : Ensure we get the ansible interfaces facts | standalone | 0:02:59.646367 | 0.59s 2026-01-22 12:42:05.706816 | fa163e0d-6f45-64a1-ca76-000000001334 | TASK | Delete legacy cni0 interface (podman < 1.6) 2026-01-22 12:42:05.739292 | fa163e0d-6f45-64a1-ca76-000000001334 | SKIPPED | Delete legacy cni0 interface (podman < 1.6) | standalone 2026-01-22 12:42:05.740428 | fa163e0d-6f45-64a1-ca76-000000001334 | TIMING | tripleo_podman : Delete legacy cni0 interface (podman < 1.6) | standalone | 0:02:59.705436 | 0.03s 2026-01-22 12:42:05.766088 | fa163e0d-6f45-64a1-ca76-000000001336 | TASK | Make sure /etc/cni/net.d folder exists 2026-01-22 12:42:05.806103 | fa163e0d-6f45-64a1-ca76-000000001336 | SKIPPED | Make sure /etc/cni/net.d folder exists | standalone 2026-01-22 12:42:05.807213 | fa163e0d-6f45-64a1-ca76-000000001336 | TIMING | tripleo_podman : Make sure /etc/cni/net.d folder exists | standalone | 0:02:59.772220 | 0.04s 2026-01-22 12:42:05.832485 | fa163e0d-6f45-64a1-ca76-000000001337 | TASK | Update default network configuration if possible 2026-01-22 12:42:05.872000 | fa163e0d-6f45-64a1-ca76-000000001337 | SKIPPED | Update default network configuration if possible | standalone 2026-01-22 12:42:05.873187 | fa163e0d-6f45-64a1-ca76-000000001337 | TIMING | tripleo_podman : Update default network configuration if possible | standalone | 0:02:59.838195 | 0.04s 2026-01-22 12:42:05.899469 | fa163e0d-6f45-64a1-ca76-000000001339 | TASK | Make sure /etc/containers/networks exists 2026-01-22 12:42:06.142090 | fa163e0d-6f45-64a1-ca76-000000001339 | OK | Make sure /etc/containers/networks exists | standalone 2026-01-22 12:42:06.143460 | 
fa163e0d-6f45-64a1-ca76-000000001339 | TIMING | tripleo_podman : Make sure /etc/containers/networks exists | standalone | 0:03:00.108470 | 0.24s 2026-01-22 12:42:06.170057 | fa163e0d-6f45-64a1-ca76-00000000133a | TASK | Get current podman network 2026-01-22 12:42:06.498525 | fa163e0d-6f45-64a1-ca76-00000000133a | CHANGED | Get current podman network | standalone 2026-01-22 12:42:06.499961 | fa163e0d-6f45-64a1-ca76-00000000133a | TIMING | tripleo_podman : Get current podman network | standalone | 0:03:00.464969 | 0.33s 2026-01-22 12:42:06.525636 | fa163e0d-6f45-64a1-ca76-00000000133b | TASK | Update the default network configuration 2026-01-22 12:42:07.140474 | fa163e0d-6f45-64a1-ca76-00000000133b | CHANGED | Update the default network configuration | standalone 2026-01-22 12:42:07.142002 | fa163e0d-6f45-64a1-ca76-00000000133b | TIMING | tripleo_podman : Update the default network configuration | standalone | 0:03:01.107010 | 0.61s 2026-01-22 12:42:07.167582 | fa163e0d-6f45-64a1-ca76-00000000133c | TASK | Write containers registries.conf 2026-01-22 12:42:07.720391 | fa163e0d-6f45-64a1-ca76-00000000133c | CHANGED | Write containers registries.conf | standalone 2026-01-22 12:42:07.721866 | fa163e0d-6f45-64a1-ca76-00000000133c | TIMING | tripleo_podman : Write containers registries.conf | standalone | 0:03:01.686873 | 0.55s 2026-01-22 12:42:07.748408 | fa163e0d-6f45-64a1-ca76-00000000133d | TASK | Write containers.conf 2026-01-22 12:42:07.992997 | fa163e0d-6f45-64a1-ca76-00000000133d | CHANGED | Write containers.conf | standalone | item={'section': 'containers', 'option': 'pids_limit', 'value': '4096'} 2026-01-22 12:42:07.995214 | fa163e0d-6f45-64a1-ca76-00000000133d | TIMING | tripleo_podman : Write containers.conf | standalone | 0:03:01.960212 | 0.25s 2026-01-22 12:42:08.167571 | fa163e0d-6f45-64a1-ca76-00000000133d | CHANGED | Write containers.conf | standalone | item={'section': 'engine', 'option': 'events_logger', 'value': '"journald"'} 2026-01-22 12:42:08.169407 | fa163e0d-6f45-64a1-ca76-00000000133d | TIMING | tripleo_podman : Write containers.conf | standalone | 0:03:02.134417 | 0.42s 2026-01-22 12:42:08.331825 | fa163e0d-6f45-64a1-ca76-00000000133d | CHANGED | Write containers.conf | standalone | item={'section': 'engine', 'option': 'runtime', 'value': '"crun"'} 2026-01-22 12:42:08.333756 | fa163e0d-6f45-64a1-ca76-00000000133d | TIMING | tripleo_podman : Write containers.conf | standalone | 0:03:02.298764 | 0.58s 2026-01-22 12:42:08.490387 | fa163e0d-6f45-64a1-ca76-00000000133d | CHANGED | Write containers.conf | standalone | item={'section': 'network', 'option': 'network_backend', 'value': '"netavark"'} 2026-01-22 12:42:08.491453 | fa163e0d-6f45-64a1-ca76-00000000133d | TIMING | tripleo_podman : Write containers.conf | standalone | 0:03:02.456464 | 0.74s 2026-01-22 12:42:08.498948 | fa163e0d-6f45-64a1-ca76-00000000133d | TIMING | tripleo_podman : Write containers.conf | standalone | 0:03:02.463952 | 0.75s 2026-01-22 12:42:08.525177 | fa163e0d-6f45-64a1-ca76-00000000133e | TASK | Enable podman.socket service 2026-01-22 12:42:08.553618 | fa163e0d-6f45-64a1-ca76-00000000133e | SKIPPED | Enable podman.socket service | standalone 2026-01-22 12:42:08.554784 | fa163e0d-6f45-64a1-ca76-00000000133e | TIMING | tripleo_podman : Enable podman.socket service | standalone | 0:03:02.519793 | 0.03s 2026-01-22 12:42:08.601547 | fa163e0d-6f45-64a1-ca76-000000000bb3 | TASK | Run podman login 2026-01-22 12:42:08.628604 | fa163e0d-6f45-64a1-ca76-000000000bb3 | TIMING | Run podman login | standalone | 
0:03:02.593605 | 0.03s 2026-01-22 12:42:08.704901 | fa163e0d-6f45-64a1-ca76-00000000143e | TASK | Perform container registry login(s) with podman 2026-01-22 12:42:08.731094 | fa163e0d-6f45-64a1-ca76-00000000143e | TIMING | tripleo_podman : Perform container registry login(s) with podman | standalone | 0:03:02.696103 | 0.03s 2026-01-22 12:42:08.767905 | fa163e0d-6f45-64a1-ca76-000000000bb5 | TASK | Configure tripleo_container_manage to generate systemd drop-in dependencies 2026-01-22 12:42:09.207904 | fa163e0d-6f45-64a1-ca76-000000000bb5 | CHANGED | Configure tripleo_container_manage to generate systemd drop-in dependencies | standalone 2026-01-22 12:42:09.209178 | fa163e0d-6f45-64a1-ca76-000000000bb5 | TIMING | Configure tripleo_container_manage to generate systemd drop-in dependencies | standalone | 0:03:03.174186 | 0.44s 2026-01-22 12:42:09.233832 | fa163e0d-6f45-64a1-ca76-000000000bb6 | TASK | create persistent directories 2026-01-22 12:42:09.464128 | fa163e0d-6f45-64a1-ca76-000000000bb6 | CHANGED | create persistent directories | standalone | item={'path': '/var/lib/redis', 'setype': 'container_file_t'} 2026-01-22 12:42:09.466357 | fa163e0d-6f45-64a1-ca76-000000000bb6 | TIMING | create persistent directories | standalone | 0:03:03.431358 | 0.23s 2026-01-22 12:42:09.654304 | fa163e0d-6f45-64a1-ca76-000000000bb6 | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/redis', 'setype': 'container_file_t'} 2026-01-22 12:42:09.655488 | fa163e0d-6f45-64a1-ca76-000000000bb6 | TIMING | create persistent directories | standalone | 0:03:03.620498 | 0.42s 2026-01-22 12:42:09.837842 | fa163e0d-6f45-64a1-ca76-000000000bb6 | CHANGED | create persistent directories | standalone | item={'path': '/run/redis', 'setype': 'container_file_t'} 2026-01-22 12:42:09.839635 | fa163e0d-6f45-64a1-ca76-000000000bb6 | TIMING | create persistent directories | standalone | 0:03:03.804646 | 0.60s 2026-01-22 12:42:09.850541 | fa163e0d-6f45-64a1-ca76-000000000bb6 | TIMING | create persistent directories | standalone | 0:03:03.815545 | 0.62s 2026-01-22 12:42:09.875769 | fa163e0d-6f45-64a1-ca76-000000000bb7 | TASK | ensure /run/redis is present upon reboot 2026-01-22 12:42:10.264902 | fa163e0d-6f45-64a1-ca76-000000000bb7 | CHANGED | ensure /run/redis is present upon reboot | standalone 2026-01-22 12:42:10.266399 | fa163e0d-6f45-64a1-ca76-000000000bb7 | TIMING | ensure /run/redis is present upon reboot | standalone | 0:03:04.231408 | 0.39s 2026-01-22 12:42:10.291919 | fa163e0d-6f45-64a1-ca76-000000000bb8 | TASK | Ensure rsyslog is installed 2026-01-22 12:42:13.275857 | fa163e0d-6f45-64a1-ca76-000000000bb8 | OK | Ensure rsyslog is installed | standalone 2026-01-22 12:42:13.277318 | fa163e0d-6f45-64a1-ca76-000000000bb8 | TIMING | Ensure rsyslog is installed | standalone | 0:03:07.242326 | 2.98s 2026-01-22 12:42:13.304762 | fa163e0d-6f45-64a1-ca76-000000000bb9 | TASK | Ensure rsyslog is enabled and started 2026-01-22 12:42:13.686179 | fa163e0d-6f45-64a1-ca76-000000000bb9 | OK | Ensure rsyslog is enabled and started | standalone 2026-01-22 12:42:13.688333 | fa163e0d-6f45-64a1-ca76-000000000bb9 | TIMING | Ensure rsyslog is enabled and started | standalone | 0:03:07.653341 | 0.38s 2026-01-22 12:42:13.743757 | fa163e0d-6f45-64a1-ca76-000000000bba | TIMING | include_role : tripleo_sshd | standalone | 0:03:07.708755 | 0.03s 2026-01-22 12:42:13.831144 | fa163e0d-6f45-64a1-ca76-000000001559 | TASK | Gather variables for each operating system 2026-01-22 12:42:13.950782 | 
fa163e0d-6f45-64a1-ca76-000000001559 | OK | Gather variables for each operating system | standalone | item=/usr/share/ansible/roles/tripleo_sshd/vars/redhat.yml 2026-01-22 12:42:13.952639 | fa163e0d-6f45-64a1-ca76-000000001559 | TIMING | tripleo_sshd : Gather variables for each operating system | standalone | 0:03:07.917640 | 0.12s 2026-01-22 12:42:13.964846 | fa163e0d-6f45-64a1-ca76-000000001559 | TIMING | tripleo_sshd : Gather variables for each operating system | standalone | 0:03:07.929845 | 0.13s 2026-01-22 12:42:13.990310 | fa163e0d-6f45-64a1-ca76-00000000155b | TASK | Install the OpenSSH server 2026-01-22 12:42:17.531198 | fa163e0d-6f45-64a1-ca76-00000000155b | OK | Install the OpenSSH server | standalone 2026-01-22 12:42:17.532766 | fa163e0d-6f45-64a1-ca76-00000000155b | TIMING | tripleo_sshd : Install the OpenSSH server | standalone | 0:03:11.497770 | 3.54s 2026-01-22 12:42:17.558896 | fa163e0d-6f45-64a1-ca76-00000000155c | TASK | Start sshd 2026-01-22 12:42:17.588538 | fa163e0d-6f45-64a1-ca76-00000000155c | SKIPPED | Start sshd | standalone 2026-01-22 12:42:17.589930 | fa163e0d-6f45-64a1-ca76-00000000155c | TIMING | tripleo_sshd : Start sshd | standalone | 0:03:11.554936 | 0.03s 2026-01-22 12:42:17.616318 | fa163e0d-6f45-64a1-ca76-00000000155d | TASK | PasswordAuthentication notice 2026-01-22 12:42:17.645457 | fa163e0d-6f45-64a1-ca76-00000000155d | OK | PasswordAuthentication notice | standalone | result={ "changed": false, "msg": "Notice - The option `tripleo_sshd_password_authentication` is set to \"yes\" but `PermitRootLogin` is undefined. While this may be perfectly valid, the sshd_config options should be reviewed to ensure general user access is functional and meeting expectations." } 2026-01-22 12:42:17.646604 | fa163e0d-6f45-64a1-ca76-00000000155d | TIMING | tripleo_sshd : PasswordAuthentication notice | standalone | 0:03:11.611613 | 0.03s 2026-01-22 12:42:17.666352 | fa163e0d-6f45-64a1-ca76-00000000155e | TASK | PasswordAuthentication duplication notice 2026-01-22 12:42:17.693489 | fa163e0d-6f45-64a1-ca76-00000000155e | SKIPPED | PasswordAuthentication duplication notice | standalone 2026-01-22 12:42:17.694403 | fa163e0d-6f45-64a1-ca76-00000000155e | TIMING | tripleo_sshd : PasswordAuthentication duplication notice | standalone | 0:03:11.659418 | 0.03s 2026-01-22 12:42:17.712154 | fa163e0d-6f45-64a1-ca76-00000000155f | TASK | Motd duplication notice 2026-01-22 12:42:17.736482 | fa163e0d-6f45-64a1-ca76-00000000155f | OK | Motd duplication notice | standalone | result={ "changed": false, "msg": "WARNING - The Banner or PrintMotd has been configured in `tripleo_sshd_server_options`. 
These options may be ignored and configured using values from `tripleo_sshd_banner_enabled` and `tripleo_sshd_motd_enabled`" } 2026-01-22 12:42:17.737055 | fa163e0d-6f45-64a1-ca76-00000000155f | TIMING | tripleo_sshd : Motd duplication notice | standalone | 0:03:11.702074 | 0.02s 2026-01-22 12:42:17.753794 | fa163e0d-6f45-64a1-ca76-000000001560 | TASK | Configure the banner text 2026-01-22 12:42:17.778838 | fa163e0d-6f45-64a1-ca76-000000001560 | SKIPPED | Configure the banner text | standalone 2026-01-22 12:42:17.779576 | fa163e0d-6f45-64a1-ca76-000000001560 | TIMING | tripleo_sshd : Configure the banner text | standalone | 0:03:11.744593 | 0.03s 2026-01-22 12:42:17.796675 | fa163e0d-6f45-64a1-ca76-000000001561 | TASK | Configure the motd banner 2026-01-22 12:42:17.823919 | fa163e0d-6f45-64a1-ca76-000000001561 | SKIPPED | Configure the motd banner | standalone 2026-01-22 12:42:17.825191 | fa163e0d-6f45-64a1-ca76-000000001561 | TIMING | tripleo_sshd : Configure the motd banner | standalone | 0:03:11.790205 | 0.03s 2026-01-22 12:42:17.845952 | fa163e0d-6f45-64a1-ca76-000000001562 | TASK | Update sshd configuration options from vars 2026-01-22 12:42:17.875833 | fa163e0d-6f45-64a1-ca76-000000001562 | OK | Update sshd configuration options from vars | standalone 2026-01-22 12:42:17.877324 | fa163e0d-6f45-64a1-ca76-000000001562 | TIMING | tripleo_sshd : Update sshd configuration options from vars | standalone | 0:03:11.842331 | 0.03s 2026-01-22 12:42:17.897913 | fa163e0d-6f45-64a1-ca76-000000001563 | TASK | Adjust ssh server configuration 2026-01-22 12:42:18.403570 | fa163e0d-6f45-64a1-ca76-000000001563 | CHANGED | Adjust ssh server configuration | standalone 2026-01-22 12:42:18.404405 | fa163e0d-6f45-64a1-ca76-000000001563 | TIMING | tripleo_sshd : Adjust ssh server configuration | standalone | 0:03:12.369421 | 0.51s 2026-01-22 12:42:18.429022 | fa163e0d-6f45-64a1-ca76-000000001564 | TASK | Restart sshd 2026-01-22 12:42:18.849327 | fa163e0d-6f45-64a1-ca76-000000001564 | CHANGED | Restart sshd | standalone 2026-01-22 12:42:18.850409 | fa163e0d-6f45-64a1-ca76-000000001564 | TIMING | tripleo_sshd : Restart sshd | standalone | 0:03:12.815426 | 0.42s 2026-01-22 12:42:18.900619 | fa163e0d-6f45-64a1-ca76-000000000bbb | TASK | create persistent directories 2026-01-22 12:42:19.145203 | fa163e0d-6f45-64a1-ca76-000000000bbb | CHANGED | create persistent directories | standalone | item={'path': '/srv/node', 'setype': 'container_file_t'} 2026-01-22 12:42:19.147632 | fa163e0d-6f45-64a1-ca76-000000000bbb | TIMING | create persistent directories | standalone | 0:03:13.112625 | 0.25s 2026-01-22 12:42:19.358293 | fa163e0d-6f45-64a1-ca76-000000000bbb | CHANGED | create persistent directories | standalone | item={'path': '/var/log/swift', 'setype': 'container_file_t'} 2026-01-22 12:42:19.360290 | fa163e0d-6f45-64a1-ca76-000000000bbb | TIMING | create persistent directories | standalone | 0:03:13.325294 | 0.46s 2026-01-22 12:42:19.559511 | fa163e0d-6f45-64a1-ca76-000000000bbb | CHANGED | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/swift', 'setype': 'container_file_t'} 2026-01-22 12:42:19.560715 | fa163e0d-6f45-64a1-ca76-000000000bbb | TIMING | create persistent directories | standalone | 0:03:13.525725 | 0.66s 2026-01-22 12:42:19.573521 | fa163e0d-6f45-64a1-ca76-000000000bbb | TIMING | create persistent directories | standalone | 0:03:13.538521 | 0.67s 2026-01-22 12:42:19.600749 | fa163e0d-6f45-64a1-ca76-000000000bbc | TASK | create persistent directories 
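The "Write containers.conf" entries recorded a few steps above show the four host-level podman settings this deployment applies: pids_limit = 4096 under [containers], events_logger = "journald" and runtime = "crun" under [engine], and network_backend = "netavark" under [network]. As a minimal sketch only (the log does not reveal which module the tripleo_podman role actually calls, nor the destination path, so community.general.ini_file and /etc/containers/containers.conf are assumptions here), the same items could be applied like this:

# Minimal sketch, assuming community.general.ini_file and the default
# /etc/containers/containers.conf path; not the role's actual task.
- name: Write containers.conf (illustrative sketch)
  become: true
  community.general.ini_file:
    path: /etc/containers/containers.conf
    section: "{{ item.section }}"
    option: "{{ item.option }}"
    value: "{{ item.value }}"
    mode: '0644'
  loop:
    - {section: containers, option: pids_limit, value: '4096'}
    - {section: engine, option: events_logger, value: '"journald"'}
    - {section: engine, option: runtime, value: '"crun"'}
    - {section: network, option: network_backend, value: '"netavark"'}

The doubled quoting in the loop items mirrors the values shown in the log: containers.conf is TOML, so string values such as "journald" keep their quotes in the rendered file while the integer pids_limit does not.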
2026-01-22 12:42:19.864083 | fa163e0d-6f45-64a1-ca76-000000000bbc | OK | create persistent directories | standalone | item={'path': '/srv/node', 'setype': 'container_file_t'} 2026-01-22 12:42:19.865712 | fa163e0d-6f45-64a1-ca76-000000000bbc | TIMING | create persistent directories | standalone | 0:03:13.830714 | 0.26s 2026-01-22 12:42:20.090773 | fa163e0d-6f45-64a1-ca76-000000000bbc | CHANGED | create persistent directories | standalone | item={'path': '/var/cache/swift', 'setype': 'container_file_t'} 2026-01-22 12:42:20.092682 | fa163e0d-6f45-64a1-ca76-000000000bbc | TIMING | create persistent directories | standalone | 0:03:14.057656 | 0.49s 2026-01-22 12:42:20.270588 | fa163e0d-6f45-64a1-ca76-000000000bbc | OK | create persistent directories | standalone | item={'mode': '0750', 'path': '/var/log/containers/swift', 'setype': 'container_file_t'} 2026-01-22 12:42:20.271772 | fa163e0d-6f45-64a1-ca76-000000000bbc | TIMING | create persistent directories | standalone | 0:03:14.236780 | 0.67s 2026-01-22 12:42:20.284236 | fa163e0d-6f45-64a1-ca76-000000000bbc | TIMING | create persistent directories | standalone | 0:03:14.249233 | 0.68s 2026-01-22 12:42:20.309516 | fa163e0d-6f45-64a1-ca76-000000000bbd | TASK | Set swift_use_local_disks fact 2026-01-22 12:42:20.340515 | fa163e0d-6f45-64a1-ca76-000000000bbd | OK | Set swift_use_local_disks fact | standalone 2026-01-22 12:42:20.341816 | fa163e0d-6f45-64a1-ca76-000000000bbd | TIMING | Set swift_use_local_disks fact | standalone | 0:03:14.306822 | 0.03s 2026-01-22 12:42:20.365838 | fa163e0d-6f45-64a1-ca76-000000000bbe | TASK | Set use_node_data_lookup fact 2026-01-22 12:42:20.395317 | fa163e0d-6f45-64a1-ca76-000000000bbe | OK | Set use_node_data_lookup fact | standalone 2026-01-22 12:42:20.396677 | fa163e0d-6f45-64a1-ca76-000000000bbe | TIMING | Set use_node_data_lookup fact | standalone | 0:03:14.361655 | 0.03s 2026-01-22 12:42:20.421737 | fa163e0d-6f45-64a1-ca76-000000000bbf | TASK | Create Swift d1 directory if needed 2026-01-22 12:42:20.624068 | fa163e0d-6f45-64a1-ca76-000000000bbf | CHANGED | Create Swift d1 directory if needed | standalone 2026-01-22 12:42:20.625568 | fa163e0d-6f45-64a1-ca76-000000000bbf | TIMING | Create Swift d1 directory if needed | standalone | 0:03:14.590574 | 0.20s 2026-01-22 12:42:20.651695 | fa163e0d-6f45-64a1-ca76-000000000bc0 | TASK | Set fact for SwiftRawDisks 2026-01-22 12:42:20.690473 | fa163e0d-6f45-64a1-ca76-000000000bc0 | OK | Set fact for SwiftRawDisks | standalone 2026-01-22 12:42:20.691807 | fa163e0d-6f45-64a1-ca76-000000000bc0 | TIMING | Set fact for SwiftRawDisks | standalone | 0:03:14.656812 | 0.04s 2026-01-22 12:42:20.716814 | fa163e0d-6f45-64a1-ca76-000000000bc1 | TASK | Get hiera swift::storage::disks::args 2026-01-22 12:42:20.751108 | fa163e0d-6f45-64a1-ca76-000000000bc1 | SKIPPED | Get hiera swift::storage::disks::args | standalone 2026-01-22 12:42:20.752426 | fa163e0d-6f45-64a1-ca76-000000000bc1 | TIMING | Get hiera swift::storage::disks::args | standalone | 0:03:14.717432 | 0.03s 2026-01-22 12:42:20.777281 | fa163e0d-6f45-64a1-ca76-000000000bc2 | TASK | Set fact for swift_raw_disks 2026-01-22 12:42:20.806888 | fa163e0d-6f45-64a1-ca76-000000000bc2 | SKIPPED | Set fact for swift_raw_disks | standalone 2026-01-22 12:42:20.808246 | fa163e0d-6f45-64a1-ca76-000000000bc2 | TIMING | Set fact for swift_raw_disks | standalone | 0:03:14.773252 | 0.03s 2026-01-22 12:42:20.833907 | fa163e0d-6f45-64a1-ca76-000000000bc3 | TASK | Format SwiftRawDisks 2026-01-22 12:42:20.862792 | fa163e0d-6f45-64a1-ca76-000000000bc3 
| TIMING | Format SwiftRawDisks | standalone | 0:03:14.827790 | 0.03s 2026-01-22 12:42:20.888982 | fa163e0d-6f45-64a1-ca76-000000000bc4 | TASK | Refresh facts if SwiftRawDisks is set to get uuids if newly created partitions 2026-01-22 12:42:20.941043 | fa163e0d-6f45-64a1-ca76-000000000bc4 | SKIPPED | Refresh facts if SwiftRawDisks is set to get uuids if newly created partitions | standalone 2026-01-22 12:42:20.942507 | fa163e0d-6f45-64a1-ca76-000000000bc4 | TIMING | Refresh facts if SwiftRawDisks is set to get uuids if newly created partitions | standalone | 0:03:14.907508 | 0.05s 2026-01-22 12:42:20.967932 | fa163e0d-6f45-64a1-ca76-000000000bc5 | TASK | Mount devices defined in SwiftRawDisks 2026-01-22 12:42:20.995640 | fa163e0d-6f45-64a1-ca76-000000000bc5 | TIMING | Mount devices defined in SwiftRawDisks | standalone | 0:03:14.960638 | 0.03s 2026-01-22 12:42:21.017852 | fa163e0d-6f45-64a1-ca76-000000000bc6 | TASK | Check for NTP service 2026-01-22 12:42:21.228120 | fa163e0d-6f45-64a1-ca76-000000000bc6 | CHANGED | Check for NTP service | standalone 2026-01-22 12:42:21.229351 | fa163e0d-6f45-64a1-ca76-000000000bc6 | TIMING | Check for NTP service | standalone | 0:03:15.194364 | 0.21s 2026-01-22 12:42:21.248998 | fa163e0d-6f45-64a1-ca76-000000000bc7 | TASK | Disable NTP before configuring Chrony 2026-01-22 12:42:21.289312 | fa163e0d-6f45-64a1-ca76-000000000bc7 | SKIPPED | Disable NTP before configuring Chrony | standalone 2026-01-22 12:42:21.290411 | fa163e0d-6f45-64a1-ca76-000000000bc7 | TIMING | Disable NTP before configuring Chrony | standalone | 0:03:15.255424 | 0.04s 2026-01-22 12:42:21.310747 | fa163e0d-6f45-64a1-ca76-000000000bc8 | TASK | Install, Configure and Run Chrony 2026-01-22 12:42:21.339498 | fa163e0d-6f45-64a1-ca76-000000000bc8 | TIMING | Install, Configure and Run Chrony | standalone | 0:03:15.304501 | 0.03s 2026-01-22 12:42:21.411917 | fa163e0d-6f45-64a1-ca76-0000000016f5 | TASK | Load distro-specific variables 2026-01-22 12:42:21.439791 | fa163e0d-6f45-64a1-ca76-0000000016f5 | TIMING | chrony : Load distro-specific variables | standalone | 0:03:15.404794 | 0.03s 2026-01-22 12:42:21.448638 | 543cc2da-a497-46a4-914c-6547a6323c2c | INCLUDED | /usr/share/ansible/roles/chrony/tasks/vars.yml | standalone 2026-01-22 12:42:21.470556 | fa163e0d-6f45-64a1-ca76-0000000017da | TASK | Load distro-specific variables 2026-01-22 12:42:21.527063 | fa163e0d-6f45-64a1-ca76-0000000017da | OK | Load distro-specific variables | standalone | item=/usr/share/ansible/roles/chrony/vars/RedHat.yml 2026-01-22 12:42:21.529044 | fa163e0d-6f45-64a1-ca76-0000000017da | TIMING | chrony : Load distro-specific variables | standalone | 0:03:15.494056 | 0.06s 2026-01-22 12:42:21.539594 | fa163e0d-6f45-64a1-ca76-0000000017da | TIMING | chrony : Load distro-specific variables | standalone | 0:03:15.504591 | 0.07s 2026-01-22 12:42:21.565637 | fa163e0d-6f45-64a1-ca76-0000000016f6 | TASK | Install chronyd 2026-01-22 12:42:21.595734 | fa163e0d-6f45-64a1-ca76-0000000016f6 | TIMING | chrony : Install chronyd | standalone | 0:03:15.560727 | 0.03s 2026-01-22 12:42:21.619931 | b7996ed1-93b9-4417-8436-ae9dd38b8e58 | INCLUDED | /usr/share/ansible/roles/chrony/tasks/install.yml | standalone 2026-01-22 12:42:21.653412 | fa163e0d-6f45-64a1-ca76-000000001813 | TASK | Load distro-specific variables 2026-01-22 12:42:21.679720 | fa163e0d-6f45-64a1-ca76-000000001813 | TIMING | chrony : Load distro-specific variables | standalone | 0:03:15.644727 | 0.03s 2026-01-22 12:42:21.691167 | c139ac87-79f2-4eec-a343-1dc3667dc523 | INCLUDED 
| /usr/share/ansible/roles/chrony/tasks/vars.yml | standalone 2026-01-22 12:42:21.716994 | fa163e0d-6f45-64a1-ca76-00000000185a | TASK | Load distro-specific variables 2026-01-22 12:42:21.755124 | fa163e0d-6f45-64a1-ca76-00000000185a | OK | Load distro-specific variables | standalone | item=/usr/share/ansible/roles/chrony/vars/RedHat.yml 2026-01-22 12:42:21.758061 | fa163e0d-6f45-64a1-ca76-00000000185a | TIMING | chrony : Load distro-specific variables | standalone | 0:03:15.723063 | 0.04s 2026-01-22 12:42:21.768064 | fa163e0d-6f45-64a1-ca76-00000000185a | TIMING | chrony : Load distro-specific variables | standalone | 0:03:15.733057 | 0.05s 2026-01-22 12:42:21.793891 | fa163e0d-6f45-64a1-ca76-000000001814 | TASK | Check for NTP service 2026-01-22 12:42:22.036109 | fa163e0d-6f45-64a1-ca76-000000001814 | CHANGED | Check for NTP service | standalone 2026-01-22 12:42:22.037696 | fa163e0d-6f45-64a1-ca76-000000001814 | TIMING | chrony : Check for NTP service | standalone | 0:03:16.002701 | 0.24s 2026-01-22 12:42:22.063283 | fa163e0d-6f45-64a1-ca76-000000001815 | TASK | Disable NTP before configuring Chrony 2026-01-22 12:42:22.113645 | fa163e0d-6f45-64a1-ca76-000000001815 | SKIPPED | Disable NTP before configuring Chrony | standalone 2026-01-22 12:42:22.115037 | fa163e0d-6f45-64a1-ca76-000000001815 | TIMING | chrony : Disable NTP before configuring Chrony | standalone | 0:03:16.080044 | 0.05s 2026-01-22 12:42:22.137302 | fa163e0d-6f45-64a1-ca76-000000001816 | TASK | Install chronyd package 2026-01-22 12:42:25.013421 | fa163e0d-6f45-64a1-ca76-000000001816 | OK | Install chronyd package | standalone 2026-01-22 12:42:25.014933 | fa163e0d-6f45-64a1-ca76-000000001816 | TIMING | chrony : Install chronyd package | standalone | 0:03:18.979941 | 2.88s 2026-01-22 12:42:25.040055 | fa163e0d-6f45-64a1-ca76-0000000016f7 | TASK | Upgrade chronyd 2026-01-22 12:42:25.068640 | fa163e0d-6f45-64a1-ca76-0000000016f7 | SKIPPED | Upgrade chronyd | standalone 2026-01-22 12:42:25.069842 | fa163e0d-6f45-64a1-ca76-0000000016f7 | TIMING | chrony : Upgrade chronyd | standalone | 0:03:19.034849 | 0.03s 2026-01-22 12:42:25.095013 | fa163e0d-6f45-64a1-ca76-0000000016f8 | TASK | Configure chronyd 2026-01-22 12:42:25.123903 | fa163e0d-6f45-64a1-ca76-0000000016f8 | TIMING | chrony : Configure chronyd | standalone | 0:03:19.088902 | 0.03s 2026-01-22 12:42:25.144571 | 6d6fb64f-9f3e-4c0e-8696-adcc042956da | INCLUDED | /usr/share/ansible/roles/chrony/tasks/config.yml | standalone 2026-01-22 12:42:25.181357 | fa163e0d-6f45-64a1-ca76-000000001893 | TASK | Load distro-specific variables 2026-01-22 12:42:25.207792 | fa163e0d-6f45-64a1-ca76-000000001893 | TIMING | chrony : Load distro-specific variables | standalone | 0:03:19.172803 | 0.03s 2026-01-22 12:42:25.218201 | 9bc67b96-41f2-4d66-ba70-bc4e7eb34689 | INCLUDED | /usr/share/ansible/roles/chrony/tasks/vars.yml | standalone 2026-01-22 12:42:25.240417 | fa163e0d-6f45-64a1-ca76-0000000018ff | TASK | Load distro-specific variables 2026-01-22 12:42:25.296491 | fa163e0d-6f45-64a1-ca76-0000000018ff | OK | Load distro-specific variables | standalone | item=/usr/share/ansible/roles/chrony/vars/RedHat.yml 2026-01-22 12:42:25.298410 | fa163e0d-6f45-64a1-ca76-0000000018ff | TIMING | chrony : Load distro-specific variables | standalone | 0:03:19.263410 | 0.06s 2026-01-22 12:42:25.310912 | fa163e0d-6f45-64a1-ca76-0000000018ff | TIMING | chrony : Load distro-specific variables | standalone | 0:03:19.275918 | 0.07s 2026-01-22 12:42:25.335702 | fa163e0d-6f45-64a1-ca76-000000001894 | TASK | Install 
chrony configuration file 2026-01-22 12:42:25.966066 | fa163e0d-6f45-64a1-ca76-000000001894 | CHANGED | Install chrony configuration file | standalone 2026-01-22 12:42:25.967576 | fa163e0d-6f45-64a1-ca76-000000001894 | TIMING | chrony : Install chrony configuration file | standalone | 0:03:19.932582 | 0.63s 2026-01-22 12:42:25.996084 | fa163e0d-6f45-64a1-ca76-0000000016f9 | TASK | Run chronyd 2026-01-22 12:42:26.028496 | fa163e0d-6f45-64a1-ca76-0000000016f9 | TIMING | chrony : Run chronyd | standalone | 0:03:19.993495 | 0.03s 2026-01-22 12:42:26.051131 | 60409794-5a10-4551-9664-c8b822cb881f | INCLUDED | /usr/share/ansible/roles/chrony/tasks/run.yml | standalone 2026-01-22 12:42:26.093119 | fa163e0d-6f45-64a1-ca76-000000001922 | TASK | Load distro-specific variables 2026-01-22 12:42:26.120677 | fa163e0d-6f45-64a1-ca76-000000001922 | TIMING | chrony : Load distro-specific variables | standalone | 0:03:20.085657 | 0.03s 2026-01-22 12:42:26.131734 | eb58cec1-b9b1-447e-a075-a275ecf77dff | INCLUDED | /usr/share/ansible/roles/chrony/tasks/vars.yml | standalone 2026-01-22 12:42:26.156343 | fa163e0d-6f45-64a1-ca76-0000000019a6 | TASK | Load distro-specific variables 2026-01-22 12:42:26.210411 | fa163e0d-6f45-64a1-ca76-0000000019a6 | OK | Load distro-specific variables | standalone | item=/usr/share/ansible/roles/chrony/vars/RedHat.yml 2026-01-22 12:42:26.211529 | fa163e0d-6f45-64a1-ca76-0000000019a6 | TIMING | chrony : Load distro-specific variables | standalone | 0:03:20.176541 | 0.05s 2026-01-22 12:42:26.216979 | fa163e0d-6f45-64a1-ca76-0000000019a6 | TIMING | chrony : Load distro-specific variables | standalone | 0:03:20.181970 | 0.06s 2026-01-22 12:42:26.243879 | fa163e0d-6f45-64a1-ca76-000000001923 | TASK | Ensure chronyd is running 2026-01-22 12:42:26.680215 | fa163e0d-6f45-64a1-ca76-000000001923 | OK | Ensure chronyd is running | standalone 2026-01-22 12:42:26.682182 | fa163e0d-6f45-64a1-ca76-000000001923 | TIMING | chrony : Ensure chronyd is running | standalone | 0:03:20.647190 | 0.44s 2026-01-22 12:42:26.708516 | fa163e0d-6f45-64a1-ca76-000000001924 | TASK | Force chronyd restart 2026-01-22 12:42:27.211470 | fa163e0d-6f45-64a1-ca76-000000001924 | CHANGED | Force chronyd restart | standalone 2026-01-22 12:42:27.212838 | fa163e0d-6f45-64a1-ca76-000000001924 | TIMING | chrony : Force chronyd restart | standalone | 0:03:21.177851 | 0.50s 2026-01-22 12:42:27.233262 | fa163e0d-6f45-64a1-ca76-0000000016fa | TASK | Enable online service 2026-01-22 12:42:27.265148 | fa163e0d-6f45-64a1-ca76-0000000016fa | TIMING | chrony : Enable online service | standalone | 0:03:21.230138 | 0.03s 2026-01-22 12:42:27.286149 | 371cfa88-231f-4c61-8950-5ad74a3ee664 | INCLUDED | /usr/share/ansible/roles/chrony/tasks/online.yml | standalone 2026-01-22 12:42:27.327992 | fa163e0d-6f45-64a1-ca76-0000000019d3 | TASK | Create chrony-online.service unit file 2026-01-22 12:42:27.778245 | fa163e0d-6f45-64a1-ca76-0000000019d3 | CHANGED | Create chrony-online.service unit file | standalone 2026-01-22 12:42:27.779687 | fa163e0d-6f45-64a1-ca76-0000000019d3 | TIMING | chrony : Create chrony-online.service unit file | standalone | 0:03:21.744662 | 0.45s 2026-01-22 12:42:27.806350 | fa163e0d-6f45-64a1-ca76-0000000019d4 | TASK | Enable chrony-online.service 2026-01-22 12:42:28.699568 | fa163e0d-6f45-64a1-ca76-0000000019d4 | CHANGED | Enable chrony-online.service | standalone 2026-01-22 12:42:28.701934 | fa163e0d-6f45-64a1-ca76-0000000019d4 | TIMING | chrony : Enable chrony-online.service | standalone | 0:03:22.666938 | 0.89s 
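At this point the chrony role has installed a chrony configuration file, then created and enabled a chrony-online.service unit; just below it forces a resync ("Force NTP sync") and waits until the host reports as NTP time synced (the ~10 s task). The unit's contents are not shown in this log; conventionally such a unit runs "chronyc online" once network-online.target is reached, so that NTP sources marked offline in chrony.conf start being polled. A hypothetical sketch of how such a unit could be laid down and enabled (the unit text, path and option names are assumptions, not taken from the role):

# Hypothetical sketch of a chrony-online.service unit and its enablement;
# the unit actually shipped by the chrony role may differ.
- name: Create chrony-online.service unit file (illustrative sketch)
  become: true
  ansible.builtin.copy:
    dest: /etc/systemd/system/chrony-online.service
    mode: '0644'
    content: |
      [Unit]
      Description=Mark chronyd NTP sources online once the network is up
      Wants=network-online.target
      After=network-online.target chronyd.service

      [Service]
      Type=oneshot
      ExecStart=/usr/bin/chronyc online

      [Install]
      WantedBy=multi-user.target

- name: Enable chrony-online.service (illustrative sketch)
  become: true
  ansible.builtin.systemd:
    name: chrony-online.service
    enabled: true
    daemon_reload: true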
2026-01-22 12:42:28.729350 | fa163e0d-6f45-64a1-ca76-0000000016fb | TASK | Sync chronyc 2026-01-22 12:42:28.758758 | fa163e0d-6f45-64a1-ca76-0000000016fb | TIMING | chrony : Sync chronyc | standalone | 0:03:22.723757 | 0.03s 2026-01-22 12:42:28.779779 | 346bf41c-95c9-4091-bb10-de57dc538a92 | INCLUDED | /usr/share/ansible/roles/chrony/tasks/sync.yml | standalone 2026-01-22 12:42:28.824863 | fa163e0d-6f45-64a1-ca76-000000001a79 | TASK | Force NTP sync 2026-01-22 12:42:29.069500 | fa163e0d-6f45-64a1-ca76-000000001a79 | CHANGED | Force NTP sync | standalone 2026-01-22 12:42:29.071064 | fa163e0d-6f45-64a1-ca76-000000001a79 | TIMING | chrony : Force NTP sync | standalone | 0:03:23.036071 | 0.25s 2026-01-22 12:42:29.097739 | fa163e0d-6f45-64a1-ca76-000000001a7a | TASK | Ensure system is NTP time synced 2026-01-22 12:42:39.353509 | fa163e0d-6f45-64a1-ca76-000000001a7a | CHANGED | Ensure system is NTP time synced | standalone 2026-01-22 12:42:39.355016 | fa163e0d-6f45-64a1-ca76-000000001a7a | TIMING | chrony : Ensure system is NTP time synced | standalone | 0:03:33.320019 | 10.26s 2026-01-22 12:42:39.402760 | fa163e0d-6f45-64a1-ca76-000000000bc9 | TASK | Force NTP sync 2026-01-22 12:42:39.633043 | fa163e0d-6f45-64a1-ca76-000000000bc9 | CHANGED | Force NTP sync | standalone 2026-01-22 12:42:39.634089 | fa163e0d-6f45-64a1-ca76-000000000bc9 | TIMING | Force NTP sync | standalone | 0:03:33.599101 | 0.23s 2026-01-22 12:42:39.655296 | fa163e0d-6f45-64a1-ca76-000000000bca | TASK | Ensure system is NTP time synced 2026-01-22 12:42:39.865788 | fa163e0d-6f45-64a1-ca76-000000000bca | CHANGED | Ensure system is NTP time synced | standalone 2026-01-22 12:42:39.867228 | fa163e0d-6f45-64a1-ca76-000000000bca | TIMING | Ensure system is NTP time synced | standalone | 0:03:33.832233 | 0.21s 2026-01-22 12:42:39.893610 | fa163e0d-6f45-64a1-ca76-000000000bcb | TASK | Run timezone role 2026-01-22 12:42:39.923355 | fa163e0d-6f45-64a1-ca76-000000000bcb | TIMING | Run timezone role | standalone | 0:03:33.888349 | 0.03s 2026-01-22 12:42:40.028462 | fa163e0d-6f45-64a1-ca76-000000001c3a | TASK | Set timezone 2026-01-22 12:42:40.521280 | fa163e0d-6f45-64a1-ca76-000000001c3a | OK | Set timezone | standalone 2026-01-22 12:42:40.522627 | fa163e0d-6f45-64a1-ca76-000000001c3a | TIMING | tripleo_timezone : Set timezone | standalone | 0:03:34.487635 | 0.49s 2026-01-22 12:42:40.548608 | fa163e0d-6f45-64a1-ca76-000000001c3b | TASK | Restart time services 2026-01-22 12:42:40.587319 | fa163e0d-6f45-64a1-ca76-000000001c3b | SKIPPED | Restart time services | standalone | item=rsyslog 2026-01-22 12:42:40.593313 | fa163e0d-6f45-64a1-ca76-000000001c3b | SKIPPED | Restart time services | standalone | item=crond 2026-01-22 12:42:40.606472 | fa163e0d-6f45-64a1-ca76-000000001c3b | TIMING | tripleo_timezone : Restart time services | standalone | 0:03:34.571470 | 0.06s 2026-01-22 12:42:40.660887 | fa163e0d-6f45-64a1-ca76-000000000bcc | TASK | install tmpwatch on the host 2026-01-22 12:42:43.518005 | fa163e0d-6f45-64a1-ca76-000000000bcc | OK | install tmpwatch on the host | standalone 2026-01-22 12:42:43.519340 | fa163e0d-6f45-64a1-ca76-000000000bcc | TIMING | install tmpwatch on the host | standalone | 0:03:37.484347 | 2.86s 2026-01-22 12:42:43.543901 | fa163e0d-6f45-64a1-ca76-000000000bcd | TASK | Notice - ctlplane subnet is set 2026-01-22 12:42:43.565784 | fa163e0d-6f45-64a1-ca76-000000000bcd | OK | Notice - ctlplane subnet is set | standalone | result={ "changed": false, "msg": "CIDRs found in the ctlplane network tags.\n" } 2026-01-22 
12:42:43.566966 | fa163e0d-6f45-64a1-ca76-000000000bcd | TIMING | Notice - ctlplane subnet is set | standalone | 0:03:37.531976 | 0.02s 2026-01-22 12:42:43.621797 | fa163e0d-6f45-64a1-ca76-000000000bce | TIMING | include_role : tuned | standalone | 0:03:37.586797 | 0.03s 2026-01-22 12:42:43.725182 | fa163e0d-6f45-64a1-ca76-000000001e6e | TASK | Gather variables for each operating system 2026-01-22 12:42:43.842058 | fa163e0d-6f45-64a1-ca76-000000001e6e | OK | Gather variables for each operating system | standalone | item=/usr/share/ansible/roles/tuned/vars/redhat.yml 2026-01-22 12:42:43.844513 | fa163e0d-6f45-64a1-ca76-000000001e6e | TIMING | tuned : Gather variables for each operating system | standalone | 0:03:37.809511 | 0.12s 2026-01-22 12:42:43.854252 | fa163e0d-6f45-64a1-ca76-000000001e6e | TIMING | tuned : Gather variables for each operating system | standalone | 0:03:37.819259 | 0.13s 2026-01-22 12:42:43.932870 | fa163e0d-6f45-64a1-ca76-000000001e6f | TIMING | tuned : include_tasks | standalone | 0:03:37.897870 | 0.05s 2026-01-22 12:42:43.953763 | 31787bcd-67ab-4c2f-b395-4310af269c47 | INCLUDED | /usr/share/ansible/roles/tuned/tasks/tuned_install.yml | standalone 2026-01-22 12:42:43.986455 | fa163e0d-6f45-64a1-ca76-000000001f9f | TASK | Check tuned package is installed 2026-01-22 12:42:44.249493 | fa163e0d-6f45-64a1-ca76-000000001f9f | CHANGED | Check tuned package is installed | standalone 2026-01-22 12:42:44.250967 | fa163e0d-6f45-64a1-ca76-000000001f9f | TIMING | tuned : Check tuned package is installed | standalone | 0:03:38.215975 | 0.26s 2026-01-22 12:42:44.277766 | fa163e0d-6f45-64a1-ca76-000000001fa0 | TASK | Install tuned 2026-01-22 12:42:44.310382 | fa163e0d-6f45-64a1-ca76-000000001fa0 | SKIPPED | Install tuned | standalone 2026-01-22 12:42:44.311678 | fa163e0d-6f45-64a1-ca76-000000001fa0 | TIMING | tuned : Install tuned | standalone | 0:03:38.276655 | 0.03s 2026-01-22 12:42:44.339406 | fa163e0d-6f45-64a1-ca76-000000001fa1 | TASK | Restart tuned 2026-01-22 12:42:44.369874 | fa163e0d-6f45-64a1-ca76-000000001fa1 | SKIPPED | Restart tuned | standalone 2026-01-22 12:42:44.371065 | fa163e0d-6f45-64a1-ca76-000000001fa1 | TIMING | tuned : Restart tuned | standalone | 0:03:38.336069 | 0.03s 2026-01-22 12:42:44.396819 | fa163e0d-6f45-64a1-ca76-000000001e70 | TASK | Check for tuned-adm 2026-01-22 12:42:44.624843 | fa163e0d-6f45-64a1-ca76-000000001e70 | OK | Check for tuned-adm | standalone 2026-01-22 12:42:44.626328 | fa163e0d-6f45-64a1-ca76-000000001e70 | TIMING | tuned : Check for tuned-adm | standalone | 0:03:38.591334 | 0.23s 2026-01-22 12:42:44.686863 | fa163e0d-6f45-64a1-ca76-000000001e71 | TIMING | tuned : include_tasks | standalone | 0:03:38.651860 | 0.03s 2026-01-22 12:42:44.714208 | b979ab5a-1da4-4432-bee4-49279dfa251f | INCLUDED | /usr/share/ansible/roles/tuned/tasks/tuned_config.yml | standalone 2026-01-22 12:42:44.749005 | fa163e0d-6f45-64a1-ca76-000000001fcc | TASK | Ensure profile directory exists 2026-01-22 12:42:44.820249 | fa163e0d-6f45-64a1-ca76-000000001fcc | SKIPPED | Ensure profile directory exists | standalone 2026-01-22 12:42:44.821572 | fa163e0d-6f45-64a1-ca76-000000001fcc | TIMING | tuned : Ensure profile directory exists | standalone | 0:03:38.786578 | 0.07s 2026-01-22 12:42:44.847295 | fa163e0d-6f45-64a1-ca76-000000001fcd | TASK | Create custom tuned profile 2026-01-22 12:42:44.908986 | fa163e0d-6f45-64a1-ca76-000000001fcd | SKIPPED | Create custom tuned profile | standalone 2026-01-22 12:42:44.910247 | fa163e0d-6f45-64a1-ca76-000000001fcd | TIMING | 
tuned : Create custom tuned profile | standalone | 0:03:38.875255 | 0.06s 2026-01-22 12:42:44.935601 | fa163e0d-6f45-64a1-ca76-000000001fce | TASK | Check tuned active profile 2026-01-22 12:42:45.159112 | fa163e0d-6f45-64a1-ca76-000000001fce | OK | Check tuned active profile | standalone 2026-01-22 12:42:45.160430 | fa163e0d-6f45-64a1-ca76-000000001fce | TIMING | tuned : Check tuned active profile | standalone | 0:03:39.125440 | 0.22s 2026-01-22 12:42:45.184970 | fa163e0d-6f45-64a1-ca76-000000001fcf | TASK | Check Tuned Configuration file exists 2026-01-22 12:42:45.412774 | fa163e0d-6f45-64a1-ca76-000000001fcf | OK | Check Tuned Configuration file exists | standalone 2026-01-22 12:42:45.414104 | fa163e0d-6f45-64a1-ca76-000000001fcf | TIMING | tuned : Check Tuned Configuration file exists | standalone | 0:03:39.379111 | 0.23s 2026-01-22 12:42:45.439097 | fa163e0d-6f45-64a1-ca76-000000001fd0 | TASK | Fail if tuned profile conf is absent but isolated cores is provided 2026-01-22 12:42:45.504197 | fa163e0d-6f45-64a1-ca76-000000001fd0 | SKIPPED | Fail if tuned profile conf is absent but isolated cores is provided | standalone 2026-01-22 12:42:45.505633 | fa163e0d-6f45-64a1-ca76-000000001fd0 | TIMING | tuned : Fail if tuned profile conf is absent but isolated cores is provided | standalone | 0:03:39.470639 | 0.07s 2026-01-22 12:42:45.533353 | fa163e0d-6f45-64a1-ca76-000000001fd1 | TASK | Configure isolated cores for profile throughput-performance 2026-01-22 12:42:45.595629 | fa163e0d-6f45-64a1-ca76-000000001fd1 | SKIPPED | Configure isolated cores for profile throughput-performance | standalone 2026-01-22 12:42:45.596878 | fa163e0d-6f45-64a1-ca76-000000001fd1 | TIMING | tuned : Configure isolated cores for profile throughput-performance | standalone | 0:03:39.561887 | 0.06s 2026-01-22 12:42:45.619782 | fa163e0d-6f45-64a1-ca76-000000001fd2 | TASK | Enable tuned profile 2026-01-22 12:42:45.661050 | fa163e0d-6f45-64a1-ca76-000000001fd2 | SKIPPED | Enable tuned profile | standalone 2026-01-22 12:42:45.661802 | fa163e0d-6f45-64a1-ca76-000000001fd2 | TIMING | tuned : Enable tuned profile | standalone | 0:03:39.626818 | 0.04s PLAY [Overcloud container setup tasks] ***************************************** 2026-01-22 12:42:45.806632 | fa163e0d-6f45-64a1-ca76-0000000000ac | TASK | Overcloud container setup tasks 2026-01-22 12:42:45.840241 | fa163e0d-6f45-64a1-ca76-0000000000ac | OK | Overcloud container setup tasks | standalone -> localhost | result={ "changed": false, "msg": "Use --start-at-task 'Overcloud container setup tasks' to resume from this task" } [WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-0000000000ac') missing from stats 2026-01-22 12:42:45.860558 | fa163e0d-6f45-64a1-ca76-0000000000ad | TASK | Write config data 2026-01-22 12:42:45.922095 | fa163e0d-6f45-64a1-ca76-0000000000ad | TIMING | Write config data | standalone | 0:03:39.887085 | 0.06s 2026-01-22 12:42:45.965107 | 0859088b-fbbd-4a8e-b495-5f5cc015037b | INCLUDED | /root/standalone-ansible-mz1ymllk/common_container_setup_tasks.yaml | standalone 2026-01-22 12:42:45.989620 | fa163e0d-6f45-64a1-ca76-000000002030 | TASK | Create and ensure setype for /var/log/containers directory 2026-01-22 12:42:46.233825 | fa163e0d-6f45-64a1-ca76-000000002030 | OK | Create and ensure setype for /var/log/containers directory | standalone 2026-01-22 12:42:46.235516 | fa163e0d-6f45-64a1-ca76-000000002030 | TIMING | Create and ensure setype for /var/log/containers directory | standalone | 0:03:40.200522 | 0.25s 2026-01-22 12:42:46.262694 | 
fa163e0d-6f45-64a1-ca76-000000002031 | TASK | Create ContainerLogStdoutPath directory 2026-01-22 12:42:46.511698 | fa163e0d-6f45-64a1-ca76-000000002031 | CHANGED | Create ContainerLogStdoutPath directory | standalone 2026-01-22 12:42:46.513559 | fa163e0d-6f45-64a1-ca76-000000002031 | TIMING | Create ContainerLogStdoutPath directory | standalone | 0:03:40.478558 | 0.25s 2026-01-22 12:42:46.542835 | fa163e0d-6f45-64a1-ca76-000000002032 | TASK | Create /var/lib/tripleo-config directory 2026-01-22 12:42:46.787494 | fa163e0d-6f45-64a1-ca76-000000002032 | OK | Create /var/lib/tripleo-config directory | standalone 2026-01-22 12:42:46.788995 | fa163e0d-6f45-64a1-ca76-000000002032 | TIMING | Create /var/lib/tripleo-config directory | standalone | 0:03:40.753999 | 0.24s 2026-01-22 12:42:46.814603 | fa163e0d-6f45-64a1-ca76-000000002033 | TASK | Clean old /var/lib/container-startup-configs.json file 2026-01-22 12:42:47.053430 | fa163e0d-6f45-64a1-ca76-000000002033 | OK | Clean old /var/lib/container-startup-configs.json file | standalone 2026-01-22 12:42:47.054913 | fa163e0d-6f45-64a1-ca76-000000002033 | TIMING | Clean old /var/lib/container-startup-configs.json file | standalone | 0:03:41.019921 | 0.24s 2026-01-22 12:42:47.079721 | fa163e0d-6f45-64a1-ca76-000000002034 | TASK | Clean old /var/lib/docker-container-startup-configs.json file 2026-01-22 12:42:47.310987 | fa163e0d-6f45-64a1-ca76-000000002034 | OK | Clean old /var/lib/docker-container-startup-configs.json file | standalone 2026-01-22 12:42:47.312227 | fa163e0d-6f45-64a1-ca76-000000002034 | TIMING | Clean old /var/lib/docker-container-startup-configs.json file | standalone | 0:03:41.277235 | 0.23s 2026-01-22 12:42:47.337568 | fa163e0d-6f45-64a1-ca76-000000002038 | TASK | Create fcontext entry for container-config-scripts 2026-01-22 12:42:48.619318 | fa163e0d-6f45-64a1-ca76-000000002038 | CHANGED | Create fcontext entry for container-config-scripts | standalone 2026-01-22 12:42:48.621243 | fa163e0d-6f45-64a1-ca76-000000002038 | TIMING | Create fcontext entry for container-config-scripts | standalone | 0:03:42.586242 | 1.28s 2026-01-22 12:42:48.648042 | fa163e0d-6f45-64a1-ca76-000000002039 | TASK | Create /var/lib/container-config-scripts 2026-01-22 12:42:48.897435 | fa163e0d-6f45-64a1-ca76-000000002039 | CHANGED | Create /var/lib/container-config-scripts | standalone 2026-01-22 12:42:48.899392 | fa163e0d-6f45-64a1-ca76-000000002039 | TIMING | Create /var/lib/container-config-scripts | standalone | 0:03:42.864391 | 0.25s 2026-01-22 12:42:48.928639 | fa163e0d-6f45-64a1-ca76-00000000203a | TASK | Write container config scripts 2026-01-22 12:42:49.286297 | fa163e0d-6f45-64a1-ca76-00000000203a | CHANGED | Write container config scripts | standalone 2026-01-22 12:42:49.287721 | fa163e0d-6f45-64a1-ca76-00000000203a | TIMING | Write container config scripts | standalone | 0:03:43.252726 | 0.36s 2026-01-22 12:42:49.316768 | fa163e0d-6f45-64a1-ca76-00000000203b | TASK | Write /var/lib/tripleo-config/container-startup-config-readme.txt 2026-01-22 12:42:49.795860 | fa163e0d-6f45-64a1-ca76-00000000203b | CHANGED | Write /var/lib/tripleo-config/container-startup-config-readme.txt | standalone 2026-01-22 12:42:49.797149 | fa163e0d-6f45-64a1-ca76-00000000203b | TIMING | Write /var/lib/tripleo-config/container-startup-config-readme.txt | standalone | 0:03:43.762156 | 0.48s 2026-01-22 12:42:49.822116 | fa163e0d-6f45-64a1-ca76-00000000203c | TASK | Generate startup configs files per step and per container 2026-01-22 12:42:50.497016 | 
fa163e0d-6f45-64a1-ca76-00000000203c | CHANGED | Generate startup configs files per step and per container | standalone 2026-01-22 12:42:50.501677 | fa163e0d-6f45-64a1-ca76-00000000203c | TIMING | Generate startup configs files per step and per container | standalone | 0:03:44.466689 | 0.68s 2026-01-22 12:42:50.526118 | fa163e0d-6f45-64a1-ca76-00000000203d | TASK | Create /var/lib/kolla/config_files directory 2026-01-22 12:42:50.757973 | fa163e0d-6f45-64a1-ca76-00000000203d | CHANGED | Create /var/lib/kolla/config_files directory | standalone 2026-01-22 12:42:50.759767 | fa163e0d-6f45-64a1-ca76-00000000203d | TIMING | Create /var/lib/kolla/config_files directory | standalone | 0:03:44.724769 | 0.23s 2026-01-22 12:42:50.785913 | fa163e0d-6f45-64a1-ca76-00000000203e | TASK | Create /var/lib/config-data directory 2026-01-22 12:42:51.012737 | fa163e0d-6f45-64a1-ca76-00000000203e | CHANGED | Create /var/lib/config-data directory | standalone 2026-01-22 12:42:51.014722 | fa163e0d-6f45-64a1-ca76-00000000203e | TIMING | Create /var/lib/config-data directory | standalone | 0:03:44.979722 | 0.23s 2026-01-22 12:42:51.043025 | fa163e0d-6f45-64a1-ca76-00000000203f | TASK | Write container config json files 2026-01-22 12:42:51.516785 | fa163e0d-6f45-64a1-ca76-00000000203f | CHANGED | Write container config json files | standalone 2026-01-22 12:42:51.520975 | fa163e0d-6f45-64a1-ca76-00000000203f | TIMING | Write container config json files | standalone | 0:03:45.485986 | 0.48s PLAY [Pre Deployment Step Tasks] *********************************************** 2026-01-22 12:42:51.638703 | fa163e0d-6f45-64a1-ca76-0000000000b1 | TASK | Pre Deployment Step Tasks 2026-01-22 12:42:51.666939 | fa163e0d-6f45-64a1-ca76-0000000000b1 | OK | Pre Deployment Step Tasks | standalone -> localhost | result={ "changed": false, "msg": "Use --start-at-task 'Pre Deployment Step Tasks' to resume from this task" } [WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-0000000000b1') missing from stats [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. 
Found: '{{ playbook_dir }}/{{ _task_file_path }}' is exists 2026-01-22 12:42:51.724526 | fa163e0d-6f45-64a1-ca76-0000000000b2 | TIMING | include_tasks | standalone | 0:03:45.689518 | 0.04s 2026-01-22 12:42:51.752241 | b90cf9ac-c231-4520-8eb7-0dbe7637c577 | INCLUDED | /root/standalone-ansible-mz1ymllk/Standalone/pre_deploy_step_tasks.yaml | standalone 2026-01-22 12:42:51.792125 | fa163e0d-6f45-64a1-ca76-000000002073 | TASK | Get undercloud FQDN 2026-01-22 12:42:51.864583 | fa163e0d-6f45-64a1-ca76-000000002073 | OK | Get undercloud FQDN | standalone -> localhost [WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-000000002073') missing from stats 2026-01-22 12:42:51.932560 | fa163e0d-6f45-64a1-ca76-000000002074 | TIMING | include_role : tripleo_ipa_registration | standalone | 0:03:45.897544 | 0.04s 2026-01-22 12:42:52.010320 | fa163e0d-6f45-64a1-ca76-000000002098 | TASK | set main facts 2026-01-22 12:42:52.083371 | fa163e0d-6f45-64a1-ca76-000000002098 | OK | set main facts | standalone -> localhost [WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-000000002098') missing from stats 2026-01-22 12:42:52.130806 | fa163e0d-6f45-64a1-ca76-000000002099 | TASK | get the default.conf file 2026-01-22 12:42:52.385891 | fa163e0d-6f45-64a1-ca76-000000002099 | OK | get the default.conf file | standalone -> localhost [WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-000000002099') missing from stats 2026-01-22 12:42:52.449889 | fa163e0d-6f45-64a1-ca76-00000000209a | TASK | set the principal 2026-01-22 12:42:52.515949 | fa163e0d-6f45-64a1-ca76-00000000209a | OK | set the principal | standalone -> localhost [WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-00000000209a') missing from stats 2026-01-22 12:42:52.564644 | fa163e0d-6f45-64a1-ca76-00000000209c | TASK | add new host with one-time password 2026-01-22 12:42:53.755787 | fa163e0d-6f45-64a1-ca76-00000000209c | OK | add new host with one-time password | standalone -> localhost [WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-00000000209c') missing from stats 2026-01-22 12:42:53.796008 | fa163e0d-6f45-64a1-ca76-00000000209d | TASK | set otp as a host fact 2026-01-22 12:42:53.835929 | fa163e0d-6f45-64a1-ca76-00000000209d | SKIPPED | set otp as a host fact | standalone 2026-01-22 12:42:53.837037 | fa163e0d-6f45-64a1-ca76-00000000209d | TIMING | tripleo_ipa_registration : set otp as a host fact | standalone | 0:03:47.802042 | 0.04s 2026-01-22 12:42:53.883038 | fa163e0d-6f45-64a1-ca76-00000000209f | TASK | add required services using custom module 2026-01-22 12:42:59.634821 | fa163e0d-6f45-64a1-ca76-00000000209f | CHANGED | add required services using custom module | standalone -> localhost [WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-00000000209f') missing from stats 2026-01-22 12:42:59.707141 | fa163e0d-6f45-64a1-ca76-000000002076 | TASK | check if default.conf exists 2026-01-22 12:42:59.950460 | fa163e0d-6f45-64a1-ca76-000000002076 | OK | check if default.conf exists | standalone 2026-01-22 12:42:59.951778 | fa163e0d-6f45-64a1-ca76-000000002076 | TIMING | check if default.conf exists | standalone | 0:03:53.916784 | 0.24s 2026-01-22 12:42:59.976129 | fa163e0d-6f45-64a1-ca76-000000002077 | TASK | install openssl-perl 2026-01-22 12:43:00.015252 | fa163e0d-6f45-64a1-ca76-000000002077 | SKIPPED | install openssl-perl | standalone 2026-01-22 12:43:00.016839 | fa163e0d-6f45-64a1-ca76-000000002077 | TIMING | install openssl-perl | standalone | 0:03:53.981843 | 0.04s 2026-01-22 
12:43:00.048588 | fa163e0d-6f45-64a1-ca76-000000002078 | TASK | register as an ipa client 2026-01-22 12:43:00.088820 | fa163e0d-6f45-64a1-ca76-000000002078 | SKIPPED | register as an ipa client | standalone 2026-01-22 12:43:00.090073 | fa163e0d-6f45-64a1-ca76-000000002078 | TIMING | register as an ipa client | standalone | 0:03:54.055081 | 0.04s 2026-01-22 12:43:00.114386 | fa163e0d-6f45-64a1-ca76-000000002079 | TASK | restart certmonger service 2026-01-22 12:43:00.142643 | fa163e0d-6f45-64a1-ca76-000000002079 | SKIPPED | restart certmonger service | standalone 2026-01-22 12:43:00.143902 | fa163e0d-6f45-64a1-ca76-000000002079 | TIMING | restart certmonger service | standalone | 0:03:54.108909 | 0.03s 2026-01-22 12:43:00.169073 | fa163e0d-6f45-64a1-ca76-00000000207a | TASK | set discovered ipa realm 2026-01-22 12:43:00.210036 | fa163e0d-6f45-64a1-ca76-00000000207a | OK | set discovered ipa realm | standalone 2026-01-22 12:43:00.211253 | fa163e0d-6f45-64a1-ca76-00000000207a | TIMING | set discovered ipa realm | standalone | 0:03:54.176260 | 0.04s PLAY [External deployment step 1] ********************************************** 2026-01-22 12:43:00.354860 | fa163e0d-6f45-64a1-ca76-0000000000b6 | TASK | External deployment step 1 2026-01-22 12:43:00.383882 | fa163e0d-6f45-64a1-ca76-0000000000b6 | OK | External deployment step 1 | undercloud -> localhost | result={ "changed": false, "msg": "Use --start-at-task 'External deployment step 1' to resume from this task" } [WARNING]: ('undercloud -> localhost', 'fa163e0d-6f45-64a1-ca76-0000000000b6') missing from stats 2026-01-22 12:43:00.441874 | fa163e0d-6f45-64a1-ca76-0000000000b7 | TIMING | include_tasks | undercloud | 0:03:54.406875 | 0.04s 2026-01-22 12:43:00.480178 | 7a850cdc-fe83-4d05-8364-21db9eba9b0f | INCLUDED | /root/standalone-ansible-mz1ymllk/external_deploy_steps_tasks_step1.yaml | undercloud 2026-01-22 12:43:00.498949 | fa163e0d-6f45-64a1-ca76-0000000020f5 | TASK | Container image prepare 2026-01-22 12:43:01.505432 | fa163e0d-6f45-64a1-ca76-0000000020f5 | CHANGED | Container image prepare | undercloud 2026-01-22 12:43:01.508214 | fa163e0d-6f45-64a1-ca76-0000000020f5 | TIMING | tripleo_container_image_prepare : Container image prepare | undercloud | 0:03:55.473222 | 1.01s 2026-01-22 12:43:01.594243 | fa163e0d-6f45-64a1-ca76-0000000020fc | TIMING | include_role : tripleo_ipa_dns | undercloud | 0:03:55.559240 | 0.05s 2026-01-22 12:43:01.637738 | fa163e0d-6f45-64a1-ca76-00000000211b | TASK | get dns zones and entries to add 2026-01-22 12:43:01.700856 | fa163e0d-6f45-64a1-ca76-00000000211b | OK | get dns zones and entries to add | undercloud 2026-01-22 12:43:01.702560 | fa163e0d-6f45-64a1-ca76-00000000211b | TIMING | tripleo_ipa_dns : get dns zones and entries to add | undercloud | 0:03:55.667565 | 0.06s 2026-01-22 12:43:01.784468 | fa163e0d-6f45-64a1-ca76-00000000211c | OK | tripleo_ipa_dns : debug | undercloud | result={ "changed": false, "dns_zones": [ "ctlplane.ooo.test", "internalapi.ooo.test", "122.168.192.in-addr.arpa", "0.19.172.in-addr.arpa", "storage.ooo.test", "external.ooo.test", "0.17.172.in-addr.arpa", "0.20.172.in-addr.arpa", "ooo.test", "0.18.172.in-addr.arpa", "storagemgmt.ooo.test", "tenant.ooo.test", "0.21.172.in-addr.arpa" ] } 2026-01-22 12:43:01.785880 | fa163e0d-6f45-64a1-ca76-00000000211c | TIMING | tripleo_ipa_dns : debug | undercloud | 0:03:55.750887 | 0.06s 2026-01-22 12:43:01.869062 | fa163e0d-6f45-64a1-ca76-00000000211d | OK | tripleo_ipa_dns : debug | undercloud | result={ "changed": false, "dns_entries": [ { 
"record_name": "overcloud", "record_type": "A", "record_value": "192.168.122.99", "zone_name": "ctlplane.ooo.test" }, { "record_name": "overcloud", "record_type": "A", "record_value": "172.18.0.2", "zone_name": "storage.ooo.test" }, { "record_name": "overcloud", "record_type": "A", "record_value": "172.20.0.2", "zone_name": "storagemgmt.ooo.test" }, { "record_name": "overcloud", "record_type": "A", "record_value": "172.17.0.2", "zone_name": "internalapi.ooo.test" }, { "record_name": "standalone", "record_type": "A", "record_value": "172.21.0.2", "zone_name": "ooo.test" }, { "record_name": "standalone", "record_type": "A", "record_value": "172.17.0.100", "zone_name": "ooo.test" }, { "record_name": "standalone", "record_type": "A", "record_value": "172.18.0.100", "zone_name": "storage.ooo.test" }, { "record_name": "standalone", "record_type": "A", "record_value": "172.20.0.100", "zone_name": "storagemgmt.ooo.test" }, { "record_name": "standalone", "record_type": "A", "record_value": "172.17.0.100", "zone_name": "internalapi.ooo.test" }, { "record_name": "standalone", "record_type": "A", "record_value": "172.19.0.100", "zone_name": "tenant.ooo.test" }, { "record_name": "standalone", "record_type": "A", "record_value": "172.21.0.100", "zone_name": "external.ooo.test" }, { "record_name": "standalone", "record_type": "A", "record_value": "192.168.122.100", "zone_name": "ctlplane.ooo.test" }, { "record_name": "100", "record_type": "PTR", "record_value": "standalone.ctlplane.ooo.test.", "zone_name": "122.168.192.in-addr.arpa" }, { "record_name": "100", "record_type": "PTR", "record_value": "standalone.external.ooo.test.", "zone_name": "0.21.172.in-addr.arpa" }, { "record_name": "100", "record_type": "PTR", "record_value": "standalone.tenant.ooo.test.", "zone_name": "0.19.172.in-addr.arpa" }, { "record_name": "100", "record_type": "PTR", "record_value": "standalone.internalapi.ooo.test.", "zone_name": "0.17.172.in-addr.arpa" }, { "record_name": "100", "record_type": "PTR", "record_value": "standalone.storagemgmt.ooo.test.", "zone_name": "0.20.172.in-addr.arpa" }, { "record_name": "100", "record_type": "PTR", "record_value": "standalone.storage.ooo.test.", "zone_name": "0.18.172.in-addr.arpa" }, { "record_name": "2", "record_type": "PTR", "record_value": "standalone.ooo.test.", "zone_name": "0.21.172.in-addr.arpa" }, { "record_name": "2", "record_type": "PTR", "record_value": "overcloud.internalapi.ooo.test.", "zone_name": "0.17.172.in-addr.arpa" }, { "record_name": "2", "record_type": "PTR", "record_value": "overcloud.storagemgmt.ooo.test.", "zone_name": "0.20.172.in-addr.arpa" }, { "record_name": "2", "record_type": "PTR", "record_value": "overcloud.storage.ooo.test.", "zone_name": "0.18.172.in-addr.arpa" }, { "record_name": "99", "record_type": "PTR", "record_value": "overcloud.ctlplane.ooo.test.", "zone_name": "122.168.192.in-addr.arpa" } ] } 2026-01-22 12:43:01.872269 | fa163e0d-6f45-64a1-ca76-00000000211d | TIMING | tripleo_ipa_dns : debug | undercloud | 0:03:55.837270 | 0.07s 2026-01-22 12:43:01.895363 | fa163e0d-6f45-64a1-ca76-00000000211e | TASK | add dns zones and records 2026-01-22 12:43:05.912801 | fa163e0d-6f45-64a1-ca76-00000000211e | CHANGED | add dns zones and records | undercloud 2026-01-22 12:43:05.914564 | fa163e0d-6f45-64a1-ca76-00000000211e | TIMING | tripleo_ipa_dns : add dns zones and records | undercloud | 0:03:59.879572 | 4.02s 2026-01-22 12:43:05.949603 | fa163e0d-6f45-64a1-ca76-0000000020fe | TASK | Create /etc/openstack directory if it does not exist 2026-01-22 
12:43:06.178861 | fa163e0d-6f45-64a1-ca76-0000000020fe | CHANGED | Create /etc/openstack directory if it does not exist | undercloud 2026-01-22 12:43:06.180051 | fa163e0d-6f45-64a1-ca76-0000000020fe | TIMING | Create /etc/openstack directory if it does not exist | undercloud | 0:04:00.145058 | 0.23s 2026-01-22 12:43:06.197601 | fa163e0d-6f45-64a1-ca76-0000000020ff | TASK | Configure /etc/openstack/clouds.yaml 2026-01-22 12:43:06.248560 | fa163e0d-6f45-64a1-ca76-0000000020ff | TIMING | Configure /etc/openstack/clouds.yaml | undercloud | 0:04:00.213563 | 0.05s 2026-01-22 12:43:06.293864 | fa163e0d-6f45-64a1-ca76-000000002150 | TASK | Check if /etc/openstack/clouds.yaml exists 2026-01-22 12:43:06.539311 | fa163e0d-6f45-64a1-ca76-000000002150 | OK | Check if /etc/openstack/clouds.yaml exists | undercloud 2026-01-22 12:43:06.540134 | fa163e0d-6f45-64a1-ca76-000000002150 | TIMING | tripleo_keystone_resources : Check if /etc/openstack/clouds.yaml exists | undercloud | 0:04:00.505152 | 0.24s 2026-01-22 12:43:06.555790 | fa163e0d-6f45-64a1-ca76-000000002151 | TASK | Create empty /etc/openstack/clouds.yaml if it does not exist 2026-01-22 12:43:07.023402 | fa163e0d-6f45-64a1-ca76-000000002151 | CHANGED | Create empty /etc/openstack/clouds.yaml if it does not exist | undercloud 2026-01-22 12:43:07.024752 | fa163e0d-6f45-64a1-ca76-000000002151 | TIMING | tripleo_keystone_resources : Create empty /etc/openstack/clouds.yaml if it does not exist | undercloud | 0:04:00.989759 | 0.47s 2026-01-22 12:43:07.044754 | fa163e0d-6f45-64a1-ca76-000000002152 | TASK | Configure /etc/openstack/clouds.yaml 2026-01-22 12:43:07.097256 | fa163e0d-6f45-64a1-ca76-000000002152 | TIMING | Configure /etc/openstack/clouds.yaml | undercloud | 0:04:01.062254 | 0.05s 2026-01-22 12:43:07.143921 | fa163e0d-6f45-64a1-ca76-00000000218c | TASK | Check for a configured destination 2026-01-22 12:43:07.215852 | fa163e0d-6f45-64a1-ca76-00000000218c | SKIPPED | Check for a configured destination | undercloud 2026-01-22 12:43:07.217326 | fa163e0d-6f45-64a1-ca76-00000000218c | TIMING | tripleo_config : Check for a configured destination | undercloud | 0:04:01.182335 | 0.07s 2026-01-22 12:43:07.243741 | fa163e0d-6f45-64a1-ca76-00000000218d | TASK | Generate /etc/openstack/clouds.yaml config 2026-01-22 12:43:07.705836 | fa163e0d-6f45-64a1-ca76-00000000218d | CHANGED | Generate /etc/openstack/clouds.yaml config | undercloud 2026-01-22 12:43:07.707728 | fa163e0d-6f45-64a1-ca76-00000000218d | TIMING | tripleo_config : Generate /etc/openstack/clouds.yaml config | undercloud | 0:04:01.672730 | 0.46s PLAY [Deploy step tasks for 1] ************************************************* 2026-01-22 12:43:07.842251 | fa163e0d-6f45-64a1-ca76-0000000000ba | TASK | Deploy step tasks for 1 2026-01-22 12:43:07.872771 | fa163e0d-6f45-64a1-ca76-0000000000ba | OK | Deploy step tasks for 1 | standalone -> localhost | result={ "changed": false, "msg": "Use --start-at-task 'Deploy step tasks for 1' to resume from this task" } [WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-0000000000ba') missing from stats 2026-01-22 12:43:07.898730 | fa163e0d-6f45-64a1-ca76-0000000000bb | TASK | Write the config_step hieradata for the deploy step 1 tasks 2026-01-22 12:43:08.392293 | fa163e0d-6f45-64a1-ca76-0000000000bb | CHANGED | Write the config_step hieradata for the deploy step 1 tasks | standalone 2026-01-22 12:43:08.394001 | fa163e0d-6f45-64a1-ca76-0000000000bb | TIMING | Write the config_step hieradata for the deploy step 1 tasks | standalone | 0:04:02.359006 | 
0.49s 2026-01-22 12:43:08.416559 | fa163e0d-6f45-64a1-ca76-0000000000bd | TASK | Check if /var/lib/tripleo-config/container-startup-config/step_1 already exists 2026-01-22 12:43:08.656067 | fa163e0d-6f45-64a1-ca76-0000000000bd | OK | Check if /var/lib/tripleo-config/container-startup-config/step_1 already exists | standalone 2026-01-22 12:43:08.657980 | fa163e0d-6f45-64a1-ca76-0000000000bd | TIMING | Check if /var/lib/tripleo-config/container-startup-config/step_1 already exists | standalone | 0:04:02.622984 | 0.24s 2026-01-22 12:43:08.686637 | fa163e0d-6f45-64a1-ca76-0000000000be | TASK | Write config data at the start of step 1 2026-01-22 12:43:08.768109 | fa163e0d-6f45-64a1-ca76-0000000000be | TIMING | Write config data at the start of step 1 | standalone | 0:04:02.733099 | 0.08s 2026-01-22 12:43:08.812781 | d1ce2eb5-c401-46a3-bb70-ef6cb803cab4 | INCLUDED | /root/standalone-ansible-mz1ymllk/common_deploy_steps_tasks_step_1.yaml | standalone 2026-01-22 12:43:08.837552 | fa163e0d-6f45-64a1-ca76-0000000021cb | TASK | Delete existing /var/lib/tripleo-config/check-mode directory for check mode 2026-01-22 12:43:08.868069 | fa163e0d-6f45-64a1-ca76-0000000021cb | SKIPPED | Delete existing /var/lib/tripleo-config/check-mode directory for check mode | standalone 2026-01-22 12:43:08.868879 | fa163e0d-6f45-64a1-ca76-0000000021cb | TIMING | Delete existing /var/lib/tripleo-config/check-mode directory for check mode | standalone | 0:04:02.833896 | 0.03s 2026-01-22 12:43:08.886090 | fa163e0d-6f45-64a1-ca76-0000000021cc | TASK | Create /var/lib/tripleo-config/check-mode directory for check mode 2026-01-22 12:43:08.914597 | fa163e0d-6f45-64a1-ca76-0000000021cc | SKIPPED | Create /var/lib/tripleo-config/check-mode directory for check mode | standalone 2026-01-22 12:43:08.915169 | fa163e0d-6f45-64a1-ca76-0000000021cc | TIMING | Create /var/lib/tripleo-config/check-mode directory for check mode | standalone | 0:04:02.880189 | 0.03s 2026-01-22 12:43:08.932036 | fa163e0d-6f45-64a1-ca76-0000000021cd | TASK | Write the puppet step_config manifest 2026-01-22 12:43:09.415732 | fa163e0d-6f45-64a1-ca76-0000000021cd | CHANGED | Write the puppet step_config manifest | standalone 2026-01-22 12:43:09.416817 | fa163e0d-6f45-64a1-ca76-0000000021cd | TIMING | Write the puppet step_config manifest | standalone | 0:04:03.381830 | 0.48s 2026-01-22 12:43:09.436540 | fa163e0d-6f45-64a1-ca76-0000000021ce | TASK | Diff puppet step_config manifest changes for check mode 2026-01-22 12:43:09.468990 | fa163e0d-6f45-64a1-ca76-0000000021ce | SKIPPED | Diff puppet step_config manifest changes for check mode | standalone 2026-01-22 12:43:09.470420 | fa163e0d-6f45-64a1-ca76-0000000021ce | TIMING | Diff puppet step_config manifest changes for check mode | standalone | 0:04:03.435427 | 0.03s 2026-01-22 12:43:09.496191 | fa163e0d-6f45-64a1-ca76-0000000021cf | TASK | Diff puppet step_config manifest changes for check mode 2026-01-22 12:43:09.526913 | fa163e0d-6f45-64a1-ca76-0000000021cf | SKIPPED | Diff puppet step_config manifest changes for check mode | standalone 2026-01-22 12:43:09.528073 | fa163e0d-6f45-64a1-ca76-0000000021cf | TIMING | Diff puppet step_config manifest changes for check mode | standalone | 0:04:03.493080 | 0.03s 2026-01-22 12:43:09.553162 | fa163e0d-6f45-64a1-ca76-0000000021d0 | TASK | Create /var/lib/container-puppet 2026-01-22 12:43:09.772400 | fa163e0d-6f45-64a1-ca76-0000000021d0 | OK | Create /var/lib/container-puppet | standalone 2026-01-22 12:43:09.773570 | fa163e0d-6f45-64a1-ca76-0000000021d0 | TIMING | Create 
/var/lib/container-puppet | standalone | 0:04:03.738582 | 0.22s 2026-01-22 12:43:09.793575 | fa163e0d-6f45-64a1-ca76-0000000021d1 | TASK | Delete existing /var/lib/container-puppet/check-mode for check mode 2026-01-22 12:43:09.821036 | fa163e0d-6f45-64a1-ca76-0000000021d1 | SKIPPED | Delete existing /var/lib/container-puppet/check-mode for check mode | standalone 2026-01-22 12:43:09.821935 | fa163e0d-6f45-64a1-ca76-0000000021d1 | TIMING | Delete existing /var/lib/container-puppet/check-mode for check mode | standalone | 0:04:03.786949 | 0.03s 2026-01-22 12:43:09.845603 | fa163e0d-6f45-64a1-ca76-0000000021d2 | TASK | Create /var/lib/container-puppet/check-mode for check mode 2026-01-22 12:43:09.873793 | fa163e0d-6f45-64a1-ca76-0000000021d2 | SKIPPED | Create /var/lib/container-puppet/check-mode for check mode | standalone 2026-01-22 12:43:09.874494 | fa163e0d-6f45-64a1-ca76-0000000021d2 | TIMING | Create /var/lib/container-puppet/check-mode for check mode | standalone | 0:04:03.839513 | 0.03s 2026-01-22 12:43:09.891282 | fa163e0d-6f45-64a1-ca76-0000000021d3 | TASK | Write container-puppet.json file 2026-01-22 12:43:10.445191 | fa163e0d-6f45-64a1-ca76-0000000021d3 | CHANGED | Write container-puppet.json file | standalone 2026-01-22 12:43:10.446208 | fa163e0d-6f45-64a1-ca76-0000000021d3 | TIMING | Write container-puppet.json file | standalone | 0:04:04.411222 | 0.55s 2026-01-22 12:43:10.465706 | fa163e0d-6f45-64a1-ca76-0000000021d4 | TASK | Diff container-puppet.json changes for check mode 2026-01-22 12:43:10.493732 | fa163e0d-6f45-64a1-ca76-0000000021d4 | SKIPPED | Diff container-puppet.json changes for check mode | standalone 2026-01-22 12:43:10.494624 | fa163e0d-6f45-64a1-ca76-0000000021d4 | TIMING | Diff container-puppet.json changes for check mode | standalone | 0:04:04.459638 | 0.03s 2026-01-22 12:43:10.515150 | fa163e0d-6f45-64a1-ca76-0000000021d5 | TASK | Diff container-puppet.json changes for check mode 2026-01-22 12:43:10.543602 | fa163e0d-6f45-64a1-ca76-0000000021d5 | SKIPPED | Diff container-puppet.json changes for check mode | standalone 2026-01-22 12:43:10.544568 | fa163e0d-6f45-64a1-ca76-0000000021d5 | TIMING | Diff container-puppet.json changes for check mode | standalone | 0:04:04.509582 | 0.03s 2026-01-22 12:43:10.564697 | fa163e0d-6f45-64a1-ca76-0000000021d6 | TASK | Ensure config hashes are up-to-date for container startup configs 2026-01-22 12:43:10.975794 | fa163e0d-6f45-64a1-ca76-0000000021d6 | OK | Ensure config hashes are up-to-date for container startup configs | standalone 2026-01-22 12:43:10.977118 | fa163e0d-6f45-64a1-ca76-0000000021d6 | TIMING | Ensure config hashes are up-to-date for container startup configs | standalone | 0:04:04.942127 | 0.41s 2026-01-22 12:43:11.001605 | fa163e0d-6f45-64a1-ca76-0000000021d7 | TASK | Set host puppet debugging fact string 2026-01-22 12:43:11.032698 | fa163e0d-6f45-64a1-ca76-0000000021d7 | SKIPPED | Set host puppet debugging fact string | standalone 2026-01-22 12:43:11.033788 | fa163e0d-6f45-64a1-ca76-0000000021d7 | TIMING | Set host puppet debugging fact string | standalone | 0:04:04.998795 | 0.03s 2026-01-22 12:43:11.059040 | fa163e0d-6f45-64a1-ca76-0000000021d8 | TASK | Check for /etc/puppet/check-mode directory for check mode 2026-01-22 12:43:11.087081 | fa163e0d-6f45-64a1-ca76-0000000021d8 | SKIPPED | Check for /etc/puppet/check-mode directory for check mode | standalone 2026-01-22 12:43:11.088152 | fa163e0d-6f45-64a1-ca76-0000000021d8 | TIMING | Check for /etc/puppet/check-mode directory for check mode | standalone | 
0:04:05.053161 | 0.03s 2026-01-22 12:43:11.112846 | fa163e0d-6f45-64a1-ca76-0000000021d9 | TASK | Create /etc/puppet/check-mode/hieradata directory for check mode 2026-01-22 12:43:11.141788 | fa163e0d-6f45-64a1-ca76-0000000021d9 | SKIPPED | Create /etc/puppet/check-mode/hieradata directory for check mode | standalone 2026-01-22 12:43:11.142630 | fa163e0d-6f45-64a1-ca76-0000000021d9 | TIMING | Create /etc/puppet/check-mode/hieradata directory for check mode | standalone | 0:04:05.107627 | 0.03s 2026-01-22 12:43:11.167724 | fa163e0d-6f45-64a1-ca76-0000000021da | TASK | Create puppet check-mode files if they don't exist for check mode 2026-01-22 12:43:11.198061 | fa163e0d-6f45-64a1-ca76-0000000021da | SKIPPED | Create puppet check-mode files if they don't exist for check mode | standalone 2026-01-22 12:43:11.199127 | fa163e0d-6f45-64a1-ca76-0000000021da | TIMING | Create puppet check-mode files if they don't exist for check mode | standalone | 0:04:05.164137 | 0.03s [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. Found: '{{ playbook_dir }}/{{ _task_file_path }}' is exists 2026-01-22 12:43:11.272551 | fa163e0d-6f45-64a1-ca76-0000000000c0 | TIMING | include_tasks | standalone | 0:04:05.237550 | 0.05s 2026-01-22 12:43:11.408277 | a465a9a5-564b-4edf-8a65-1bd6cf1bdc31 | INCLUDED | /root/standalone-ansible-mz1ymllk/Standalone/deploy_steps_tasks_step1.yaml | standalone 2026-01-22 12:43:11.454532 | fa163e0d-6f45-64a1-ca76-000000002205 | TASK | Create dirs for certificates and keys 2026-01-22 12:43:11.768531 | fa163e0d-6f45-64a1-ca76-000000002205 | CHANGED | Create dirs for certificates and keys | standalone | item=/etc/pki/tls/certs/httpd 2026-01-22 12:43:11.769993 | fa163e0d-6f45-64a1-ca76-000000002205 | TIMING | Create dirs for certificates and keys | standalone | 0:04:05.734995 | 0.31s 2026-01-22 12:43:12.012361 | fa163e0d-6f45-64a1-ca76-000000002205 | CHANGED | Create dirs for certificates and keys | standalone | item=/etc/pki/tls/private/httpd 2026-01-22 12:43:12.013465 | fa163e0d-6f45-64a1-ca76-000000002205 | TIMING | Create dirs for certificates and keys | standalone | 0:04:05.978473 | 0.56s 2026-01-22 12:43:12.020094 | fa163e0d-6f45-64a1-ca76-000000002205 | TIMING | Create dirs for certificates and keys | standalone | 0:04:05.985097 | 0.56s 2026-01-22 12:43:12.093383 | fa163e0d-6f45-64a1-ca76-000000002206 | TIMING | include_role : linux-system-roles.certificate | standalone | 0:04:06.058378 | 0.05s 2026-01-22 12:43:12.164267 | fa163e0d-6f45-64a1-ca76-00000000231c | TASK | Set version specific variables 2026-01-22 12:43:12.200078 | fa163e0d-6f45-64a1-ca76-00000000231c | TIMING | linux-system-roles.certificate : Set version specific variables | standalone | 0:04:06.165088 | 0.03s 2026-01-22 12:43:12.212276 | a30de972-77e6-4e28-868e-1409070f240b | INCLUDED | /usr/share/ansible/roles/rhel-system-roles.certificate/tasks/set_vars.yml | standalone 2026-01-22 12:43:12.234676 | fa163e0d-6f45-64a1-ca76-00000000234f | TASK | Ensure ansible_facts used by role 2026-01-22 12:43:12.646303 | fa163e0d-6f45-64a1-ca76-00000000234f | OK | Ensure ansible_facts used by role | standalone 2026-01-22 12:43:12.648242 | fa163e0d-6f45-64a1-ca76-00000000234f | TIMING | linux-system-roles.certificate : Ensure ansible_facts used by role | standalone | 0:04:06.613249 | 0.41s 2026-01-22 12:43:12.676547 | fa163e0d-6f45-64a1-ca76-000000002350 | TASK | Set platform/version specific variables 2026-01-22 12:43:12.761396 | fa163e0d-6f45-64a1-ca76-000000002350 | SKIPPED | 
Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:43:12.797979 | fa163e0d-6f45-64a1-ca76-000000002350 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:43:12.829342 | fa163e0d-6f45-64a1-ca76-000000002350 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.yml 2026-01-22 12:43:12.849249 | fa163e0d-6f45-64a1-ca76-000000002350 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.2.yml 2026-01-22 12:43:12.855079 | fa163e0d-6f45-64a1-ca76-000000002350 | TIMING | linux-system-roles.certificate : Set platform/version specific variables | standalone | 0:04:06.820080 | 0.18s 2026-01-22 12:43:12.881333 | fa163e0d-6f45-64a1-ca76-00000000231d | TASK | Ensure certificate role dependencies are installed 2026-01-22 12:43:15.808819 | fa163e0d-6f45-64a1-ca76-00000000231d | OK | Ensure certificate role dependencies are installed | standalone 2026-01-22 12:43:15.810207 | fa163e0d-6f45-64a1-ca76-00000000231d | TIMING | linux-system-roles.certificate : Ensure certificate role dependencies are installed | standalone | 0:04:09.775215 | 2.93s 2026-01-22 12:43:15.835401 | fa163e0d-6f45-64a1-ca76-00000000231f | TASK | Ensure provider packages are installed 2026-01-22 12:43:18.813146 | fa163e0d-6f45-64a1-ca76-00000000231f | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:04:12.778139 | 2.98s 2026-01-22 12:43:18.825599 | fa163e0d-6f45-64a1-ca76-00000000231f | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:04:12.790599 | 2.99s 2026-01-22 12:43:18.851437 | fa163e0d-6f45-64a1-ca76-000000002321 | TASK | Ensure pre-scripts hooks directory exists 2026-01-22 12:43:19.127723 | fa163e0d-6f45-64a1-ca76-000000002321 | CHANGED | Ensure pre-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:43:19.130466 | fa163e0d-6f45-64a1-ca76-000000002321 | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:04:13.095465 | 0.28s 2026-01-22 12:43:19.139977 | fa163e0d-6f45-64a1-ca76-000000002321 | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:04:13.104980 | 0.29s 2026-01-22 12:43:19.165912 | fa163e0d-6f45-64a1-ca76-000000002323 | TASK | Ensure post-scripts hooks directory exists 2026-01-22 12:43:19.468776 | fa163e0d-6f45-64a1-ca76-000000002323 | CHANGED | Ensure post-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:43:19.470313 | fa163e0d-6f45-64a1-ca76-000000002323 | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:04:13.435319 | 0.30s 2026-01-22 12:43:19.476630 | fa163e0d-6f45-64a1-ca76-000000002323 | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:04:13.441631 | 0.31s 2026-01-22 12:43:19.503511 | fa163e0d-6f45-64a1-ca76-000000002325 | TASK | Ensure provider service is running 2026-01-22 12:43:20.250411 | fa163e0d-6f45-64a1-ca76-000000002325 | CHANGED | Ensure provider service is running | standalone | item=certmonger 2026-01-22 12:43:20.255065 | fa163e0d-6f45-64a1-ca76-000000002325 | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:04:14.220069 | 0.75s 2026-01-22 12:43:20.264040 | fa163e0d-6f45-64a1-ca76-000000002325 | TIMING | linux-system-roles.certificate : Ensure provider service is running | 
standalone | 0:04:14.229043 | 0.76s 2026-01-22 12:43:20.290987 | fa163e0d-6f45-64a1-ca76-000000002328 | TASK | Ensure certificate requests 2026-01-22 12:43:22.425341 | fa163e0d-6f45-64a1-ca76-000000002328 | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': 'standalone.ctlplane.ooo.test', 'key_size': '2048', 'name': 'httpd-ctlplane', 'principal': 'HTTP/standalone.ctlplane.ooo.test@OOO.TEST', 'run_after': 'cp /etc/pki/tls/certs/httpd-ctlplane.crt /etc/pki/tls/certs/httpd/httpd-ctlplane.crt\ncp /etc/pki/tls/private/httpd-ctlplane.key /etc/pki/tls/private/httpd/httpd-ctlplane.key\npkill -USR1 httpd\n'} 2026-01-22 12:43:22.428785 | fa163e0d-6f45-64a1-ca76-000000002328 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:04:16.393779 | 2.14s 2026-01-22 12:43:24.892244 | fa163e0d-6f45-64a1-ca76-000000002328 | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': 'standalone.storage.ooo.test', 'key_size': '2048', 'name': 'httpd-storage', 'principal': 'HTTP/standalone.storage.ooo.test@OOO.TEST', 'run_after': 'cp /etc/pki/tls/certs/httpd-storage.crt /etc/pki/tls/certs/httpd/httpd-storage.crt\ncp /etc/pki/tls/private/httpd-storage.key /etc/pki/tls/private/httpd/httpd-storage.key\npkill -USR1 httpd\n'} 2026-01-22 12:43:24.894933 | fa163e0d-6f45-64a1-ca76-000000002328 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:04:18.859940 | 4.60s 2026-01-22 12:43:26.839859 | fa163e0d-6f45-64a1-ca76-000000002328 | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': 'standalone.storagemgmt.ooo.test', 'key_size': '2048', 'name': 'httpd-storage_mgmt', 'principal': 'HTTP/standalone.storagemgmt.ooo.test@OOO.TEST', 'run_after': 'cp /etc/pki/tls/certs/httpd-storage_mgmt.crt /etc/pki/tls/certs/httpd/httpd-storage_mgmt.crt\ncp /etc/pki/tls/private/httpd-storage_mgmt.key /etc/pki/tls/private/httpd/httpd-storage_mgmt.key\npkill -USR1 httpd\n'} 2026-01-22 12:43:26.841076 | fa163e0d-6f45-64a1-ca76-000000002328 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:04:20.806087 | 6.55s 2026-01-22 12:43:28.400258 | fa163e0d-6f45-64a1-ca76-000000002328 | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': 'standalone.internalapi.ooo.test', 'key_size': '2048', 'name': 'httpd-internal_api', 'principal': 'HTTP/standalone.internalapi.ooo.test@OOO.TEST', 'run_after': 'cp /etc/pki/tls/certs/httpd-internal_api.crt /etc/pki/tls/certs/httpd/httpd-internal_api.crt\ncp /etc/pki/tls/private/httpd-internal_api.key /etc/pki/tls/private/httpd/httpd-internal_api.key\npkill -USR1 httpd\n'} 2026-01-22 12:43:28.402247 | fa163e0d-6f45-64a1-ca76-000000002328 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:04:22.367258 | 8.11s 2026-01-22 12:43:29.882483 | fa163e0d-6f45-64a1-ca76-000000002328 | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': 'standalone.external.ooo.test', 'key_size': '2048', 'name': 'httpd-external', 'principal': 'HTTP/standalone.external.ooo.test@OOO.TEST', 'run_after': 'cp /etc/pki/tls/certs/httpd-external.crt /etc/pki/tls/certs/httpd/httpd-external.crt\ncp /etc/pki/tls/private/httpd-external.key /etc/pki/tls/private/httpd/httpd-external.key\npkill -USR1 httpd\n'} 2026-01-22 12:43:29.883754 | fa163e0d-6f45-64a1-ca76-000000002328 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:04:23.848769 | 9.59s 2026-01-22 
12:43:29.890913 | fa163e0d-6f45-64a1-ca76-000000002328 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:04:23.855918 | 9.60s 2026-01-22 12:43:29.943303 | fa163e0d-6f45-64a1-ca76-00000000220d | TASK | Fail if container image is undefined 2026-01-22 12:43:30.008110 | fa163e0d-6f45-64a1-ca76-00000000220d | SKIPPED | Fail if container image is undefined | standalone 2026-01-22 12:43:30.008996 | fa163e0d-6f45-64a1-ca76-00000000220d | TIMING | tripleo_container_tag : Fail if container image is undefined | standalone | 0:04:23.974010 | 0.06s 2026-01-22 12:43:30.043051 | fa163e0d-6f45-64a1-ca76-00000000220e | TASK | Pull registry.redhat.io/rhosp-rhel9/openstack-cinder-backup:17.1 image 2026-01-22 12:43:55.082633 | fa163e0d-6f45-64a1-ca76-00000000220e | CHANGED | Pull registry.redhat.io/rhosp-rhel9/openstack-cinder-backup:17.1 image | standalone 2026-01-22 12:43:55.084110 | fa163e0d-6f45-64a1-ca76-00000000220e | TIMING | tripleo_container_tag : Pull registry.redhat.io/rhosp-rhel9/openstack-cinder-backup:17.1 image | standalone | 0:04:49.049117 | 25.04s 2026-01-22 12:43:55.141945 | fa163e0d-6f45-64a1-ca76-00000000220f | TASK | Tag cluster.common.tag/cinder-backup:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-cinder-backup:17.1 image 2026-01-22 12:43:55.477351 | fa163e0d-6f45-64a1-ca76-00000000220f | CHANGED | Tag cluster.common.tag/cinder-backup:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-cinder-backup:17.1 image | standalone 2026-01-22 12:43:55.478788 | fa163e0d-6f45-64a1-ca76-00000000220f | TIMING | tripleo_container_tag : Tag cluster.common.tag/cinder-backup:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-cinder-backup:17.1 image | standalone | 0:04:49.443796 | 0.33s 2026-01-22 12:43:55.528693 | fa163e0d-6f45-64a1-ca76-00000000221e | TASK | Fail if container image is undefined 2026-01-22 12:43:55.592621 | fa163e0d-6f45-64a1-ca76-00000000221e | SKIPPED | Fail if container image is undefined | standalone 2026-01-22 12:43:55.593891 | fa163e0d-6f45-64a1-ca76-00000000221e | TIMING | tripleo_container_tag : Fail if container image is undefined | standalone | 0:04:49.558898 | 0.06s 2026-01-22 12:43:55.636476 | fa163e0d-6f45-64a1-ca76-00000000221f | TASK | Pull registry.redhat.io/rhosp-rhel9/openstack-cinder-volume:17.1 image 2026-01-22 12:44:04.477807 | fa163e0d-6f45-64a1-ca76-00000000221f | CHANGED | Pull registry.redhat.io/rhosp-rhel9/openstack-cinder-volume:17.1 image | standalone 2026-01-22 12:44:04.479374 | fa163e0d-6f45-64a1-ca76-00000000221f | TIMING | tripleo_container_tag : Pull registry.redhat.io/rhosp-rhel9/openstack-cinder-volume:17.1 image | standalone | 0:04:58.444381 | 8.84s 2026-01-22 12:44:04.536022 | fa163e0d-6f45-64a1-ca76-000000002220 | TASK | Tag cluster.common.tag/cinder-volume:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-cinder-volume:17.1 image 2026-01-22 12:44:04.893582 | fa163e0d-6f45-64a1-ca76-000000002220 | CHANGED | Tag cluster.common.tag/cinder-volume:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-cinder-volume:17.1 image | standalone 2026-01-22 12:44:04.894866 | fa163e0d-6f45-64a1-ca76-000000002220 | TIMING | tripleo_container_tag : Tag cluster.common.tag/cinder-volume:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-cinder-volume:17.1 image | standalone | 0:04:58.859875 | 0.36s 2026-01-22 12:44:04.948744 | fa163e0d-6f45-64a1-ca76-00000000222b | TASK | Check if rsyslog exists 2026-01-22 12:44:05.209834 | fa163e0d-6f45-64a1-ca76-00000000222b | 
CHANGED | Check if rsyslog exists | standalone 2026-01-22 12:44:05.211221 | fa163e0d-6f45-64a1-ca76-00000000222b | TIMING | Check if rsyslog exists | standalone | 0:04:59.176229 | 0.26s 2026-01-22 12:44:05.238427 | fa163e0d-6f45-64a1-ca76-00000000222d | TASK | Forward logging to haproxy.log file 2026-01-22 12:44:05.504473 | fa163e0d-6f45-64a1-ca76-00000000222d | CHANGED | Forward logging to haproxy.log file | standalone 2026-01-22 12:44:05.506028 | fa163e0d-6f45-64a1-ca76-00000000222d | TIMING | Forward logging to haproxy.log file | standalone | 0:04:59.471033 | 0.27s 2026-01-22 12:44:05.531944 | fa163e0d-6f45-64a1-ca76-00000000222e | TASK | restart rsyslog service after logging conf change 2026-01-22 12:44:06.400819 | fa163e0d-6f45-64a1-ca76-00000000222e | CHANGED | restart rsyslog service after logging conf change | standalone 2026-01-22 12:44:06.402784 | fa163e0d-6f45-64a1-ca76-00000000222e | TIMING | restart rsyslog service after logging conf change | standalone | 0:05:00.367790 | 0.87s 2026-01-22 12:44:06.430723 | fa163e0d-6f45-64a1-ca76-000000002235 | TASK | Fail if container image is undefined 2026-01-22 12:44:06.492765 | fa163e0d-6f45-64a1-ca76-000000002235 | SKIPPED | Fail if container image is undefined | standalone 2026-01-22 12:44:06.494080 | fa163e0d-6f45-64a1-ca76-000000002235 | TIMING | tripleo_container_tag : Fail if container image is undefined | standalone | 0:05:00.459086 | 0.06s 2026-01-22 12:44:06.540234 | fa163e0d-6f45-64a1-ca76-000000002236 | TASK | Pull registry.redhat.io/rhosp-rhel9/openstack-haproxy:17.1 image 2026-01-22 12:44:11.551828 | fa163e0d-6f45-64a1-ca76-000000002236 | CHANGED | Pull registry.redhat.io/rhosp-rhel9/openstack-haproxy:17.1 image | standalone 2026-01-22 12:44:11.553221 | fa163e0d-6f45-64a1-ca76-000000002236 | TIMING | tripleo_container_tag : Pull registry.redhat.io/rhosp-rhel9/openstack-haproxy:17.1 image | standalone | 0:05:05.518229 | 5.01s 2026-01-22 12:44:11.610373 | fa163e0d-6f45-64a1-ca76-000000002237 | TASK | Tag cluster.common.tag/haproxy:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-haproxy:17.1 image 2026-01-22 12:44:11.966727 | fa163e0d-6f45-64a1-ca76-000000002237 | CHANGED | Tag cluster.common.tag/haproxy:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-haproxy:17.1 image | standalone 2026-01-22 12:44:11.968151 | fa163e0d-6f45-64a1-ca76-000000002237 | TIMING | tripleo_container_tag : Tag cluster.common.tag/haproxy:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-haproxy:17.1 image | standalone | 0:05:05.933160 | 0.36s 2026-01-22 12:44:12.016978 | fa163e0d-6f45-64a1-ca76-000000002242 | TASK | make sure certmonger is installed 2026-01-22 12:44:15.131947 | fa163e0d-6f45-64a1-ca76-000000002242 | OK | make sure certmonger is installed | standalone 2026-01-22 12:44:15.133368 | fa163e0d-6f45-64a1-ca76-000000002242 | TIMING | make sure certmonger is installed | standalone | 0:05:09.098376 | 3.11s 2026-01-22 12:44:15.151769 | fa163e0d-6f45-64a1-ca76-000000002243 | TASK | make sure certmonger service is started 2026-01-22 12:44:15.549622 | fa163e0d-6f45-64a1-ca76-000000002243 | OK | make sure certmonger service is started | standalone 2026-01-22 12:44:15.551584 | fa163e0d-6f45-64a1-ca76-000000002243 | TIMING | make sure certmonger service is started | standalone | 0:05:09.516589 | 0.40s 2026-01-22 12:44:15.577819 | fa163e0d-6f45-64a1-ca76-000000002244 | TASK | Create dirs for certificates and keys 2026-01-22 12:44:15.815338 | fa163e0d-6f45-64a1-ca76-000000002244 | CHANGED | Create dirs for 
certificates and keys | standalone | item=/etc/pki/tls/certs/haproxy 2026-01-22 12:44:15.818332 | fa163e0d-6f45-64a1-ca76-000000002244 | TIMING | Create dirs for certificates and keys | standalone | 0:05:09.783324 | 0.24s 2026-01-22 12:44:16.047340 | fa163e0d-6f45-64a1-ca76-000000002244 | CHANGED | Create dirs for certificates and keys | standalone | item=/etc/pki/tls/private/haproxy 2026-01-22 12:44:16.048309 | fa163e0d-6f45-64a1-ca76-000000002244 | TIMING | Create dirs for certificates and keys | standalone | 0:05:10.013319 | 0.47s 2026-01-22 12:44:16.053495 | fa163e0d-6f45-64a1-ca76-000000002244 | TIMING | Create dirs for certificates and keys | standalone | 0:05:10.018498 | 0.47s 2026-01-22 12:44:16.078996 | fa163e0d-6f45-64a1-ca76-000000002245 | TASK | Extract and trust certmonger's local CA 2026-01-22 12:44:17.198730 | fa163e0d-6f45-64a1-ca76-000000002245 | CHANGED | Extract and trust certmonger's local CA | standalone 2026-01-22 12:44:17.200092 | fa163e0d-6f45-64a1-ca76-000000002245 | TIMING | Extract and trust certmonger's local CA | standalone | 0:05:11.165102 | 1.12s 2026-01-22 12:44:17.267312 | fa163e0d-6f45-64a1-ca76-000000002246 | TIMING | include_role : linux-system-roles.certificate | standalone | 0:05:11.232308 | 0.04s 2026-01-22 12:44:17.337130 | fa163e0d-6f45-64a1-ca76-0000000023d0 | TASK | Set version specific variables 2026-01-22 12:44:17.388711 | fa163e0d-6f45-64a1-ca76-0000000023d0 | TIMING | linux-system-roles.certificate : Set version specific variables | standalone | 0:05:11.353711 | 0.05s 2026-01-22 12:44:17.407295 | e357a108-ae29-4822-8998-8a409f303eed | INCLUDED | /usr/share/ansible/roles/rhel-system-roles.certificate/tasks/set_vars.yml | standalone 2026-01-22 12:44:17.430159 | fa163e0d-6f45-64a1-ca76-00000000240f | TASK | Ensure ansible_facts used by role 2026-01-22 12:44:17.827785 | fa163e0d-6f45-64a1-ca76-00000000240f | OK | Ensure ansible_facts used by role | standalone 2026-01-22 12:44:17.829785 | fa163e0d-6f45-64a1-ca76-00000000240f | TIMING | linux-system-roles.certificate : Ensure ansible_facts used by role | standalone | 0:05:11.794788 | 0.40s 2026-01-22 12:44:17.857692 | fa163e0d-6f45-64a1-ca76-000000002410 | TASK | Set platform/version specific variables 2026-01-22 12:44:17.937273 | fa163e0d-6f45-64a1-ca76-000000002410 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:44:17.954387 | fa163e0d-6f45-64a1-ca76-000000002410 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:44:17.973274 | fa163e0d-6f45-64a1-ca76-000000002410 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.yml 2026-01-22 12:44:17.987302 | fa163e0d-6f45-64a1-ca76-000000002410 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.2.yml 2026-01-22 12:44:17.999303 | fa163e0d-6f45-64a1-ca76-000000002410 | TIMING | linux-system-roles.certificate : Set platform/version specific variables | standalone | 0:05:11.964303 | 0.14s 2026-01-22 12:44:18.025727 | fa163e0d-6f45-64a1-ca76-0000000023d1 | TASK | Ensure certificate role dependencies are installed 2026-01-22 12:44:20.912762 | fa163e0d-6f45-64a1-ca76-0000000023d1 | OK | Ensure certificate role dependencies are installed | standalone 2026-01-22 12:44:20.914319 | fa163e0d-6f45-64a1-ca76-0000000023d1 | TIMING | linux-system-roles.certificate : Ensure certificate role dependencies are installed | standalone | 0:05:14.879326 | 2.89s 2026-01-22 12:44:20.940322 | fa163e0d-6f45-64a1-ca76-0000000023d3 | 
TASK | Ensure provider packages are installed 2026-01-22 12:44:23.901840 | fa163e0d-6f45-64a1-ca76-0000000023d3 | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:05:17.866827 | 2.96s 2026-01-22 12:44:23.913807 | fa163e0d-6f45-64a1-ca76-0000000023d3 | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:05:17.878803 | 2.97s 2026-01-22 12:44:23.941282 | fa163e0d-6f45-64a1-ca76-0000000023d5 | TASK | Ensure pre-scripts hooks directory exists 2026-01-22 12:44:24.256606 | fa163e0d-6f45-64a1-ca76-0000000023d5 | OK | Ensure pre-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:44:24.258067 | fa163e0d-6f45-64a1-ca76-0000000023d5 | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:05:18.223073 | 0.32s 2026-01-22 12:44:24.270092 | fa163e0d-6f45-64a1-ca76-0000000023d5 | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:05:18.235104 | 0.33s 2026-01-22 12:44:24.295533 | fa163e0d-6f45-64a1-ca76-0000000023d7 | TASK | Ensure post-scripts hooks directory exists 2026-01-22 12:44:24.603273 | fa163e0d-6f45-64a1-ca76-0000000023d7 | OK | Ensure post-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:44:24.605871 | fa163e0d-6f45-64a1-ca76-0000000023d7 | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:05:18.570870 | 0.31s 2026-01-22 12:44:24.615243 | fa163e0d-6f45-64a1-ca76-0000000023d7 | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:05:18.580256 | 0.32s 2026-01-22 12:44:24.640429 | fa163e0d-6f45-64a1-ca76-0000000023d9 | TASK | Ensure provider service is running 2026-01-22 12:44:25.097261 | fa163e0d-6f45-64a1-ca76-0000000023d9 | OK | Ensure provider service is running | standalone | item=certmonger 2026-01-22 12:44:25.099157 | fa163e0d-6f45-64a1-ca76-0000000023d9 | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:05:19.064162 | 0.46s 2026-01-22 12:44:25.107777 | fa163e0d-6f45-64a1-ca76-0000000023d9 | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:05:19.072789 | 0.47s 2026-01-22 12:44:25.134469 | fa163e0d-6f45-64a1-ca76-0000000023dc | TASK | Ensure certificate requests 2026-01-22 12:44:27.040792 | fa163e0d-6f45-64a1-ca76-0000000023dc | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': 'standalone.ooo.test', 'ip': [], 'key_size': '2048', 'name': 'haproxy-external-cert', 'principal': 'haproxy/standalone.ooo.test@OOO.TEST', 'run_after': '# Copy crt and key for backward compatibility\ncp "/etc/pki/tls/certs/haproxy-external-cert.crt" "/etc/pki/tls/certs/haproxy/overcloud-haproxy-external.crt"\ncp "/etc/pki/tls/private/haproxy-external-cert.key" "/etc/pki/tls/private/haproxy/overcloud-haproxy-external.key"\n\nca_type=ipa\nif [ "$ca_type" = "self-sign" ]; then\n # refresh the ca cert just in case the ca cert has been renewed\n ca_pem=\'/etc/pki/ca-trust/source/anchors/cm-local-ca.pem\'\n openssl pkcs12 -in /var/lib/certmonger/local/creds -out ${ca_pem} -nokeys -nodes -passin pass:\'\'\n chmod 0644 ${ca_pem}\n update-ca-trust extract\n test -e ${ca_pem} && openssl x509 -checkend 0 -noout -in ${ca_pem}\n openssl x509 -in ${ca_pem} -out /tmp/cm-local-ca.pem\n ca_path="/tmp/cm-local-ca.pem"\nelse\n 
ca_path="/etc/ipa/ca.crt"\nfi\n\nservice_crt="/etc/pki/tls/certs/haproxy/overcloud-haproxy-external.crt"\nservice_key="/etc/pki/tls/private/haproxy/overcloud-haproxy-external.key"\nservice_pem="/etc/pki/tls/private/overcloud_endpoint.pem"\n\ncat "$service_crt" "$ca_path" "$service_key" > "$service_pem"\n\ncontainer_name=$(podman ps --format=\\{\\{.Names\\}\\} | grep -w -E \'haproxy(-bundle-.*-[0-9]+)?\')\n# Inject the new pem into the running container\nif echo "$container_name" | grep -q "^haproxy-bundle"; then\n # lp#1917868: Do not use podman cp with HA containers as they get\n # frozen temporarily and that can make pacemaker operation fail.\n tar -c "$service_pem" | podman exec -i "$container_name" tar -C / -xv\n # no need to update the mount point, because pacemaker\n # recreates the container when it\'s restarted\nelse\n # Refresh the pem at the mount-point\n podman cp $service_pem "$container_name:/var/lib/kolla/config_files/src-tls/$service_pem"\n # Copy the new pem from the mount-point to the real path\n podman exec "$container_name" cp "/var/lib/kolla/config_files/src-tls$service_pem" "$service_pem"\nfi\n# Set appropriate permissions\npodman exec "$container_name" chown haproxy:haproxy "$service_pem"\n# Trigger a reload for HAProxy to read the new certificates\npodman kill --signal HUP "$container_name"\n'} 2026-01-22 12:44:27.043784 | fa163e0d-6f45-64a1-ca76-0000000023dc | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:05:21.008788 | 1.91s 2026-01-22 12:44:27.053583 | fa163e0d-6f45-64a1-ca76-0000000023dc | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:05:21.018586 | 1.92s 2026-01-22 12:44:27.102538 | fa163e0d-6f45-64a1-ca76-000000002248 | TASK | Create dirs for certificates and keys 2026-01-22 12:44:27.367384 | fa163e0d-6f45-64a1-ca76-000000002248 | OK | Create dirs for certificates and keys | standalone | item=/etc/pki/tls/certs/haproxy 2026-01-22 12:44:27.369360 | fa163e0d-6f45-64a1-ca76-000000002248 | TIMING | Create dirs for certificates and keys | standalone | 0:05:21.334350 | 0.27s 2026-01-22 12:44:27.578532 | fa163e0d-6f45-64a1-ca76-000000002248 | OK | Create dirs for certificates and keys | standalone | item=/etc/pki/tls/private/haproxy 2026-01-22 12:44:27.580432 | fa163e0d-6f45-64a1-ca76-000000002248 | TIMING | Create dirs for certificates and keys | standalone | 0:05:21.545442 | 0.48s 2026-01-22 12:44:27.590796 | fa163e0d-6f45-64a1-ca76-000000002248 | TIMING | Create dirs for certificates and keys | standalone | 0:05:21.555804 | 0.49s 2026-01-22 12:44:27.659429 | fa163e0d-6f45-64a1-ca76-000000002249 | TIMING | include_role : linux-system-roles.certificate | standalone | 0:05:21.624433 | 0.05s 2026-01-22 12:44:27.734367 | fa163e0d-6f45-64a1-ca76-00000000245e | TASK | Set version specific variables 2026-01-22 12:44:27.784424 | fa163e0d-6f45-64a1-ca76-00000000245e | TIMING | linux-system-roles.certificate : Set version specific variables | standalone | 0:05:21.749418 | 0.05s 2026-01-22 12:44:27.805035 | 71070919-1f22-4b15-ae43-17fd448b28dd | INCLUDED | /usr/share/ansible/roles/rhel-system-roles.certificate/tasks/set_vars.yml | standalone 2026-01-22 12:44:27.835980 | fa163e0d-6f45-64a1-ca76-000000002491 | TASK | Ensure ansible_facts used by role 2026-01-22 12:44:28.416033 | fa163e0d-6f45-64a1-ca76-000000002491 | OK | Ensure ansible_facts used by role | standalone 2026-01-22 12:44:28.416856 | fa163e0d-6f45-64a1-ca76-000000002491 | TIMING | linux-system-roles.certificate : Ensure ansible_facts 
used by role | standalone | 0:05:22.381873 | 0.58s 2026-01-22 12:44:28.434232 | fa163e0d-6f45-64a1-ca76-000000002492 | TASK | Set platform/version specific variables 2026-01-22 12:44:28.510357 | fa163e0d-6f45-64a1-ca76-000000002492 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:44:28.528339 | fa163e0d-6f45-64a1-ca76-000000002492 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:44:28.547372 | fa163e0d-6f45-64a1-ca76-000000002492 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.yml 2026-01-22 12:44:28.561720 | fa163e0d-6f45-64a1-ca76-000000002492 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.2.yml 2026-01-22 12:44:28.567430 | fa163e0d-6f45-64a1-ca76-000000002492 | TIMING | linux-system-roles.certificate : Set platform/version specific variables | standalone | 0:05:22.532436 | 0.13s 2026-01-22 12:44:28.587597 | fa163e0d-6f45-64a1-ca76-00000000245f | TASK | Ensure certificate role dependencies are installed 2026-01-22 12:44:32.149077 | fa163e0d-6f45-64a1-ca76-00000000245f | OK | Ensure certificate role dependencies are installed | standalone 2026-01-22 12:44:32.150557 | fa163e0d-6f45-64a1-ca76-00000000245f | TIMING | linux-system-roles.certificate : Ensure certificate role dependencies are installed | standalone | 0:05:26.115566 | 3.56s 2026-01-22 12:44:32.175776 | fa163e0d-6f45-64a1-ca76-000000002461 | TASK | Ensure provider packages are installed 2026-01-22 12:44:35.266144 | fa163e0d-6f45-64a1-ca76-000000002461 | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:05:29.231144 | 3.09s 2026-01-22 12:44:35.273719 | fa163e0d-6f45-64a1-ca76-000000002461 | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:05:29.238729 | 3.10s 2026-01-22 12:44:35.291550 | fa163e0d-6f45-64a1-ca76-000000002463 | TASK | Ensure pre-scripts hooks directory exists 2026-01-22 12:44:35.613583 | fa163e0d-6f45-64a1-ca76-000000002463 | OK | Ensure pre-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:44:35.615783 | fa163e0d-6f45-64a1-ca76-000000002463 | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:05:29.580786 | 0.32s 2026-01-22 12:44:35.625947 | fa163e0d-6f45-64a1-ca76-000000002463 | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:05:29.590879 | 0.33s 2026-01-22 12:44:35.652112 | fa163e0d-6f45-64a1-ca76-000000002465 | TASK | Ensure post-scripts hooks directory exists 2026-01-22 12:44:35.935373 | fa163e0d-6f45-64a1-ca76-000000002465 | OK | Ensure post-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:44:35.937030 | fa163e0d-6f45-64a1-ca76-000000002465 | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:05:29.902035 | 0.28s 2026-01-22 12:44:35.949046 | fa163e0d-6f45-64a1-ca76-000000002465 | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:05:29.914047 | 0.30s 2026-01-22 12:44:35.975096 | fa163e0d-6f45-64a1-ca76-000000002467 | TASK | Ensure provider service is running 2026-01-22 12:44:37.420606 | fa163e0d-6f45-64a1-ca76-000000002467 | OK | Ensure provider service is running | standalone | item=certmonger 2026-01-22 12:44:37.423479 | fa163e0d-6f45-64a1-ca76-000000002467 | TIMING | linux-system-roles.certificate : 
Ensure provider service is running | standalone | 0:05:31.388485 | 1.45s 2026-01-22 12:44:37.433953 | fa163e0d-6f45-64a1-ca76-000000002467 | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:05:31.398955 | 1.46s 2026-01-22 12:44:37.461098 | fa163e0d-6f45-64a1-ca76-00000000246a | TASK | Ensure certificate requests 2026-01-22 12:44:39.455408 | fa163e0d-6f45-64a1-ca76-00000000246a | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': ['standalone.ctlplane.ooo.test', 'overcloud.ctlplane.ooo.test'], 'key_size': '2048', 'name': 'haproxy-ctlplane-cert', 'principal': 'haproxy/standalone.ctlplane.ooo.test@OOO.TEST', 'run_after': '# Copy crt and key for backward compatibility\ncp "/etc/pki/tls/certs/haproxy-ctlplane-cert.crt" "/etc/pki/tls/certs/haproxy/overcloud-haproxy-ctlplane.crt"\ncp "/etc/pki/tls/private/haproxy-ctlplane-cert.key" "/etc/pki/tls/private/haproxy/overcloud-haproxy-ctlplane.key"\n\nca_path="/etc/ipa/ca.crt"\nservice_crt="/etc/pki/tls/certs/haproxy/overcloud-haproxy-ctlplane.crt"\nservice_key="/etc/pki/tls/private/haproxy/overcloud-haproxy-ctlplane.key"\nservice_pem="/etc/pki/tls/certs/haproxy/overcloud-haproxy-ctlplane.pem"\n\ncat "$service_crt" "$ca_path" "$service_key" > "$service_pem"\n\ncontainer_name=$(podman ps --format=\\{\\{.Names\\}\\} | grep -w -E \'haproxy(-bundle-.*-[0-9]+)?\')\n# Inject the new pem into the running container\nif echo "$container_name" | grep -q "^haproxy-bundle"; then\n # lp#1917868: Do not use podman cp with HA containers as they get\n # frozen temporarily and that can make pacemaker operation fail.\n tar -c "$service_pem" | podman exec -i "$container_name" tar -C / -xv\n # no need to update the mount point, because pacemaker\n # recreates the container when it\'s restarted\nelse\n # Refresh the pem at the mount-point\n podman cp $service_pem "$container_name:/var/lib/kolla/config_files/src-tls/$service_pem"\n # Copy the new pem from the mount-point to the real path\n podman exec "$container_name" cp "/var/lib/kolla/config_files/src-tls$service_pem" "$service_pem"\nfi\n# Set appropriate permissions\npodman exec "$container_name" chown haproxy:haproxy "$service_pem"\n# Trigger a reload for HAProxy to read the new certificates\npodman kill --signal HUP "$container_name"\n'} 2026-01-22 12:44:39.458138 | fa163e0d-6f45-64a1-ca76-00000000246a | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:05:33.423139 | 2.00s 2026-01-22 12:44:41.316839 | fa163e0d-6f45-64a1-ca76-00000000246a | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': ['standalone.storage.ooo.test', 'overcloud.storage.ooo.test'], 'key_size': '2048', 'name': 'haproxy-storage-cert', 'principal': 'haproxy/standalone.storage.ooo.test@OOO.TEST', 'run_after': '# Copy crt and key for backward compatibility\ncp "/etc/pki/tls/certs/haproxy-storage-cert.crt" "/etc/pki/tls/certs/haproxy/overcloud-haproxy-storage.crt"\ncp "/etc/pki/tls/private/haproxy-storage-cert.key" "/etc/pki/tls/private/haproxy/overcloud-haproxy-storage.key"\n\nca_path="/etc/ipa/ca.crt"\nservice_crt="/etc/pki/tls/certs/haproxy/overcloud-haproxy-storage.crt"\nservice_key="/etc/pki/tls/private/haproxy/overcloud-haproxy-storage.key"\nservice_pem="/etc/pki/tls/certs/haproxy/overcloud-haproxy-storage.pem"\n\ncat "$service_crt" "$ca_path" "$service_key" > "$service_pem"\n\ncontainer_name=$(podman ps --format=\\{\\{.Names\\}\\} | grep -w -E \'haproxy(-bundle-.*-[0-9]+)?\')\n# Inject the new pem into the 
running container\nif echo "$container_name" | grep -q "^haproxy-bundle"; then\n # lp#1917868: Do not use podman cp with HA containers as they get\n # frozen temporarily and that can make pacemaker operation fail.\n tar -c "$service_pem" | podman exec -i "$container_name" tar -C / -xv\n # no need to update the mount point, because pacemaker\n # recreates the container when it\'s restarted\nelse\n # Refresh the pem at the mount-point\n podman cp $service_pem "$container_name:/var/lib/kolla/config_files/src-tls/$service_pem"\n # Copy the new pem from the mount-point to the real path\n podman exec "$container_name" cp "/var/lib/kolla/config_files/src-tls$service_pem" "$service_pem"\nfi\n# Set appropriate permissions\npodman exec "$container_name" chown haproxy:haproxy "$service_pem"\n# Trigger a reload for HAProxy to read the new certificates\npodman kill --signal HUP "$container_name"\n'} 2026-01-22 12:44:41.317692 | fa163e0d-6f45-64a1-ca76-00000000246a | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:05:35.282708 | 3.85s 2026-01-22 12:44:43.315310 | fa163e0d-6f45-64a1-ca76-00000000246a | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': ['standalone.storagemgmt.ooo.test', 'overcloud.storagemgmt.ooo.test'], 'key_size': '2048', 'name': 'haproxy-storage_mgmt-cert', 'principal': 'haproxy/standalone.storagemgmt.ooo.test@OOO.TEST', 'run_after': '# Copy crt and key for backward compatibility\ncp "/etc/pki/tls/certs/haproxy-storage_mgmt-cert.crt" "/etc/pki/tls/certs/haproxy/overcloud-haproxy-storage_mgmt.crt"\ncp "/etc/pki/tls/private/haproxy-storage_mgmt-cert.key" "/etc/pki/tls/private/haproxy/overcloud-haproxy-storage_mgmt.key"\n\nca_path="/etc/ipa/ca.crt"\nservice_crt="/etc/pki/tls/certs/haproxy/overcloud-haproxy-storage_mgmt.crt"\nservice_key="/etc/pki/tls/private/haproxy/overcloud-haproxy-storage_mgmt.key"\nservice_pem="/etc/pki/tls/certs/haproxy/overcloud-haproxy-storage_mgmt.pem"\n\ncat "$service_crt" "$ca_path" "$service_key" > "$service_pem"\n\ncontainer_name=$(podman ps --format=\\{\\{.Names\\}\\} | grep -w -E \'haproxy(-bundle-.*-[0-9]+)?\')\n# Inject the new pem into the running container\nif echo "$container_name" | grep -q "^haproxy-bundle"; then\n # lp#1917868: Do not use podman cp with HA containers as they get\n # frozen temporarily and that can make pacemaker operation fail.\n tar -c "$service_pem" | podman exec -i "$container_name" tar -C / -xv\n # no need to update the mount point, because pacemaker\n # recreates the container when it\'s restarted\nelse\n # Refresh the pem at the mount-point\n podman cp $service_pem "$container_name:/var/lib/kolla/config_files/src-tls/$service_pem"\n # Copy the new pem from the mount-point to the real path\n podman exec "$container_name" cp "/var/lib/kolla/config_files/src-tls$service_pem" "$service_pem"\nfi\n# Set appropriate permissions\npodman exec "$container_name" chown haproxy:haproxy "$service_pem"\n# Trigger a reload for HAProxy to read the new certificates\npodman kill --signal HUP "$container_name"\n'} 2026-01-22 12:44:43.316724 | fa163e0d-6f45-64a1-ca76-00000000246a | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:05:37.281733 | 5.85s 2026-01-22 12:44:45.389260 | fa163e0d-6f45-64a1-ca76-00000000246a | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': ['standalone.internalapi.ooo.test', 'overcloud.internalapi.ooo.test'], 'key_size': '2048', 'name': 'haproxy-internal_api-cert', 'principal': 
'haproxy/standalone.internalapi.ooo.test@OOO.TEST', 'run_after': '# Copy crt and key for backward compatibility\ncp "/etc/pki/tls/certs/haproxy-internal_api-cert.crt" "/etc/pki/tls/certs/haproxy/overcloud-haproxy-internal_api.crt"\ncp "/etc/pki/tls/private/haproxy-internal_api-cert.key" "/etc/pki/tls/private/haproxy/overcloud-haproxy-internal_api.key"\n\nca_path="/etc/ipa/ca.crt"\nservice_crt="/etc/pki/tls/certs/haproxy/overcloud-haproxy-internal_api.crt"\nservice_key="/etc/pki/tls/private/haproxy/overcloud-haproxy-internal_api.key"\nservice_pem="/etc/pki/tls/certs/haproxy/overcloud-haproxy-internal_api.pem"\n\ncat "$service_crt" "$ca_path" "$service_key" > "$service_pem"\n\ncontainer_name=$(podman ps --format=\\{\\{.Names\\}\\} | grep -w -E \'haproxy(-bundle-.*-[0-9]+)?\')\n# Inject the new pem into the running container\nif echo "$container_name" | grep -q "^haproxy-bundle"; then\n # lp#1917868: Do not use podman cp with HA containers as they get\n # frozen temporarily and that can make pacemaker operation fail.\n tar -c "$service_pem" | podman exec -i "$container_name" tar -C / -xv\n # no need to update the mount point, because pacemaker\n # recreates the container when it\'s restarted\nelse\n # Refresh the pem at the mount-point\n podman cp $service_pem "$container_name:/var/lib/kolla/config_files/src-tls/$service_pem"\n # Copy the new pem from the mount-point to the real path\n podman exec "$container_name" cp "/var/lib/kolla/config_files/src-tls$service_pem" "$service_pem"\nfi\n# Set appropriate permissions\npodman exec "$container_name" chown haproxy:haproxy "$service_pem"\n# Trigger a reload for HAProxy to read the new certificates\npodman kill --signal HUP "$container_name"\n'} 2026-01-22 12:44:45.390583 | fa163e0d-6f45-64a1-ca76-00000000246a | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:05:39.355592 | 7.93s 2026-01-22 12:44:45.403671 | fa163e0d-6f45-64a1-ca76-00000000246a | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:05:39.368688 | 7.94s 2026-01-22 12:44:45.443747 | fa163e0d-6f45-64a1-ca76-00000000224b | TASK | Run lvmfilter role 2026-01-22 12:44:45.493161 | fa163e0d-6f45-64a1-ca76-00000000224b | TIMING | Run lvmfilter role | standalone | 0:05:39.458169 | 0.05s 2026-01-22 12:44:45.552597 | fa163e0d-6f45-64a1-ca76-0000000024fe | TASK | Check if lvm2 is installed 2026-01-22 12:44:45.793229 | fa163e0d-6f45-64a1-ca76-0000000024fe | CHANGED | Check if lvm2 is installed | standalone 2026-01-22 12:44:45.794548 | fa163e0d-6f45-64a1-ca76-0000000024fe | TIMING | tripleo_lvmfilter : Check if lvm2 is installed | standalone | 0:05:39.759557 | 0.24s 2026-01-22 12:44:45.822264 | fa163e0d-6f45-64a1-ca76-000000002500 | TASK | collect ansible devices 2026-01-22 12:44:45.865314 | fa163e0d-6f45-64a1-ca76-000000002500 | SKIPPED | collect ansible devices | standalone 2026-01-22 12:44:45.866143 | fa163e0d-6f45-64a1-ca76-000000002500 | TIMING | tripleo_lvmfilter : collect ansible devices | standalone | 0:05:39.831159 | 0.04s 2026-01-22 12:44:45.883372 | fa163e0d-6f45-64a1-ca76-000000002501 | TASK | collect in-use lvm2 devices list 2026-01-22 12:44:45.918904 | fa163e0d-6f45-64a1-ca76-000000002501 | SKIPPED | collect in-use lvm2 devices list | standalone 2026-01-22 12:44:45.919730 | fa163e0d-6f45-64a1-ca76-000000002501 | TIMING | tripleo_lvmfilter : collect in-use lvm2 devices list | standalone | 0:05:39.884748 | 0.04s 2026-01-22 12:44:45.940388 | fa163e0d-6f45-64a1-ca76-000000002502 | TASK | set allowed_devices 
2026-01-22 12:44:45.982455 | fa163e0d-6f45-64a1-ca76-000000002502 | SKIPPED | set allowed_devices | standalone 2026-01-22 12:44:45.984103 | fa163e0d-6f45-64a1-ca76-000000002502 | TIMING | tripleo_lvmfilter : set allowed_devices | standalone | 0:05:39.949103 | 0.04s 2026-01-22 12:44:46.010164 | fa163e0d-6f45-64a1-ca76-000000002503 | TASK | build lvm2 allow list 2026-01-22 12:44:46.053234 | fa163e0d-6f45-64a1-ca76-000000002503 | SKIPPED | build lvm2 allow list | standalone 2026-01-22 12:44:46.054544 | fa163e0d-6f45-64a1-ca76-000000002503 | TIMING | tripleo_lvmfilter : build lvm2 allow list | standalone | 0:05:40.019550 | 0.04s 2026-01-22 12:44:46.080699 | fa163e0d-6f45-64a1-ca76-000000002504 | TASK | build lvm2 deny list 2026-01-22 12:44:46.131935 | fa163e0d-6f45-64a1-ca76-000000002504 | SKIPPED | build lvm2 deny list | standalone 2026-01-22 12:44:46.133321 | fa163e0d-6f45-64a1-ca76-000000002504 | TIMING | tripleo_lvmfilter : build lvm2 deny list | standalone | 0:05:40.098327 | 0.05s 2026-01-22 12:44:46.159043 | fa163e0d-6f45-64a1-ca76-000000002505 | TASK | build lvm2 filter 2026-01-22 12:44:46.208804 | fa163e0d-6f45-64a1-ca76-000000002505 | SKIPPED | build lvm2 filter | standalone 2026-01-22 12:44:46.210188 | fa163e0d-6f45-64a1-ca76-000000002505 | TIMING | tripleo_lvmfilter : build lvm2 filter | standalone | 0:05:40.175195 | 0.05s 2026-01-22 12:44:46.237563 | fa163e0d-6f45-64a1-ca76-000000002506 | TASK | regenerate lvm config 2026-01-22 12:44:46.289620 | fa163e0d-6f45-64a1-ca76-000000002506 | SKIPPED | regenerate lvm config | standalone 2026-01-22 12:44:46.291036 | fa163e0d-6f45-64a1-ca76-000000002506 | TIMING | tripleo_lvmfilter : regenerate lvm config | standalone | 0:05:40.256042 | 0.05s 2026-01-22 12:44:46.316914 | fa163e0d-6f45-64a1-ca76-000000002507 | TASK | copy new lvm.conf in place 2026-01-22 12:44:46.368910 | fa163e0d-6f45-64a1-ca76-000000002507 | SKIPPED | copy new lvm.conf in place | standalone 2026-01-22 12:44:46.371222 | fa163e0d-6f45-64a1-ca76-000000002507 | TIMING | tripleo_lvmfilter : copy new lvm.conf in place | standalone | 0:05:40.336229 | 0.05s 2026-01-22 12:44:46.424426 | fa163e0d-6f45-64a1-ca76-000000002251 | TASK | Fail if container image is undefined 2026-01-22 12:44:46.493202 | fa163e0d-6f45-64a1-ca76-000000002251 | SKIPPED | Fail if container image is undefined | standalone 2026-01-22 12:44:46.494706 | fa163e0d-6f45-64a1-ca76-000000002251 | TIMING | tripleo_container_tag : Fail if container image is undefined | standalone | 0:05:40.459711 | 0.07s 2026-01-22 12:44:46.538553 | fa163e0d-6f45-64a1-ca76-000000002252 | TASK | Pull registry.redhat.io/rhosp-rhel9/openstack-manila-share:17.1 image 2026-01-22 12:44:55.066724 | fa163e0d-6f45-64a1-ca76-000000002252 | CHANGED | Pull registry.redhat.io/rhosp-rhel9/openstack-manila-share:17.1 image | standalone 2026-01-22 12:44:55.068434 | fa163e0d-6f45-64a1-ca76-000000002252 | TIMING | tripleo_container_tag : Pull registry.redhat.io/rhosp-rhel9/openstack-manila-share:17.1 image | standalone | 0:05:49.033442 | 8.53s 2026-01-22 12:44:55.126958 | fa163e0d-6f45-64a1-ca76-000000002253 | TASK | Tag cluster.common.tag/manila-share:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-manila-share:17.1 image 2026-01-22 12:44:55.499204 | fa163e0d-6f45-64a1-ca76-000000002253 | CHANGED | Tag cluster.common.tag/manila-share:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-manila-share:17.1 image | standalone 2026-01-22 12:44:55.500767 | fa163e0d-6f45-64a1-ca76-000000002253 | TIMING | tripleo_container_tag : Tag 
cluster.common.tag/manila-share:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-manila-share:17.1 image | standalone | 0:05:49.465775 | 0.37s 2026-01-22 12:44:55.602494 | fa163e0d-6f45-64a1-ca76-00000000225e | TIMING | include_role : linux-system-roles.certificate | standalone | 0:05:49.567492 | 0.05s 2026-01-22 12:44:55.662853 | fa163e0d-6f45-64a1-ca76-000000002576 | TASK | Set version specific variables 2026-01-22 12:44:55.711229 | fa163e0d-6f45-64a1-ca76-000000002576 | TIMING | linux-system-roles.certificate : Set version specific variables | standalone | 0:05:49.676233 | 0.05s 2026-01-22 12:44:55.730239 | f81bfdc2-9f07-4492-bac8-c0a894a744b2 | INCLUDED | /usr/share/ansible/roles/rhel-system-roles.certificate/tasks/set_vars.yml | standalone 2026-01-22 12:44:55.760741 | fa163e0d-6f45-64a1-ca76-0000000025a5 | TASK | Ensure ansible_facts used by role 2026-01-22 12:44:56.180296 | fa163e0d-6f45-64a1-ca76-0000000025a5 | OK | Ensure ansible_facts used by role | standalone 2026-01-22 12:44:56.181922 | fa163e0d-6f45-64a1-ca76-0000000025a5 | TIMING | linux-system-roles.certificate : Ensure ansible_facts used by role | standalone | 0:05:50.146930 | 0.42s 2026-01-22 12:44:56.212390 | fa163e0d-6f45-64a1-ca76-0000000025a6 | TASK | Set platform/version specific variables 2026-01-22 12:44:56.290954 | fa163e0d-6f45-64a1-ca76-0000000025a6 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:44:56.308708 | fa163e0d-6f45-64a1-ca76-0000000025a6 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:44:56.326892 | fa163e0d-6f45-64a1-ca76-0000000025a6 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.yml 2026-01-22 12:44:56.341533 | fa163e0d-6f45-64a1-ca76-0000000025a6 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.2.yml 2026-01-22 12:44:56.353923 | fa163e0d-6f45-64a1-ca76-0000000025a6 | TIMING | linux-system-roles.certificate : Set platform/version specific variables | standalone | 0:05:50.318923 | 0.14s 2026-01-22 12:44:56.381234 | fa163e0d-6f45-64a1-ca76-000000002577 | TASK | Ensure certificate role dependencies are installed 2026-01-22 12:44:59.342159 | fa163e0d-6f45-64a1-ca76-000000002577 | OK | Ensure certificate role dependencies are installed | standalone 2026-01-22 12:44:59.343582 | fa163e0d-6f45-64a1-ca76-000000002577 | TIMING | linux-system-roles.certificate : Ensure certificate role dependencies are installed | standalone | 0:05:53.308596 | 2.96s 2026-01-22 12:44:59.364306 | fa163e0d-6f45-64a1-ca76-000000002579 | TASK | Ensure provider packages are installed 2026-01-22 12:45:02.385278 | fa163e0d-6f45-64a1-ca76-000000002579 | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:05:56.350276 | 3.02s 2026-01-22 12:45:02.397483 | fa163e0d-6f45-64a1-ca76-000000002579 | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:05:56.362486 | 3.03s 2026-01-22 12:45:02.423472 | fa163e0d-6f45-64a1-ca76-00000000257b | TASK | Ensure pre-scripts hooks directory exists 2026-01-22 12:45:02.710177 | fa163e0d-6f45-64a1-ca76-00000000257b | OK | Ensure pre-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:45:02.712745 | fa163e0d-6f45-64a1-ca76-00000000257b | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:05:56.677749 | 0.29s 2026-01-22 12:45:02.722600 | fa163e0d-6f45-64a1-ca76-00000000257b | 
TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:05:56.687602 | 0.30s 2026-01-22 12:45:02.748871 | fa163e0d-6f45-64a1-ca76-00000000257d | TASK | Ensure post-scripts hooks directory exists 2026-01-22 12:45:03.033523 | fa163e0d-6f45-64a1-ca76-00000000257d | OK | Ensure post-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:45:03.035479 | fa163e0d-6f45-64a1-ca76-00000000257d | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:05:57.000492 | 0.29s 2026-01-22 12:45:03.045592 | fa163e0d-6f45-64a1-ca76-00000000257d | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:05:57.010593 | 0.30s 2026-01-22 12:45:03.071380 | fa163e0d-6f45-64a1-ca76-00000000257f | TASK | Ensure provider service is running 2026-01-22 12:45:03.484602 | fa163e0d-6f45-64a1-ca76-00000000257f | OK | Ensure provider service is running | standalone | item=certmonger 2026-01-22 12:45:03.487436 | fa163e0d-6f45-64a1-ca76-00000000257f | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:05:57.452441 | 0.41s 2026-01-22 12:45:03.498327 | fa163e0d-6f45-64a1-ca76-00000000257f | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:05:57.463329 | 0.43s 2026-01-22 12:45:03.525823 | fa163e0d-6f45-64a1-ca76-000000002582 | TASK | Ensure certificate requests 2026-01-22 12:45:05.632316 | fa163e0d-6f45-64a1-ca76-000000002582 | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': 'standalone.internalapi.ooo.test', 'key_size': '2048', 'name': 'memcached', 'principal': 'memcached/standalone.internalapi.ooo.test@OOO.TEST', 'run_after': 'container_name=$(podman ps --format=\\{\\{.Names\\}\\} | grep memcached)\nservice_crt="/etc/pki/tls/certs/memcached.crt"\nservice_key="/etc/pki/tls/private/memcached.key"\n# Copy the new cert from the mount-point to the real path\npodman exec -u root "$container_name" cp "/var/lib/kolla/config_files/src-tls$service_crt" "$service_crt"\n# Copy the new key from the mount-point to the real path\npodman exec -u root "$container_name" cp "/var/lib/kolla/config_files/src-tls$service_key" "$service_key"\n# Set appropriate permissions\npodman exec -u root "$container_name" chown memcached:memcached "$service_crt"\npodman exec -u root "$container_name" chown memcached:memcached "$service_key"\n# Send refresh_certs command to memcached to read the new certificate\nmemcached_ip="$(hiera -c /etc/puppet/hiera.yaml memcached::listen_ip.0 127.0.0.1)"\nmemcached_port="$(hiera -c /etc/puppet/hiera.yaml memcached::tcp_port 11211)"\necho refresh_certs | openssl s_client -connect $memcached_ip:$memcached_port\n'} 2026-01-22 12:45:05.634678 | fa163e0d-6f45-64a1-ca76-000000002582 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:05:59.599670 | 2.11s 2026-01-22 12:45:05.645347 | fa163e0d-6f45-64a1-ca76-000000002582 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:05:59.610348 | 2.12s 2026-01-22 12:45:05.754179 | fa163e0d-6f45-64a1-ca76-000000002260 | TIMING | include_role : linux-system-roles.certificate | standalone | 0:05:59.719178 | 0.05s 2026-01-22 12:45:05.831436 | fa163e0d-6f45-64a1-ca76-0000000025ea | TASK | Set version specific variables 2026-01-22 12:45:05.891241 | fa163e0d-6f45-64a1-ca76-0000000025ea | TIMING | linux-system-roles.certificate : Set version specific 
variables | standalone | 0:05:59.856247 | 0.06s 2026-01-22 12:45:05.904153 | 4b10cc90-62a7-4734-83c4-dce6d56a4e5a | INCLUDED | /usr/share/ansible/roles/rhel-system-roles.certificate/tasks/set_vars.yml | standalone 2026-01-22 12:45:05.927864 | fa163e0d-6f45-64a1-ca76-000000002619 | TASK | Ensure ansible_facts used by role 2026-01-22 12:45:06.342688 | fa163e0d-6f45-64a1-ca76-000000002619 | OK | Ensure ansible_facts used by role | standalone 2026-01-22 12:45:06.344203 | fa163e0d-6f45-64a1-ca76-000000002619 | TIMING | linux-system-roles.certificate : Ensure ansible_facts used by role | standalone | 0:06:00.309212 | 0.41s 2026-01-22 12:45:06.371704 | fa163e0d-6f45-64a1-ca76-00000000261a | TASK | Set platform/version specific variables 2026-01-22 12:45:06.459959 | fa163e0d-6f45-64a1-ca76-00000000261a | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:45:06.492329 | fa163e0d-6f45-64a1-ca76-00000000261a | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:45:06.522464 | fa163e0d-6f45-64a1-ca76-00000000261a | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.yml 2026-01-22 12:45:06.542390 | fa163e0d-6f45-64a1-ca76-00000000261a | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.2.yml 2026-01-22 12:45:06.548442 | fa163e0d-6f45-64a1-ca76-00000000261a | TIMING | linux-system-roles.certificate : Set platform/version specific variables | standalone | 0:06:00.513443 | 0.18s 2026-01-22 12:45:06.574250 | fa163e0d-6f45-64a1-ca76-0000000025eb | TASK | Ensure certificate role dependencies are installed 2026-01-22 12:45:09.500930 | fa163e0d-6f45-64a1-ca76-0000000025eb | OK | Ensure certificate role dependencies are installed | standalone 2026-01-22 12:45:09.502566 | fa163e0d-6f45-64a1-ca76-0000000025eb | TIMING | linux-system-roles.certificate : Ensure certificate role dependencies are installed | standalone | 0:06:03.467574 | 2.93s 2026-01-22 12:45:09.527857 | fa163e0d-6f45-64a1-ca76-0000000025ed | TASK | Ensure provider packages are installed 2026-01-22 12:45:12.552098 | fa163e0d-6f45-64a1-ca76-0000000025ed | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:06:06.517088 | 3.02s 2026-01-22 12:45:12.564637 | fa163e0d-6f45-64a1-ca76-0000000025ed | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:06:06.529631 | 3.04s 2026-01-22 12:45:12.591493 | fa163e0d-6f45-64a1-ca76-0000000025ef | TASK | Ensure pre-scripts hooks directory exists 2026-01-22 12:45:12.861380 | fa163e0d-6f45-64a1-ca76-0000000025ef | OK | Ensure pre-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:45:12.864107 | fa163e0d-6f45-64a1-ca76-0000000025ef | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:06:06.829106 | 0.27s 2026-01-22 12:45:12.873923 | fa163e0d-6f45-64a1-ca76-0000000025ef | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:06:06.838893 | 0.28s 2026-01-22 12:45:12.900642 | fa163e0d-6f45-64a1-ca76-0000000025f1 | TASK | Ensure post-scripts hooks directory exists 2026-01-22 12:45:13.210139 | fa163e0d-6f45-64a1-ca76-0000000025f1 | OK | Ensure post-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:45:13.211843 | fa163e0d-6f45-64a1-ca76-0000000025f1 | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone 
| 0:06:07.176845 | 0.31s 2026-01-22 12:45:13.223993 | fa163e0d-6f45-64a1-ca76-0000000025f1 | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:06:07.189000 | 0.32s 2026-01-22 12:45:13.244625 | fa163e0d-6f45-64a1-ca76-0000000025f3 | TASK | Ensure provider service is running 2026-01-22 12:45:13.682078 | fa163e0d-6f45-64a1-ca76-0000000025f3 | OK | Ensure provider service is running | standalone | item=certmonger 2026-01-22 12:45:13.684480 | fa163e0d-6f45-64a1-ca76-0000000025f3 | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:06:07.649487 | 0.44s 2026-01-22 12:45:13.695544 | fa163e0d-6f45-64a1-ca76-0000000025f3 | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:06:07.660539 | 0.45s 2026-01-22 12:45:13.723949 | fa163e0d-6f45-64a1-ca76-0000000025f6 | TASK | Ensure certificate requests 2026-01-22 12:45:15.207767 | fa163e0d-6f45-64a1-ca76-0000000025f6 | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': ['standalone.internalapi.ooo.test', 'overcloud.internalapi.ooo.test'], 'key_size': '2048', 'name': 'mysql', 'principal': 'mysql/standalone.internalapi.ooo.test@OOO.TEST'} 2026-01-22 12:45:15.209685 | fa163e0d-6f45-64a1-ca76-0000000025f6 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:06:09.174657 | 1.48s 2026-01-22 12:45:15.216380 | fa163e0d-6f45-64a1-ca76-0000000025f6 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:06:09.181386 | 1.49s 2026-01-22 12:45:15.250410 | fa163e0d-6f45-64a1-ca76-000000002267 | TASK | Fail if container image is undefined 2026-01-22 12:45:15.321314 | fa163e0d-6f45-64a1-ca76-000000002267 | SKIPPED | Fail if container image is undefined | standalone 2026-01-22 12:45:15.323609 | fa163e0d-6f45-64a1-ca76-000000002267 | TIMING | tripleo_container_tag : Fail if container image is undefined | standalone | 0:06:09.288609 | 0.07s 2026-01-22 12:45:15.367517 | fa163e0d-6f45-64a1-ca76-000000002268 | TASK | Pull registry.redhat.io/rhosp-rhel9/openstack-mariadb:17.1 image 2026-01-22 12:45:23.223809 | fa163e0d-6f45-64a1-ca76-000000002268 | CHANGED | Pull registry.redhat.io/rhosp-rhel9/openstack-mariadb:17.1 image | standalone 2026-01-22 12:45:23.225898 | fa163e0d-6f45-64a1-ca76-000000002268 | TIMING | tripleo_container_tag : Pull registry.redhat.io/rhosp-rhel9/openstack-mariadb:17.1 image | standalone | 0:06:17.190854 | 7.86s 2026-01-22 12:45:23.280880 | fa163e0d-6f45-64a1-ca76-000000002269 | TASK | Tag cluster.common.tag/mariadb:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-mariadb:17.1 image 2026-01-22 12:45:23.677950 | fa163e0d-6f45-64a1-ca76-000000002269 | CHANGED | Tag cluster.common.tag/mariadb:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-mariadb:17.1 image | standalone 2026-01-22 12:45:23.679345 | fa163e0d-6f45-64a1-ca76-000000002269 | TIMING | tripleo_container_tag : Tag cluster.common.tag/mariadb:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-mariadb:17.1 image | standalone | 0:06:17.644352 | 0.40s 2026-01-22 12:45:23.779482 | fa163e0d-6f45-64a1-ca76-000000002274 | TIMING | include_role : linux-system-roles.certificate | standalone | 0:06:17.744486 | 0.05s 2026-01-22 12:45:23.845242 | fa163e0d-6f45-64a1-ca76-000000002664 | TASK | Set version specific variables 2026-01-22 12:45:23.893902 | fa163e0d-6f45-64a1-ca76-000000002664 | TIMING | linux-system-roles.certificate : Set version 
specific variables | standalone | 0:06:17.858903 | 0.05s 2026-01-22 12:45:23.912737 | 33734bd0-c369-4989-b3e8-2df986e46ac8 | INCLUDED | /usr/share/ansible/roles/rhel-system-roles.certificate/tasks/set_vars.yml | standalone 2026-01-22 12:45:23.943139 | fa163e0d-6f45-64a1-ca76-000000002693 | TASK | Ensure ansible_facts used by role 2026-01-22 12:45:24.354284 | fa163e0d-6f45-64a1-ca76-000000002693 | OK | Ensure ansible_facts used by role | standalone 2026-01-22 12:45:24.355971 | fa163e0d-6f45-64a1-ca76-000000002693 | TIMING | linux-system-roles.certificate : Ensure ansible_facts used by role | standalone | 0:06:18.320979 | 0.41s 2026-01-22 12:45:24.382918 | fa163e0d-6f45-64a1-ca76-000000002694 | TASK | Set platform/version specific variables 2026-01-22 12:45:24.458487 | fa163e0d-6f45-64a1-ca76-000000002694 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:45:24.476582 | fa163e0d-6f45-64a1-ca76-000000002694 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:45:24.496604 | fa163e0d-6f45-64a1-ca76-000000002694 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.yml 2026-01-22 12:45:24.511736 | fa163e0d-6f45-64a1-ca76-000000002694 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.2.yml 2026-01-22 12:45:24.518251 | fa163e0d-6f45-64a1-ca76-000000002694 | TIMING | linux-system-roles.certificate : Set platform/version specific variables | standalone | 0:06:18.483251 | 0.13s 2026-01-22 12:45:24.544816 | fa163e0d-6f45-64a1-ca76-000000002665 | TASK | Ensure certificate role dependencies are installed 2026-01-22 12:45:27.642864 | fa163e0d-6f45-64a1-ca76-000000002665 | OK | Ensure certificate role dependencies are installed | standalone 2026-01-22 12:45:27.644220 | fa163e0d-6f45-64a1-ca76-000000002665 | TIMING | linux-system-roles.certificate : Ensure certificate role dependencies are installed | standalone | 0:06:21.609234 | 3.10s 2026-01-22 12:45:27.665333 | fa163e0d-6f45-64a1-ca76-000000002667 | TASK | Ensure provider packages are installed 2026-01-22 12:45:30.684097 | fa163e0d-6f45-64a1-ca76-000000002667 | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:06:24.649089 | 3.02s 2026-01-22 12:45:30.696224 | fa163e0d-6f45-64a1-ca76-000000002667 | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:06:24.661227 | 3.03s 2026-01-22 12:45:30.722402 | fa163e0d-6f45-64a1-ca76-000000002669 | TASK | Ensure pre-scripts hooks directory exists 2026-01-22 12:45:31.038991 | fa163e0d-6f45-64a1-ca76-000000002669 | OK | Ensure pre-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:45:31.040686 | fa163e0d-6f45-64a1-ca76-000000002669 | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:06:25.005662 | 0.32s 2026-01-22 12:45:31.052799 | fa163e0d-6f45-64a1-ca76-000000002669 | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:06:25.017802 | 0.33s 2026-01-22 12:45:31.079692 | fa163e0d-6f45-64a1-ca76-00000000266b | TASK | Ensure post-scripts hooks directory exists 2026-01-22 12:45:31.367557 | fa163e0d-6f45-64a1-ca76-00000000266b | OK | Ensure post-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:45:31.369959 | fa163e0d-6f45-64a1-ca76-00000000266b | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | 
standalone | 0:06:25.334964 | 0.29s 2026-01-22 12:45:31.379758 | fa163e0d-6f45-64a1-ca76-00000000266b | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:06:25.344758 | 0.30s 2026-01-22 12:45:31.406248 | fa163e0d-6f45-64a1-ca76-00000000266d | TASK | Ensure provider service is running 2026-01-22 12:45:31.856591 | fa163e0d-6f45-64a1-ca76-00000000266d | OK | Ensure provider service is running | standalone | item=certmonger 2026-01-22 12:45:31.859588 | fa163e0d-6f45-64a1-ca76-00000000266d | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:06:25.824591 | 0.45s 2026-01-22 12:45:31.869789 | fa163e0d-6f45-64a1-ca76-00000000266d | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:06:25.834792 | 0.46s 2026-01-22 12:45:31.897359 | fa163e0d-6f45-64a1-ca76-000000002670 | TASK | Ensure certificate requests 2026-01-22 12:45:33.475433 | fa163e0d-6f45-64a1-ca76-000000002670 | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': 'standalone.internalapi.ooo.test', 'key_size': '2048', 'name': 'neutron_ovn', 'principal': 'neutron_ovn/standalone.internalapi.ooo.test@OOO.TEST'} 2026-01-22 12:45:33.477056 | fa163e0d-6f45-64a1-ca76-000000002670 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:06:27.442063 | 1.58s 2026-01-22 12:45:33.484079 | fa163e0d-6f45-64a1-ca76-000000002670 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:06:27.449080 | 1.59s 2026-01-22 12:45:33.585985 | fa163e0d-6f45-64a1-ca76-000000002276 | TIMING | include_role : linux-system-roles.certificate | standalone | 0:06:27.550982 | 0.05s 2026-01-22 12:45:33.654012 | fa163e0d-6f45-64a1-ca76-0000000026d8 | TASK | Set version specific variables 2026-01-22 12:45:33.703481 | fa163e0d-6f45-64a1-ca76-0000000026d8 | TIMING | linux-system-roles.certificate : Set version specific variables | standalone | 0:06:27.668477 | 0.05s 2026-01-22 12:45:33.726156 | b909fc6d-e8ca-45a9-a57f-c48f2ce2cc6f | INCLUDED | /usr/share/ansible/roles/rhel-system-roles.certificate/tasks/set_vars.yml | standalone 2026-01-22 12:45:33.756630 | fa163e0d-6f45-64a1-ca76-000000002707 | TASK | Ensure ansible_facts used by role 2026-01-22 12:45:34.164692 | fa163e0d-6f45-64a1-ca76-000000002707 | OK | Ensure ansible_facts used by role | standalone 2026-01-22 12:45:34.166310 | fa163e0d-6f45-64a1-ca76-000000002707 | TIMING | linux-system-roles.certificate : Ensure ansible_facts used by role | standalone | 0:06:28.131318 | 0.41s 2026-01-22 12:45:34.193034 | fa163e0d-6f45-64a1-ca76-000000002708 | TASK | Set platform/version specific variables 2026-01-22 12:45:34.277596 | fa163e0d-6f45-64a1-ca76-000000002708 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:45:34.318103 | fa163e0d-6f45-64a1-ca76-000000002708 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:45:34.342469 | fa163e0d-6f45-64a1-ca76-000000002708 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.yml 2026-01-22 12:45:34.363093 | fa163e0d-6f45-64a1-ca76-000000002708 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.2.yml 2026-01-22 12:45:34.366374 | fa163e0d-6f45-64a1-ca76-000000002708 | TIMING | linux-system-roles.certificate : Set platform/version specific variables | standalone | 0:06:28.331375 | 0.17s 2026-01-22 12:45:34.391879 | 
fa163e0d-6f45-64a1-ca76-0000000026d9 | TASK | Ensure certificate role dependencies are installed 2026-01-22 12:45:37.423929 | fa163e0d-6f45-64a1-ca76-0000000026d9 | OK | Ensure certificate role dependencies are installed | standalone 2026-01-22 12:45:37.425354 | fa163e0d-6f45-64a1-ca76-0000000026d9 | TIMING | linux-system-roles.certificate : Ensure certificate role dependencies are installed | standalone | 0:06:31.390361 | 3.03s 2026-01-22 12:45:37.451168 | fa163e0d-6f45-64a1-ca76-0000000026db | TASK | Ensure provider packages are installed 2026-01-22 12:45:40.471878 | fa163e0d-6f45-64a1-ca76-0000000026db | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:06:34.436869 | 3.02s 2026-01-22 12:45:40.478517 | fa163e0d-6f45-64a1-ca76-0000000026db | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:06:34.443516 | 3.03s 2026-01-22 12:45:40.505285 | fa163e0d-6f45-64a1-ca76-0000000026dd | TASK | Ensure pre-scripts hooks directory exists 2026-01-22 12:45:40.826049 | fa163e0d-6f45-64a1-ca76-0000000026dd | OK | Ensure pre-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:45:40.828425 | fa163e0d-6f45-64a1-ca76-0000000026dd | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:06:34.793429 | 0.32s 2026-01-22 12:45:40.838240 | fa163e0d-6f45-64a1-ca76-0000000026dd | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:06:34.803242 | 0.33s 2026-01-22 12:45:40.865319 | fa163e0d-6f45-64a1-ca76-0000000026df | TASK | Ensure post-scripts hooks directory exists 2026-01-22 12:45:41.149937 | fa163e0d-6f45-64a1-ca76-0000000026df | OK | Ensure post-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:45:41.152099 | fa163e0d-6f45-64a1-ca76-0000000026df | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:06:35.117101 | 0.29s 2026-01-22 12:45:41.162068 | fa163e0d-6f45-64a1-ca76-0000000026df | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:06:35.127068 | 0.30s 2026-01-22 12:45:41.183778 | fa163e0d-6f45-64a1-ca76-0000000026e1 | TASK | Ensure provider service is running 2026-01-22 12:45:42.613444 | fa163e0d-6f45-64a1-ca76-0000000026e1 | OK | Ensure provider service is running | standalone | item=certmonger 2026-01-22 12:45:42.616092 | fa163e0d-6f45-64a1-ca76-0000000026e1 | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:06:36.581098 | 1.43s 2026-01-22 12:45:42.626639 | fa163e0d-6f45-64a1-ca76-0000000026e1 | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:06:36.591635 | 1.44s 2026-01-22 12:45:42.653557 | fa163e0d-6f45-64a1-ca76-0000000026e4 | TASK | Ensure certificate requests 2026-01-22 12:45:44.304570 | fa163e0d-6f45-64a1-ca76-0000000026e4 | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': 'standalone.internalapi.ooo.test', 'key_size': '2048', 'name': 'neutron', 'principal': 'neutron/standalone.internalapi.ooo.test@OOO.TEST', 'run_after': 'container_name=$(podman ps --format=\\{\\{.Names\\}\\} | grep neutron_dhcp)\n# The certificate is also installed on the computes, but neutron_dhcp is only\n# present on the controllers, so we exit if the container could not be found.\n[[ -z $container_name ]] && exit 
0\n\nservice_crt="/etc/pki/tls/certs/neutron.crt"\nservice_key="/etc/pki/tls/private/neutron.key"\n# Copy the new cert from the mount-point to the real path\npodman exec -u root "$container_name" cp "/var/lib/kolla/config_files/src-tls$service_crt" "$service_crt"\n# Copy the new key from the mount-point to the real path\npodman exec -u root "$container_name" cp "/var/lib/kolla/config_files/src-tls$service_key" "$service_key"\n# No need to trigger a reload for neutron dhcpd since the cert is not cached\n'} 2026-01-22 12:45:44.306940 | fa163e0d-6f45-64a1-ca76-0000000026e4 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:06:38.271946 | 1.65s 2026-01-22 12:45:44.317474 | fa163e0d-6f45-64a1-ca76-0000000026e4 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:06:38.282479 | 1.66s 2026-01-22 12:45:44.366358 | fa163e0d-6f45-64a1-ca76-000000002278 | TASK | install systemd-container for a greenfield 2026-01-22 12:46:02.164944 | fa163e0d-6f45-64a1-ca76-000000002278 | CHANGED | install systemd-container for a greenfield | standalone 2026-01-22 12:46:02.166775 | fa163e0d-6f45-64a1-ca76-000000002278 | TIMING | install systemd-container for a greenfield | standalone | 0:06:56.131780 | 17.80s 2026-01-22 12:46:02.192862 | fa163e0d-6f45-64a1-ca76-000000002279 | TASK | manage PMEM namespaces for vPMEM 2026-01-22 12:46:02.248486 | fa163e0d-6f45-64a1-ca76-000000002279 | SKIPPED | manage PMEM namespaces for vPMEM | standalone 2026-01-22 12:46:02.249817 | fa163e0d-6f45-64a1-ca76-000000002279 | TIMING | manage PMEM namespaces for vPMEM | standalone | 0:06:56.214824 | 0.06s 2026-01-22 12:46:02.274884 | fa163e0d-6f45-64a1-ca76-00000000227b | TASK | Enable post-copy by setting unprivileged_userfaultfd 2026-01-22 12:46:02.548372 | fa163e0d-6f45-64a1-ca76-00000000227b | CHANGED | Enable post-copy by setting unprivileged_userfaultfd | standalone 2026-01-22 12:46:02.549477 | fa163e0d-6f45-64a1-ca76-00000000227b | TIMING | Enable post-copy by setting unprivileged_userfaultfd | standalone | 0:06:56.514484 | 0.27s 2026-01-22 12:46:02.573706 | fa163e0d-6f45-64a1-ca76-00000000227d | TASK | is KSM enabled 2026-01-22 12:46:02.633791 | fa163e0d-6f45-64a1-ca76-00000000227d | OK | is KSM enabled | standalone 2026-01-22 12:46:02.634857 | fa163e0d-6f45-64a1-ca76-00000000227d | TIMING | is KSM enabled | standalone | 0:06:56.599865 | 0.06s 2026-01-22 12:46:02.660020 | fa163e0d-6f45-64a1-ca76-00000000227f | TASK | Check for ksm 2026-01-22 12:46:02.913017 | fa163e0d-6f45-64a1-ca76-00000000227f | CHANGED | Check for ksm | standalone 2026-01-22 12:46:02.914107 | fa163e0d-6f45-64a1-ca76-00000000227f | TIMING | Check for ksm | standalone | 0:06:56.879121 | 0.25s 2026-01-22 12:46:02.933515 | fa163e0d-6f45-64a1-ca76-000000002280 | TASK | disable KSM services 2026-01-22 12:46:03.008555 | fa163e0d-6f45-64a1-ca76-000000002280 | SKIPPED | disable KSM services | standalone | item=ksm.service 2026-01-22 12:46:03.032288 | fa163e0d-6f45-64a1-ca76-000000002280 | SKIPPED | disable KSM services | standalone | item=ksmtuned.service 2026-01-22 12:46:03.037796 | fa163e0d-6f45-64a1-ca76-000000002280 | TIMING | disable KSM services | standalone | 0:06:57.002800 | 0.10s 2026-01-22 12:46:03.058787 | fa163e0d-6f45-64a1-ca76-000000002281 | TASK | delete PageKSM after disable ksm on compute 2026-01-22 12:46:03.107504 | fa163e0d-6f45-64a1-ca76-000000002281 | SKIPPED | delete PageKSM after disable ksm on compute | standalone 2026-01-22 12:46:03.108624 | 
fa163e0d-6f45-64a1-ca76-000000002281 | TIMING | delete PageKSM after disable ksm on compute | standalone | 0:06:57.073638 | 0.05s 2026-01-22 12:46:03.128475 | fa163e0d-6f45-64a1-ca76-000000002283 | TASK | make sure package providing ksmtuned is installed (RHEL8 or CentOS8) 2026-01-22 12:46:03.166799 | fa163e0d-6f45-64a1-ca76-000000002283 | SKIPPED | make sure package providing ksmtuned is installed (RHEL8 or CentOS8) | standalone 2026-01-22 12:46:03.167864 | fa163e0d-6f45-64a1-ca76-000000002283 | TIMING | make sure package providing ksmtuned is installed (RHEL8 or CentOS8) | standalone | 0:06:57.132877 | 0.04s 2026-01-22 12:46:03.187758 | fa163e0d-6f45-64a1-ca76-000000002284 | TASK | make sure package providing ksmtuned is installed (RHEL9 or CentOS9) 2026-01-22 12:46:03.226094 | fa163e0d-6f45-64a1-ca76-000000002284 | SKIPPED | make sure package providing ksmtuned is installed (RHEL9 or CentOS9) | standalone 2026-01-22 12:46:03.228307 | fa163e0d-6f45-64a1-ca76-000000002284 | TIMING | make sure package providing ksmtuned is installed (RHEL9 or CentOS9) | standalone | 0:06:57.193319 | 0.04s 2026-01-22 12:46:03.247511 | fa163e0d-6f45-64a1-ca76-000000002285 | TASK | enable ksmtunded 2026-01-22 12:46:03.317301 | fa163e0d-6f45-64a1-ca76-000000002285 | SKIPPED | enable ksmtunded | standalone | item=ksm.service 2026-01-22 12:46:03.347107 | fa163e0d-6f45-64a1-ca76-000000002285 | SKIPPED | enable ksmtunded | standalone | item=ksmtuned.service 2026-01-22 12:46:03.353261 | fa163e0d-6f45-64a1-ca76-000000002285 | TIMING | enable ksmtunded | standalone | 0:06:57.318267 | 0.10s 2026-01-22 12:46:03.373847 | fa163e0d-6f45-64a1-ca76-000000002287 | TASK | Create dirs for certificates and keys 2026-01-22 12:46:03.626159 | fa163e0d-6f45-64a1-ca76-000000002287 | CHANGED | Create dirs for certificates and keys | standalone | item=/etc/pki/libvirt 2026-01-22 12:46:03.628502 | fa163e0d-6f45-64a1-ca76-000000002287 | TIMING | Create dirs for certificates and keys | standalone | 0:06:57.593503 | 0.25s 2026-01-22 12:46:03.845169 | fa163e0d-6f45-64a1-ca76-000000002287 | CHANGED | Create dirs for certificates and keys | standalone | item=/etc/pki/libvirt/private 2026-01-22 12:46:03.845996 | fa163e0d-6f45-64a1-ca76-000000002287 | TIMING | Create dirs for certificates and keys | standalone | 0:06:57.811012 | 0.47s 2026-01-22 12:46:04.044702 | fa163e0d-6f45-64a1-ca76-000000002287 | CHANGED | Create dirs for certificates and keys | standalone | item=/etc/pki/qemu 2026-01-22 12:46:04.046609 | fa163e0d-6f45-64a1-ca76-000000002287 | TIMING | Create dirs for certificates and keys | standalone | 0:06:58.011619 | 0.67s 2026-01-22 12:46:04.057905 | fa163e0d-6f45-64a1-ca76-000000002287 | TIMING | Create dirs for certificates and keys | standalone | 0:06:58.022910 | 0.68s 2026-01-22 12:46:04.134227 | fa163e0d-6f45-64a1-ca76-000000002288 | TIMING | include_role : linux-system-roles.certificate | standalone | 0:06:58.099227 | 0.05s 2026-01-22 12:46:04.198545 | fa163e0d-6f45-64a1-ca76-000000002774 | TASK | Set version specific variables 2026-01-22 12:46:04.246450 | fa163e0d-6f45-64a1-ca76-000000002774 | TIMING | linux-system-roles.certificate : Set version specific variables | standalone | 0:06:58.211451 | 0.05s 2026-01-22 12:46:04.265207 | f06ac6ca-794d-4459-9426-505254c7e082 | INCLUDED | /usr/share/ansible/roles/rhel-system-roles.certificate/tasks/set_vars.yml | standalone 2026-01-22 12:46:04.295503 | fa163e0d-6f45-64a1-ca76-0000000027a7 | TASK | Ensure ansible_facts used by role 2026-01-22 12:46:04.699938 | 
fa163e0d-6f45-64a1-ca76-0000000027a7 | OK | Ensure ansible_facts used by role | standalone 2026-01-22 12:46:04.701112 | fa163e0d-6f45-64a1-ca76-0000000027a7 | TIMING | linux-system-roles.certificate : Ensure ansible_facts used by role | standalone | 0:06:58.666126 | 0.40s 2026-01-22 12:46:04.726982 | fa163e0d-6f45-64a1-ca76-0000000027a8 | TASK | Set platform/version specific variables 2026-01-22 12:46:04.782082 | fa163e0d-6f45-64a1-ca76-0000000027a8 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:46:04.798099 | fa163e0d-6f45-64a1-ca76-0000000027a8 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:46:04.811706 | fa163e0d-6f45-64a1-ca76-0000000027a8 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.yml 2026-01-22 12:46:04.822102 | fa163e0d-6f45-64a1-ca76-0000000027a8 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.2.yml 2026-01-22 12:46:04.828560 | fa163e0d-6f45-64a1-ca76-0000000027a8 | TIMING | linux-system-roles.certificate : Set platform/version specific variables | standalone | 0:06:58.793554 | 0.10s 2026-01-22 12:46:04.854222 | fa163e0d-6f45-64a1-ca76-000000002775 | TASK | Ensure certificate role dependencies are installed 2026-01-22 12:46:07.827802 | fa163e0d-6f45-64a1-ca76-000000002775 | OK | Ensure certificate role dependencies are installed | standalone 2026-01-22 12:46:07.829340 | fa163e0d-6f45-64a1-ca76-000000002775 | TIMING | linux-system-roles.certificate : Ensure certificate role dependencies are installed | standalone | 0:07:01.794345 | 2.97s 2026-01-22 12:46:07.855502 | fa163e0d-6f45-64a1-ca76-000000002777 | TASK | Ensure provider packages are installed 2026-01-22 12:46:10.831131 | fa163e0d-6f45-64a1-ca76-000000002777 | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:07:04.796127 | 2.97s 2026-01-22 12:46:10.838107 | fa163e0d-6f45-64a1-ca76-000000002777 | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:07:04.803109 | 2.98s 2026-01-22 12:46:10.864180 | fa163e0d-6f45-64a1-ca76-000000002779 | TASK | Ensure pre-scripts hooks directory exists 2026-01-22 12:46:11.150933 | fa163e0d-6f45-64a1-ca76-000000002779 | OK | Ensure pre-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:46:11.152723 | fa163e0d-6f45-64a1-ca76-000000002779 | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:07:05.117728 | 0.29s 2026-01-22 12:46:11.162136 | fa163e0d-6f45-64a1-ca76-000000002779 | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:07:05.127151 | 0.30s 2026-01-22 12:46:11.179794 | fa163e0d-6f45-64a1-ca76-00000000277b | TASK | Ensure post-scripts hooks directory exists 2026-01-22 12:46:11.463870 | fa163e0d-6f45-64a1-ca76-00000000277b | OK | Ensure post-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:46:11.466778 | fa163e0d-6f45-64a1-ca76-00000000277b | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:07:05.431774 | 0.29s 2026-01-22 12:46:11.476224 | fa163e0d-6f45-64a1-ca76-00000000277b | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:07:05.441231 | 0.30s 2026-01-22 12:46:11.502129 | fa163e0d-6f45-64a1-ca76-00000000277d | TASK | Ensure provider service is running 2026-01-22 12:46:11.906708 
| fa163e0d-6f45-64a1-ca76-00000000277d | OK | Ensure provider service is running | standalone | item=certmonger 2026-01-22 12:46:11.908623 | fa163e0d-6f45-64a1-ca76-00000000277d | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:07:05.873627 | 0.41s 2026-01-22 12:46:11.916992 | fa163e0d-6f45-64a1-ca76-00000000277d | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:07:05.882001 | 0.41s 2026-01-22 12:46:11.938489 | fa163e0d-6f45-64a1-ca76-000000002780 | TASK | Ensure certificate requests 2026-01-22 12:46:14.048859 | fa163e0d-6f45-64a1-ca76-000000002780 | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': 'standalone.internalapi.ooo.test', 'key_size': '2048', 'name': 'libvirt-server-cert', 'principal': 'libvirt/standalone.internalapi.ooo.test@OOO.TEST', 'run_after': '# Copy cert and key to libvirt dirs\ncp /etc/ipa/ca.crt /etc/pki/CA/cacert.pem\nchown root:root /etc/pki/CA/cacert.pem\nchmod 644 /etc/pki/CA/cacert.pem\ncp /etc/pki/tls/certs/libvirt-server-cert.crt /etc/pki/libvirt/servercert.pem\ncp /etc/pki/tls/private/libvirt-server-cert.key /etc/pki/libvirt/private/serverkey.pem\npodman exec nova_virtproxyd virt-admin server-update-tls virtproxyd || systemctl reload tripleo_nova_virtproxyd\n'} 2026-01-22 12:46:14.050517 | fa163e0d-6f45-64a1-ca76-000000002780 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:07:08.015517 | 2.11s 2026-01-22 12:46:15.587934 | fa163e0d-6f45-64a1-ca76-000000002780 | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': 'standalone.internalapi.ooo.test', 'key_size': '2048', 'name': 'libvirt-client-cert', 'principal': 'libvirt/standalone.internalapi.ooo.test@OOO.TEST', 'run_after': '# Copy cert and key to libvirt dirs\ncp /etc/pki/tls/certs/libvirt-client-cert.crt /etc/pki/libvirt/clientcert.pem\ncp /etc/pki/tls/private/libvirt-client-cert.key /etc/pki/libvirt/private/clientkey.pem\npodman exec nova_virtproxyd virt-admin server-update-tls virtproxyd || systemctl reload tripleo_nova_virtproxyd\n'} 2026-01-22 12:46:15.590535 | fa163e0d-6f45-64a1-ca76-000000002780 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:07:09.555545 | 3.65s 2026-01-22 12:46:17.285048 | fa163e0d-6f45-64a1-ca76-000000002780 | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': 'standalone.internalapi.ooo.test', 'group': 'qemu', 'key_size': '2048', 'name': 'qemu-server-cert', 'owner': 'root', 'principal': 'qemu/standalone.internalapi.ooo.test@OOO.TEST', 'run_after': '# Copy cert and key to qemu dir\ncp /etc/ipa/ca.crt /etc/pki/qemu/ca-cert.pem\nchown root:root /etc/pki/qemu/ca-cert.pem\nchmod 644 /etc/pki/qemu/ca-cert.pem\ncp -a /etc/pki/tls/certs/qemu-server-cert.crt /etc/pki/qemu/server-cert.pem\ncp -a /etc/pki/tls/private/qemu-server-cert.key /etc/pki/qemu/server-key.pem\nchgrp qemu /etc/pki/qemu/server-*\nchmod 0640 /etc/pki/qemu/server-cert.pem\nchmod 0640 /etc/pki/qemu/server-key.pem\n'} 2026-01-22 12:46:17.285879 | fa163e0d-6f45-64a1-ca76-000000002780 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:07:11.250895 | 5.35s 2026-01-22 12:46:19.255573 | fa163e0d-6f45-64a1-ca76-000000002780 | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': 'standalone.internalapi.ooo.test', 'group': 'qemu', 'key_size': '2048', 'name': 'qemu-client-cert', 'owner': 'root', 'principal': 
'qemu/standalone.internalapi.ooo.test@OOO.TEST', 'run_after': '# Copy cert and key to qemu dir\ncp -a /etc/pki/tls/certs/qemu-client-cert.crt /etc/pki/qemu/client-cert.pem\ncp -a /etc/pki/tls/private/qemu-client-cert.key /etc/pki/qemu/client-key.pem\nchgrp qemu /etc/pki/qemu/client-*\nchmod 0640 /etc/pki/qemu/client-cert.pem\nchmod 0640 /etc/pki/qemu/client-key.pem\n'} 2026-01-22 12:46:19.257591 | fa163e0d-6f45-64a1-ca76-000000002780 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:07:13.222601 | 7.32s 2026-01-22 12:46:19.269487 | fa163e0d-6f45-64a1-ca76-000000002780 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:07:13.234499 | 7.33s 2026-01-22 12:46:19.319434 | fa163e0d-6f45-64a1-ca76-00000000228a | TASK | Execute system role for Nova/Libvirt VNC certs 2026-01-22 12:46:19.405828 | fa163e0d-6f45-64a1-ca76-00000000228a | TIMING | Execute system role for Nova/Libvirt VNC certs | standalone | 0:07:13.370817 | 0.08s 2026-01-22 12:46:19.417416 | fa163e0d-6f45-64a1-ca76-00000000228a | TIMING | Execute system role for Nova/Libvirt VNC certs | standalone | 0:07:13.382427 | 0.10s 2026-01-22 12:46:19.429540 | fa163e0d-6f45-64a1-ca76-00000000228a | TIMING | Execute system role for Nova/Libvirt VNC certs | standalone | 0:07:13.394552 | 0.11s 2026-01-22 12:46:19.516278 | fa163e0d-6f45-64a1-ca76-0000000027fe | TASK | Set version specific variables 2026-01-22 12:46:19.564488 | fa163e0d-6f45-64a1-ca76-0000000027fe | TIMING | linux-system-roles.certificate : Set version specific variables | standalone | 0:07:13.529489 | 0.05s 2026-01-22 12:46:19.582885 | 1096918c-c6f5-48ab-841c-0f7242e81796 | INCLUDED | /usr/share/ansible/roles/rhel-system-roles.certificate/tasks/set_vars.yml | standalone 2026-01-22 12:46:19.613236 | fa163e0d-6f45-64a1-ca76-000000002856 | TASK | Ensure ansible_facts used by role 2026-01-22 12:46:20.020617 | fa163e0d-6f45-64a1-ca76-000000002856 | OK | Ensure ansible_facts used by role | standalone 2026-01-22 12:46:20.022175 | fa163e0d-6f45-64a1-ca76-000000002856 | TIMING | linux-system-roles.certificate : Ensure ansible_facts used by role | standalone | 0:07:13.987184 | 0.41s 2026-01-22 12:46:20.049991 | fa163e0d-6f45-64a1-ca76-000000002857 | TASK | Set platform/version specific variables 2026-01-22 12:46:20.130268 | fa163e0d-6f45-64a1-ca76-000000002857 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:46:20.148707 | fa163e0d-6f45-64a1-ca76-000000002857 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:46:20.166736 | fa163e0d-6f45-64a1-ca76-000000002857 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.yml 2026-01-22 12:46:20.180379 | fa163e0d-6f45-64a1-ca76-000000002857 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.2.yml 2026-01-22 12:46:20.192273 | fa163e0d-6f45-64a1-ca76-000000002857 | TIMING | linux-system-roles.certificate : Set platform/version specific variables | standalone | 0:07:14.157278 | 0.14s 2026-01-22 12:46:20.217745 | fa163e0d-6f45-64a1-ca76-0000000027ff | TASK | Ensure certificate role dependencies are installed 2026-01-22 12:46:23.161435 | fa163e0d-6f45-64a1-ca76-0000000027ff | OK | Ensure certificate role dependencies are installed | standalone 2026-01-22 12:46:23.162742 | fa163e0d-6f45-64a1-ca76-0000000027ff | TIMING | linux-system-roles.certificate : Ensure certificate role dependencies are installed | standalone | 0:07:17.127750 | 
2.94s 2026-01-22 12:46:23.187396 | fa163e0d-6f45-64a1-ca76-000000002801 | TASK | Ensure provider packages are installed 2026-01-22 12:46:26.093001 | fa163e0d-6f45-64a1-ca76-000000002801 | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:07:20.057999 | 2.90s 2026-01-22 12:46:26.104901 | fa163e0d-6f45-64a1-ca76-000000002801 | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:07:20.069910 | 2.92s 2026-01-22 12:46:26.130460 | fa163e0d-6f45-64a1-ca76-000000002803 | TASK | Ensure pre-scripts hooks directory exists 2026-01-22 12:46:26.418769 | fa163e0d-6f45-64a1-ca76-000000002803 | OK | Ensure pre-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:46:26.420109 | fa163e0d-6f45-64a1-ca76-000000002803 | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:07:20.385116 | 0.29s 2026-01-22 12:46:26.425990 | fa163e0d-6f45-64a1-ca76-000000002803 | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:07:20.391001 | 0.29s 2026-01-22 12:46:26.452174 | fa163e0d-6f45-64a1-ca76-000000002805 | TASK | Ensure post-scripts hooks directory exists 2026-01-22 12:46:26.745996 | fa163e0d-6f45-64a1-ca76-000000002805 | OK | Ensure post-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:46:26.747392 | fa163e0d-6f45-64a1-ca76-000000002805 | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:07:20.712398 | 0.29s 2026-01-22 12:46:26.759396 | fa163e0d-6f45-64a1-ca76-000000002805 | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:07:20.724407 | 0.31s 2026-01-22 12:46:26.785844 | fa163e0d-6f45-64a1-ca76-000000002807 | TASK | Ensure provider service is running 2026-01-22 12:46:27.213678 | fa163e0d-6f45-64a1-ca76-000000002807 | OK | Ensure provider service is running | standalone | item=certmonger 2026-01-22 12:46:27.216409 | fa163e0d-6f45-64a1-ca76-000000002807 | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:07:21.181415 | 0.43s 2026-01-22 12:46:27.226746 | fa163e0d-6f45-64a1-ca76-000000002807 | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:07:21.191757 | 0.44s 2026-01-22 12:46:27.253871 | fa163e0d-6f45-64a1-ca76-00000000280a | TASK | Ensure certificate requests 2026-01-22 12:46:29.127386 | fa163e0d-6f45-64a1-ca76-00000000280a | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': 'standalone.internalapi.ooo.test', 'key_size': '2048', 'name': 'libvirt-vnc-client-cert', 'principal': 'libvirt-vnc/standalone.internalapi.ooo.test@OOO.TEST', 'run_after': 'container_name=$(podman ps --format=\\{\\{.Names\\}\\} | grep nova_vnc_proxy)\nservice_crt="/etc/pki/tls/certs/libvirt-vnc-client-cert.crt"\nservice_key="/etc/pki/tls/private/libvirt-vnc-client-cert.key"\n# Copy the new cert from the mount-point to the real path\npodman exec -u root "$container_name" cp "/var/lib/kolla/config_files/src-tls$service_crt" "$service_crt"\n# Copy the new key from the mount-point to the real path\npodman exec -u root "$container_name" cp "/var/lib/kolla/config_files/src-tls$service_key" "$service_key"\n\n# Set permissions\npodman exec -u root "$container_name" chmod 0644 $service_crt\npodman exec -u root "$container_name" chmod 0640 $service_key\npodman exec -u root "$container_name" chgrp qemu 
$service_key\n\n# No need to trigger a reload for novnc proxy since the cert is not cached\n'} 2026-01-22 12:46:29.129869 | fa163e0d-6f45-64a1-ca76-00000000280a | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:07:23.094874 | 1.87s 2026-01-22 12:46:29.140277 | fa163e0d-6f45-64a1-ca76-00000000280a | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:07:23.105278 | 1.88s 2026-01-22 12:46:29.188978 | fa163e0d-6f45-64a1-ca76-000000002827 | TASK | Set version specific variables 2026-01-22 12:46:29.238322 | fa163e0d-6f45-64a1-ca76-000000002827 | TIMING | linux-system-roles.certificate : Set version specific variables | standalone | 0:07:23.203325 | 0.05s 2026-01-22 12:46:29.257739 | d25da674-2a04-4a39-a460-9653bac14530 | INCLUDED | /usr/share/ansible/roles/rhel-system-roles.certificate/tasks/set_vars.yml | standalone 2026-01-22 12:46:29.287375 | fa163e0d-6f45-64a1-ca76-000000002890 | TASK | Ensure ansible_facts used by role 2026-01-22 12:46:29.689284 | fa163e0d-6f45-64a1-ca76-000000002890 | OK | Ensure ansible_facts used by role | standalone 2026-01-22 12:46:29.690146 | fa163e0d-6f45-64a1-ca76-000000002890 | TIMING | linux-system-roles.certificate : Ensure ansible_facts used by role | standalone | 0:07:23.655164 | 0.40s 2026-01-22 12:46:29.707853 | fa163e0d-6f45-64a1-ca76-000000002891 | TASK | Set platform/version specific variables 2026-01-22 12:46:29.748633 | fa163e0d-6f45-64a1-ca76-000000002891 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:46:29.760227 | fa163e0d-6f45-64a1-ca76-000000002891 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:46:29.773835 | fa163e0d-6f45-64a1-ca76-000000002891 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.yml 2026-01-22 12:46:29.784324 | fa163e0d-6f45-64a1-ca76-000000002891 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.2.yml 2026-01-22 12:46:29.795567 | fa163e0d-6f45-64a1-ca76-000000002891 | TIMING | linux-system-roles.certificate : Set platform/version specific variables | standalone | 0:07:23.760579 | 0.09s 2026-01-22 12:46:29.812919 | fa163e0d-6f45-64a1-ca76-000000002828 | TASK | Ensure certificate role dependencies are installed 2026-01-22 12:46:32.712065 | fa163e0d-6f45-64a1-ca76-000000002828 | OK | Ensure certificate role dependencies are installed | standalone 2026-01-22 12:46:32.713542 | fa163e0d-6f45-64a1-ca76-000000002828 | TIMING | linux-system-roles.certificate : Ensure certificate role dependencies are installed | standalone | 0:07:26.678548 | 2.90s 2026-01-22 12:46:32.739088 | fa163e0d-6f45-64a1-ca76-00000000282a | TASK | Ensure provider packages are installed 2026-01-22 12:46:35.674951 | fa163e0d-6f45-64a1-ca76-00000000282a | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:07:29.639948 | 2.93s 2026-01-22 12:46:35.687242 | fa163e0d-6f45-64a1-ca76-00000000282a | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:07:29.652245 | 2.95s 2026-01-22 12:46:35.715430 | fa163e0d-6f45-64a1-ca76-00000000282c | TASK | Ensure pre-scripts hooks directory exists 2026-01-22 12:46:36.005003 | fa163e0d-6f45-64a1-ca76-00000000282c | OK | Ensure pre-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:46:36.006687 | fa163e0d-6f45-64a1-ca76-00000000282c | TIMING | linux-system-roles.certificate : Ensure 
pre-scripts hooks directory exists | standalone | 0:07:29.971696 | 0.29s 2026-01-22 12:46:36.016187 | fa163e0d-6f45-64a1-ca76-00000000282c | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:07:29.981208 | 0.30s 2026-01-22 12:46:36.033572 | fa163e0d-6f45-64a1-ca76-00000000282e | TASK | Ensure post-scripts hooks directory exists 2026-01-22 12:46:36.287858 | fa163e0d-6f45-64a1-ca76-00000000282e | OK | Ensure post-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:46:36.289197 | fa163e0d-6f45-64a1-ca76-00000000282e | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:07:30.254204 | 0.25s 2026-01-22 12:46:36.297889 | fa163e0d-6f45-64a1-ca76-00000000282e | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:07:30.262901 | 0.26s 2026-01-22 12:46:36.323558 | fa163e0d-6f45-64a1-ca76-000000002830 | TASK | Ensure provider service is running 2026-01-22 12:46:37.765730 | fa163e0d-6f45-64a1-ca76-000000002830 | OK | Ensure provider service is running | standalone | item=certmonger 2026-01-22 12:46:37.768539 | fa163e0d-6f45-64a1-ca76-000000002830 | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:07:31.733543 | 1.44s 2026-01-22 12:46:37.779324 | fa163e0d-6f45-64a1-ca76-000000002830 | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:07:31.744317 | 1.45s 2026-01-22 12:46:37.807864 | fa163e0d-6f45-64a1-ca76-000000002833 | TASK | Ensure certificate requests 2026-01-22 12:46:39.685867 | fa163e0d-6f45-64a1-ca76-000000002833 | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': 'standalone.internalapi.ooo.test', 'key_size': '2048', 'name': 'novnc-proxy', 'principal': 'novnc-proxy/standalone.internalapi.ooo.test@OOO.TEST', 'run_after': 'container_name=$(podman ps --format=\\{\\{.Names\\}\\} | grep nova_vnc_proxy)\nservice_crt="/etc/pki/tls/certs/novnc-proxy.crt"\nservice_key="/etc/pki/tls/private/novnc-proxy.key"\n# Copy the new cert from the mount-point to the real path\npodman exec -u root "$container_name" cp "/var/lib/kolla/config_files/src-tls$service_crt" "$service_crt"\n# Copy the new key from the mount-point to the real path\npodman exec -u root "$container_name" cp "/var/lib/kolla/config_files/src-tls$service_key" "$service_key"\n\n# Set permissions\npodman exec -u root "$container_name" chmod 0644 $service_crt\npodman exec -u root "$container_name" chmod 0640 $service_key\npodman exec -u root "$container_name" chgrp qemu $service_key\n\n# No need to trigger a reload for novnc proxy since the cert is not cached\n'} 2026-01-22 12:46:39.688935 | fa163e0d-6f45-64a1-ca76-000000002833 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:07:33.653940 | 1.88s 2026-01-22 12:46:39.700017 | fa163e0d-6f45-64a1-ca76-000000002833 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:07:33.665014 | 1.89s 2026-01-22 12:46:39.803486 | fa163e0d-6f45-64a1-ca76-00000000228d | TIMING | include_role : linux-system-roles.certificate | standalone | 0:07:33.768481 | 0.05s 2026-01-22 12:46:39.869844 | fa163e0d-6f45-64a1-ca76-0000000028d5 | TASK | Set version specific variables 2026-01-22 12:46:39.920123 | fa163e0d-6f45-64a1-ca76-0000000028d5 | TIMING | linux-system-roles.certificate : Set version specific variables | standalone | 0:07:33.885124 | 0.05s 2026-01-22 
12:46:39.939737 | b529745a-257d-4e43-b327-31e69ba5565d | INCLUDED | /usr/share/ansible/roles/rhel-system-roles.certificate/tasks/set_vars.yml | standalone 2026-01-22 12:46:39.971615 | fa163e0d-6f45-64a1-ca76-000000002904 | TASK | Ensure ansible_facts used by role 2026-01-22 12:46:40.367985 | fa163e0d-6f45-64a1-ca76-000000002904 | OK | Ensure ansible_facts used by role | standalone 2026-01-22 12:46:40.369597 | fa163e0d-6f45-64a1-ca76-000000002904 | TIMING | linux-system-roles.certificate : Ensure ansible_facts used by role | standalone | 0:07:34.334605 | 0.40s 2026-01-22 12:46:40.396020 | fa163e0d-6f45-64a1-ca76-000000002905 | TASK | Set platform/version specific variables 2026-01-22 12:46:40.466811 | fa163e0d-6f45-64a1-ca76-000000002905 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:46:40.489036 | fa163e0d-6f45-64a1-ca76-000000002905 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:46:40.511483 | fa163e0d-6f45-64a1-ca76-000000002905 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.yml 2026-01-22 12:46:40.531549 | fa163e0d-6f45-64a1-ca76-000000002905 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.2.yml 2026-01-22 12:46:40.537352 | fa163e0d-6f45-64a1-ca76-000000002905 | TIMING | linux-system-roles.certificate : Set platform/version specific variables | standalone | 0:07:34.502348 | 0.14s 2026-01-22 12:46:40.565310 | fa163e0d-6f45-64a1-ca76-0000000028d6 | TASK | Ensure certificate role dependencies are installed 2026-01-22 12:46:43.417642 | fa163e0d-6f45-64a1-ca76-0000000028d6 | OK | Ensure certificate role dependencies are installed | standalone 2026-01-22 12:46:43.419235 | fa163e0d-6f45-64a1-ca76-0000000028d6 | TIMING | linux-system-roles.certificate : Ensure certificate role dependencies are installed | standalone | 0:07:37.384239 | 2.85s 2026-01-22 12:46:43.445169 | fa163e0d-6f45-64a1-ca76-0000000028d8 | TASK | Ensure provider packages are installed 2026-01-22 12:46:46.307866 | fa163e0d-6f45-64a1-ca76-0000000028d8 | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:07:40.272864 | 2.86s 2026-01-22 12:46:46.320098 | fa163e0d-6f45-64a1-ca76-0000000028d8 | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:07:40.285099 | 2.87s 2026-01-22 12:46:46.346042 | fa163e0d-6f45-64a1-ca76-0000000028da | TASK | Ensure pre-scripts hooks directory exists 2026-01-22 12:46:46.613292 | fa163e0d-6f45-64a1-ca76-0000000028da | OK | Ensure pre-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:46:46.614842 | fa163e0d-6f45-64a1-ca76-0000000028da | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:07:40.579845 | 0.27s 2026-01-22 12:46:46.620597 | fa163e0d-6f45-64a1-ca76-0000000028da | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:07:40.585608 | 0.27s 2026-01-22 12:46:46.641448 | fa163e0d-6f45-64a1-ca76-0000000028dc | TASK | Ensure post-scripts hooks directory exists 2026-01-22 12:46:46.916908 | fa163e0d-6f45-64a1-ca76-0000000028dc | OK | Ensure post-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:46:46.919485 | fa163e0d-6f45-64a1-ca76-0000000028dc | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:07:40.884488 | 0.28s 2026-01-22 12:46:46.929162 | 
fa163e0d-6f45-64a1-ca76-0000000028dc | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:07:40.894165 | 0.29s 2026-01-22 12:46:46.958001 | fa163e0d-6f45-64a1-ca76-0000000028de | TASK | Ensure provider service is running 2026-01-22 12:46:47.392020 | fa163e0d-6f45-64a1-ca76-0000000028de | OK | Ensure provider service is running | standalone | item=certmonger 2026-01-22 12:46:47.393844 | fa163e0d-6f45-64a1-ca76-0000000028de | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:07:41.358850 | 0.43s 2026-01-22 12:46:47.406946 | fa163e0d-6f45-64a1-ca76-0000000028de | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:07:41.371950 | 0.45s 2026-01-22 12:46:47.434452 | fa163e0d-6f45-64a1-ca76-0000000028e1 | TASK | Ensure certificate requests 2026-01-22 12:46:48.812719 | fa163e0d-6f45-64a1-ca76-0000000028e1 | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': 'standalone.internalapi.ooo.test', 'key_size': '2048', 'name': 'ovn_controller', 'principal': 'ovn_controller/standalone.internalapi.ooo.test@OOO.TEST'} 2026-01-22 12:46:48.815096 | fa163e0d-6f45-64a1-ca76-0000000028e1 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:07:42.780102 | 1.38s 2026-01-22 12:46:48.825791 | fa163e0d-6f45-64a1-ca76-0000000028e1 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:07:42.790793 | 1.39s 2026-01-22 12:46:48.878174 | fa163e0d-6f45-64a1-ca76-00000000228f | TASK | set is_ovn_dbs_bootstrap_node fact 2026-01-22 12:46:48.954358 | fa163e0d-6f45-64a1-ca76-00000000228f | OK | set is_ovn_dbs_bootstrap_node fact | standalone 2026-01-22 12:46:48.955809 | fa163e0d-6f45-64a1-ca76-00000000228f | TIMING | set is_ovn_dbs_bootstrap_node fact | standalone | 0:07:42.920814 | 0.08s 2026-01-22 12:46:48.982016 | fa163e0d-6f45-64a1-ca76-000000002290 | TASK | Populate ansible service facts so we can check for pacemaker service 2026-01-22 12:46:52.034510 | fa163e0d-6f45-64a1-ca76-000000002290 | OK | Populate ansible service facts so we can check for pacemaker service | standalone 2026-01-22 12:46:52.037007 | fa163e0d-6f45-64a1-ca76-000000002290 | TIMING | Populate ansible service facts so we can check for pacemaker service | standalone | 0:07:46.002020 | 3.05s 2026-01-22 12:46:52.058668 | fa163e0d-6f45-64a1-ca76-000000002292 | TASK | Fetch ovn-dbs-bundle 2026-01-22 12:46:52.097338 | fa163e0d-6f45-64a1-ca76-000000002292 | SKIPPED | Fetch ovn-dbs-bundle | standalone 2026-01-22 12:46:52.098162 | fa163e0d-6f45-64a1-ca76-000000002292 | TIMING | Fetch ovn-dbs-bundle | standalone | 0:07:46.063179 | 0.04s 2026-01-22 12:46:52.119835 | fa163e0d-6f45-64a1-ca76-000000002293 | TASK | Fetch ovn VIP 2026-01-22 12:46:52.177489 | fa163e0d-6f45-64a1-ca76-000000002293 | SKIPPED | Fetch ovn VIP | standalone 2026-01-22 12:46:52.178364 | fa163e0d-6f45-64a1-ca76-000000002293 | TIMING | Fetch ovn VIP | standalone | 0:07:46.143378 | 0.06s 2026-01-22 12:46:52.200401 | fa163e0d-6f45-64a1-ca76-000000002294 | TASK | Remove OVNDBs from pacemaker 2026-01-22 12:46:52.252412 | fa163e0d-6f45-64a1-ca76-000000002294 | SKIPPED | Remove OVNDBs from pacemaker | standalone 2026-01-22 12:46:52.253752 | fa163e0d-6f45-64a1-ca76-000000002294 | TIMING | Remove OVNDBs from pacemaker | standalone | 0:07:46.218755 | 0.05s 2026-01-22 12:46:52.284893 | fa163e0d-6f45-64a1-ca76-000000002295 | TASK | Remove OVNDBs VIP from pacemaker 2026-01-22 
12:46:52.358091 | fa163e0d-6f45-64a1-ca76-000000002295 | SKIPPED | Remove OVNDBs VIP from pacemaker | standalone 2026-01-22 12:46:52.359440 | fa163e0d-6f45-64a1-ca76-000000002295 | TIMING | Remove OVNDBs VIP from pacemaker | standalone | 0:07:46.324447 | 0.07s 2026-01-22 12:46:52.388244 | fa163e0d-6f45-64a1-ca76-000000002296 | TASK | Clean up pacemaker remote nodes cache 2026-01-22 12:46:52.458124 | fa163e0d-6f45-64a1-ca76-000000002296 | SKIPPED | Clean up pacemaker remote nodes cache | standalone 2026-01-22 12:46:52.459383 | fa163e0d-6f45-64a1-ca76-000000002296 | TIMING | Clean up pacemaker remote nodes cache | standalone | 0:07:46.424390 | 0.07s 2026-01-22 12:46:52.487560 | fa163e0d-6f45-64a1-ca76-000000002297 | TASK | Remove pacemaker attributes 2026-01-22 12:46:52.559683 | fa163e0d-6f45-64a1-ca76-000000002297 | SKIPPED | Remove pacemaker attributes | standalone | item=standalone 2026-01-22 12:46:52.572284 | fa163e0d-6f45-64a1-ca76-000000002297 | TIMING | Remove pacemaker attributes | standalone | 0:07:46.537287 | 0.08s 2026-01-22 12:46:52.640497 | fa163e0d-6f45-64a1-ca76-00000000229a | TIMING | include_role : linux-system-roles.certificate | standalone | 0:07:46.605498 | 0.04s 2026-01-22 12:46:52.718341 | fa163e0d-6f45-64a1-ca76-00000000295d | TASK | Set version specific variables 2026-01-22 12:46:52.747512 | fa163e0d-6f45-64a1-ca76-00000000295d | TIMING | linux-system-roles.certificate : Set version specific variables | standalone | 0:07:46.712512 | 0.03s 2026-01-22 12:46:52.762538 | 3084318c-f368-46c9-92ba-7b3132409855 | INCLUDED | /usr/share/ansible/roles/rhel-system-roles.certificate/tasks/set_vars.yml | standalone 2026-01-22 12:46:52.787872 | fa163e0d-6f45-64a1-ca76-00000000298e | TASK | Ensure ansible_facts used by role 2026-01-22 12:46:53.196955 | fa163e0d-6f45-64a1-ca76-00000000298e | OK | Ensure ansible_facts used by role | standalone 2026-01-22 12:46:53.198538 | fa163e0d-6f45-64a1-ca76-00000000298e | TIMING | linux-system-roles.certificate : Ensure ansible_facts used by role | standalone | 0:07:47.163545 | 0.41s 2026-01-22 12:46:53.227759 | fa163e0d-6f45-64a1-ca76-00000000298f | TASK | Set platform/version specific variables 2026-01-22 12:46:53.291664 | fa163e0d-6f45-64a1-ca76-00000000298f | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:46:53.313534 | fa163e0d-6f45-64a1-ca76-00000000298f | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:46:53.336525 | fa163e0d-6f45-64a1-ca76-00000000298f | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.yml 2026-01-22 12:46:53.357258 | fa163e0d-6f45-64a1-ca76-00000000298f | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.2.yml 2026-01-22 12:46:53.363572 | fa163e0d-6f45-64a1-ca76-00000000298f | TIMING | linux-system-roles.certificate : Set platform/version specific variables | standalone | 0:07:47.328560 | 0.13s 2026-01-22 12:46:53.394286 | fa163e0d-6f45-64a1-ca76-00000000295e | TASK | Ensure certificate role dependencies are installed 2026-01-22 12:46:56.390318 | fa163e0d-6f45-64a1-ca76-00000000295e | OK | Ensure certificate role dependencies are installed | standalone 2026-01-22 12:46:56.391735 | fa163e0d-6f45-64a1-ca76-00000000295e | TIMING | linux-system-roles.certificate : Ensure certificate role dependencies are installed | standalone | 0:07:50.356742 | 3.00s 2026-01-22 12:46:56.421026 | fa163e0d-6f45-64a1-ca76-000000002960 | TASK | Ensure provider packages are installed 2026-01-22 
12:46:59.263597 | fa163e0d-6f45-64a1-ca76-000000002960 | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:07:53.228594 | 2.84s 2026-01-22 12:46:59.270411 | fa163e0d-6f45-64a1-ca76-000000002960 | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:07:53.235422 | 2.85s 2026-01-22 12:46:59.294069 | fa163e0d-6f45-64a1-ca76-000000002962 | TASK | Ensure pre-scripts hooks directory exists 2026-01-22 12:46:59.581476 | fa163e0d-6f45-64a1-ca76-000000002962 | OK | Ensure pre-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:46:59.583570 | fa163e0d-6f45-64a1-ca76-000000002962 | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:07:53.548575 | 0.29s 2026-01-22 12:46:59.593796 | fa163e0d-6f45-64a1-ca76-000000002962 | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:07:53.558798 | 0.30s 2026-01-22 12:46:59.624899 | fa163e0d-6f45-64a1-ca76-000000002964 | TASK | Ensure post-scripts hooks directory exists 2026-01-22 12:46:59.921239 | fa163e0d-6f45-64a1-ca76-000000002964 | OK | Ensure post-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:46:59.923840 | fa163e0d-6f45-64a1-ca76-000000002964 | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:07:53.888840 | 0.30s 2026-01-22 12:46:59.966024 | fa163e0d-6f45-64a1-ca76-000000002964 | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:07:53.931026 | 0.34s 2026-01-22 12:46:59.989102 | fa163e0d-6f45-64a1-ca76-000000002966 | TASK | Ensure provider service is running 2026-01-22 12:47:00.417048 | fa163e0d-6f45-64a1-ca76-000000002966 | OK | Ensure provider service is running | standalone | item=certmonger 2026-01-22 12:47:00.420156 | fa163e0d-6f45-64a1-ca76-000000002966 | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:07:54.385159 | 0.43s 2026-01-22 12:47:00.430525 | fa163e0d-6f45-64a1-ca76-000000002966 | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:07:54.395526 | 0.44s 2026-01-22 12:47:00.460405 | fa163e0d-6f45-64a1-ca76-000000002969 | TASK | Ensure certificate requests 2026-01-22 12:47:02.109338 | fa163e0d-6f45-64a1-ca76-000000002969 | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': 'standalone.internalapi.ooo.test', 'key_size': '2048', 'name': 'ovn_dbs', 'principal': 'ovn_dbs/standalone.internalapi.ooo.test@OOO.TEST'} 2026-01-22 12:47:02.112077 | fa163e0d-6f45-64a1-ca76-000000002969 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:07:56.077083 | 1.65s 2026-01-22 12:47:02.122613 | fa163e0d-6f45-64a1-ca76-000000002969 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:07:56.087617 | 1.66s 2026-01-22 12:47:02.179859 | fa163e0d-6f45-64a1-ca76-00000000229c | TASK | Configure OVN DBs and northd 2026-01-22 12:47:02.232824 | fa163e0d-6f45-64a1-ca76-00000000229c | TIMING | Configure OVN DBs and northd | standalone | 0:07:56.197818 | 0.05s 2026-01-22 12:47:02.300210 | fa163e0d-6f45-64a1-ca76-0000000029d8 | TASK | create directory /var/lib/config-data/ansible-generated/ovn 2026-01-22 12:47:02.567772 | fa163e0d-6f45-64a1-ca76-0000000029d8 | CHANGED | create directory /var/lib/config-data/ansible-generated/ovn | standalone 2026-01-22 
12:47:02.571136 | fa163e0d-6f45-64a1-ca76-0000000029d8 | TIMING | tripleo_ovn_cluster : create directory /var/lib/config-data/ansible-generated/ovn | standalone | 0:07:56.536102 | 0.27s 2026-01-22 12:47:02.600625 | fa163e0d-6f45-64a1-ca76-0000000029d9 | TASK | set is_ovn_dbs_bootstrap_node fact 2026-01-22 12:47:02.664018 | fa163e0d-6f45-64a1-ca76-0000000029d9 | SKIPPED | set is_ovn_dbs_bootstrap_node fact | standalone 2026-01-22 12:47:02.665308 | fa163e0d-6f45-64a1-ca76-0000000029d9 | TIMING | tripleo_ovn_cluster : set is_ovn_dbs_bootstrap_node fact | standalone | 0:07:56.630316 | 0.06s 2026-01-22 12:47:02.694268 | fa163e0d-6f45-64a1-ca76-0000000029da | TASK | check ovsdb-tool election-timer options 2026-01-22 12:47:02.943050 | fa163e0d-6f45-64a1-ca76-0000000029da | CHANGED | check ovsdb-tool election-timer options | standalone 2026-01-22 12:47:02.944381 | fa163e0d-6f45-64a1-ca76-0000000029da | TIMING | tripleo_ovn_cluster : check ovsdb-tool election-timer options | standalone | 0:07:56.909388 | 0.25s 2026-01-22 12:47:02.974344 | fa163e0d-6f45-64a1-ca76-0000000029db | TASK | set has_ovsdb_tool_election_timer fact 2026-01-22 12:47:03.049826 | fa163e0d-6f45-64a1-ca76-0000000029db | OK | set has_ovsdb_tool_election_timer fact | standalone 2026-01-22 12:47:03.051273 | fa163e0d-6f45-64a1-ca76-0000000029db | TIMING | tripleo_ovn_cluster : set has_ovsdb_tool_election_timer fact | standalone | 0:07:57.016281 | 0.08s 2026-01-22 12:47:03.081933 | fa163e0d-6f45-64a1-ca76-0000000029dc | TASK | render OVN cluster configuration 2026-01-22 12:47:03.638000 | fa163e0d-6f45-64a1-ca76-0000000029dc | CHANGED | render OVN cluster configuration | standalone 2026-01-22 12:47:03.639493 | fa163e0d-6f45-64a1-ca76-0000000029dc | TIMING | tripleo_ovn_cluster : render OVN cluster configuration | standalone | 0:07:57.604501 | 0.56s 2026-01-22 12:47:03.668401 | fa163e0d-6f45-64a1-ca76-0000000029dd | TASK | restart OVN northd and north and south databases 2026-01-22 12:47:03.704373 | fa163e0d-6f45-64a1-ca76-0000000029dd | TIMING | tripleo_ovn_cluster : restart OVN northd and north and south databases | standalone | 0:07:57.669365 | 0.03s 2026-01-22 12:47:03.716077 | fa163e0d-6f45-64a1-ca76-0000000029dd | TIMING | tripleo_ovn_cluster : restart OVN northd and north and south databases | standalone | 0:07:57.681089 | 0.05s 2026-01-22 12:47:03.725619 | fa163e0d-6f45-64a1-ca76-0000000029dd | TIMING | tripleo_ovn_cluster : restart OVN northd and north and south databases | standalone | 0:07:57.690632 | 0.06s 2026-01-22 12:47:03.731974 | fa163e0d-6f45-64a1-ca76-0000000029dd | TIMING | tripleo_ovn_cluster : restart OVN northd and north and south databases | standalone | 0:07:57.696978 | 0.06s 2026-01-22 12:47:03.757924 | 6b0efb84-47ae-4600-a71a-17b5a893ac68 | INCLUDED | /usr/share/ansible/roles/tripleo_ovn_cluster/tasks/restart.yml | standalone 2026-01-22 12:47:03.766553 | 253d3c13-9f2c-4073-8406-bba44a3bb1dc | INCLUDED | /usr/share/ansible/roles/tripleo_ovn_cluster/tasks/restart.yml | standalone 2026-01-22 12:47:03.774806 | 33723e00-6a34-40aa-b8bb-906a800224a3 | INCLUDED | /usr/share/ansible/roles/tripleo_ovn_cluster/tasks/restart.yml | standalone 2026-01-22 12:47:03.807435 | fa163e0d-6f45-64a1-ca76-000000002a0c | TASK | check if tripleo_cluster_north_db_server systemd service is active 2026-01-22 12:47:04.075010 | fa163e0d-6f45-64a1-ca76-000000002a0c | CHANGED | check if tripleo_cluster_north_db_server systemd service is active | standalone 2026-01-22 12:47:04.076367 | fa163e0d-6f45-64a1-ca76-000000002a0c | TIMING | 
tripleo_ovn_cluster : check if tripleo_cluster_north_db_server systemd service is active | standalone | 0:07:58.041375 | 0.27s 2026-01-22 12:47:04.107700 | fa163e0d-6f45-64a1-ca76-000000002a0d | TASK | restart tripleo_cluster_north_db_server systemd service 2026-01-22 12:47:04.158600 | fa163e0d-6f45-64a1-ca76-000000002a0d | SKIPPED | restart tripleo_cluster_north_db_server systemd service | standalone 2026-01-22 12:47:04.159745 | fa163e0d-6f45-64a1-ca76-000000002a0d | TIMING | tripleo_ovn_cluster : restart tripleo_cluster_north_db_server systemd service | standalone | 0:07:58.124753 | 0.05s 2026-01-22 12:47:04.191416 | fa163e0d-6f45-64a1-ca76-000000002a11 | TASK | check if tripleo_cluster_south_db_server systemd service is active 2026-01-22 12:47:04.457197 | fa163e0d-6f45-64a1-ca76-000000002a11 | CHANGED | check if tripleo_cluster_south_db_server systemd service is active | standalone 2026-01-22 12:47:04.458508 | fa163e0d-6f45-64a1-ca76-000000002a11 | TIMING | tripleo_ovn_cluster : check if tripleo_cluster_south_db_server systemd service is active | standalone | 0:07:58.423518 | 0.27s 2026-01-22 12:47:04.489111 | fa163e0d-6f45-64a1-ca76-000000002a12 | TASK | restart tripleo_cluster_south_db_server systemd service 2026-01-22 12:47:04.539809 | fa163e0d-6f45-64a1-ca76-000000002a12 | SKIPPED | restart tripleo_cluster_south_db_server systemd service | standalone 2026-01-22 12:47:04.541041 | fa163e0d-6f45-64a1-ca76-000000002a12 | TIMING | tripleo_ovn_cluster : restart tripleo_cluster_south_db_server systemd service | standalone | 0:07:58.506048 | 0.05s 2026-01-22 12:47:04.571501 | fa163e0d-6f45-64a1-ca76-000000002a16 | TASK | check if tripleo_cluster_northd systemd service is active 2026-01-22 12:47:04.828960 | fa163e0d-6f45-64a1-ca76-000000002a16 | CHANGED | check if tripleo_cluster_northd systemd service is active | standalone 2026-01-22 12:47:04.830474 | fa163e0d-6f45-64a1-ca76-000000002a16 | TIMING | tripleo_ovn_cluster : check if tripleo_cluster_northd systemd service is active | standalone | 0:07:58.795482 | 0.26s 2026-01-22 12:47:04.862108 | fa163e0d-6f45-64a1-ca76-000000002a17 | TASK | restart tripleo_cluster_northd systemd service 2026-01-22 12:47:04.917928 | fa163e0d-6f45-64a1-ca76-000000002a17 | SKIPPED | restart tripleo_cluster_northd systemd service | standalone 2026-01-22 12:47:04.919232 | fa163e0d-6f45-64a1-ca76-000000002a17 | TIMING | tripleo_ovn_cluster : restart tripleo_cluster_northd systemd service | standalone | 0:07:58.884238 | 0.06s 2026-01-22 12:47:05.027927 | fa163e0d-6f45-64a1-ca76-00000000229e | TIMING | include_role : linux-system-roles.certificate | standalone | 0:07:58.992927 | 0.05s 2026-01-22 12:47:05.095932 | fa163e0d-6f45-64a1-ca76-000000002a64 | TASK | Set version specific variables 2026-01-22 12:47:05.144525 | fa163e0d-6f45-64a1-ca76-000000002a64 | TIMING | linux-system-roles.certificate : Set version specific variables | standalone | 0:07:59.109529 | 0.05s 2026-01-22 12:47:05.166873 | 7c28b2bf-7320-41af-a8d2-b19f6a0f6092 | INCLUDED | /usr/share/ansible/roles/rhel-system-roles.certificate/tasks/set_vars.yml | standalone 2026-01-22 12:47:05.199051 | fa163e0d-6f45-64a1-ca76-000000002a93 | TASK | Ensure ansible_facts used by role 2026-01-22 12:47:05.590262 | fa163e0d-6f45-64a1-ca76-000000002a93 | OK | Ensure ansible_facts used by role | standalone 2026-01-22 12:47:05.591901 | fa163e0d-6f45-64a1-ca76-000000002a93 | TIMING | linux-system-roles.certificate : Ensure ansible_facts used by role | standalone | 0:07:59.556905 | 0.39s 2026-01-22 12:47:05.620741 | 
fa163e0d-6f45-64a1-ca76-000000002a94 | TASK | Set platform/version specific variables 2026-01-22 12:47:05.711394 | fa163e0d-6f45-64a1-ca76-000000002a94 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:47:05.741182 | fa163e0d-6f45-64a1-ca76-000000002a94 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:47:05.768528 | fa163e0d-6f45-64a1-ca76-000000002a94 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.yml 2026-01-22 12:47:05.788496 | fa163e0d-6f45-64a1-ca76-000000002a94 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.2.yml 2026-01-22 12:47:05.794449 | fa163e0d-6f45-64a1-ca76-000000002a94 | TIMING | linux-system-roles.certificate : Set platform/version specific variables | standalone | 0:07:59.759448 | 0.17s 2026-01-22 12:47:05.825038 | fa163e0d-6f45-64a1-ca76-000000002a65 | TASK | Ensure certificate role dependencies are installed 2026-01-22 12:47:08.889048 | fa163e0d-6f45-64a1-ca76-000000002a65 | OK | Ensure certificate role dependencies are installed | standalone 2026-01-22 12:47:08.890385 | fa163e0d-6f45-64a1-ca76-000000002a65 | TIMING | linux-system-roles.certificate : Ensure certificate role dependencies are installed | standalone | 0:08:02.855395 | 3.06s 2026-01-22 12:47:08.918430 | fa163e0d-6f45-64a1-ca76-000000002a67 | TASK | Ensure provider packages are installed 2026-01-22 12:47:12.359708 | fa163e0d-6f45-64a1-ca76-000000002a67 | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:08:06.324701 | 3.44s 2026-01-22 12:47:12.371830 | fa163e0d-6f45-64a1-ca76-000000002a67 | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:08:06.336832 | 3.45s 2026-01-22 12:47:12.401033 | fa163e0d-6f45-64a1-ca76-000000002a69 | TASK | Ensure pre-scripts hooks directory exists 2026-01-22 12:47:12.701294 | fa163e0d-6f45-64a1-ca76-000000002a69 | OK | Ensure pre-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:47:12.702972 | fa163e0d-6f45-64a1-ca76-000000002a69 | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:08:06.667973 | 0.30s 2026-01-22 12:47:12.709087 | fa163e0d-6f45-64a1-ca76-000000002a69 | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:08:06.674095 | 0.31s 2026-01-22 12:47:12.731789 | fa163e0d-6f45-64a1-ca76-000000002a6b | TASK | Ensure post-scripts hooks directory exists 2026-01-22 12:47:13.023210 | fa163e0d-6f45-64a1-ca76-000000002a6b | OK | Ensure post-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:47:13.024689 | fa163e0d-6f45-64a1-ca76-000000002a6b | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:08:06.989669 | 0.29s 2026-01-22 12:47:13.030935 | fa163e0d-6f45-64a1-ca76-000000002a6b | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:08:06.995936 | 0.30s 2026-01-22 12:47:13.059747 | fa163e0d-6f45-64a1-ca76-000000002a6d | TASK | Ensure provider service is running 2026-01-22 12:47:13.498156 | fa163e0d-6f45-64a1-ca76-000000002a6d | OK | Ensure provider service is running | standalone | item=certmonger 2026-01-22 12:47:13.501055 | fa163e0d-6f45-64a1-ca76-000000002a6d | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:08:07.466062 | 0.44s 
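[editor's sketch] The recurring "Ensure certificate requests" tasks in this log drive certmonger through linux-system-roles.certificate, using the ca/dns/key_size/name/principal fields shown in each item (ovn_controller, ovn_dbs, ovn_metadata, and so on). A roughly equivalent manual request for one such certificate, assuming an IPA-enrolled host with certmonger running, is shown below; the service name and file paths are illustrative placeholders, not values taken from this log.

#!/bin/bash
# Minimal sketch of a single certmonger request equivalent to one of the
# "Ensure certificate requests" items above. Assumes an IPA-enrolled host
# with certmonger running; service name and paths are illustrative.
service=ovn_metadata
fqdn=standalone.internalapi.ooo.test
realm=OOO.TEST

# -c IPA  : use the IPA CA configured in certmonger (ca: 'ipa' in the items)
# -g 2048 : key size, matching key_size: '2048'
# -D/-K   : DNS SAN and Kerberos principal, matching the dns/principal fields
getcert request -c IPA -g 2048 \
    -f "/etc/pki/tls/certs/${service}.crt" \
    -k "/etc/pki/tls/private/${service}.key" \
    -D "${fqdn}" \
    -K "${service}/${fqdn}@${realm}"

# certmonger keeps tracking the request and renews the certificate itself.
getcert list
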
2026-01-22 12:47:13.511973 | fa163e0d-6f45-64a1-ca76-000000002a6d | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:08:07.476975 | 0.45s 2026-01-22 12:47:13.542985 | fa163e0d-6f45-64a1-ca76-000000002a70 | TASK | Ensure certificate requests 2026-01-22 12:47:15.304071 | fa163e0d-6f45-64a1-ca76-000000002a70 | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': 'standalone.internalapi.ooo.test', 'key_size': '2048', 'name': 'ovn_metadata', 'principal': 'ovn_metadata/standalone.internalapi.ooo.test@OOO.TEST'} 2026-01-22 12:47:15.306996 | fa163e0d-6f45-64a1-ca76-000000002a70 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:08:09.271995 | 1.76s 2026-01-22 12:47:15.317167 | fa163e0d-6f45-64a1-ca76-000000002a70 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:08:09.282178 | 1.77s 2026-01-22 12:47:15.420603 | fa163e0d-6f45-64a1-ca76-0000000022a0 | TIMING | include_role : linux-system-roles.certificate | standalone | 0:08:09.385603 | 0.05s 2026-01-22 12:47:15.487544 | fa163e0d-6f45-64a1-ca76-000000002ad8 | TASK | Set version specific variables 2026-01-22 12:47:15.533788 | fa163e0d-6f45-64a1-ca76-000000002ad8 | TIMING | linux-system-roles.certificate : Set version specific variables | standalone | 0:08:09.498796 | 0.05s 2026-01-22 12:47:15.552930 | 8be293a1-67ef-4e05-a02b-56ef870f3474 | INCLUDED | /usr/share/ansible/roles/rhel-system-roles.certificate/tasks/set_vars.yml | standalone 2026-01-22 12:47:15.585400 | fa163e0d-6f45-64a1-ca76-000000002b07 | TASK | Ensure ansible_facts used by role 2026-01-22 12:47:15.950092 | fa163e0d-6f45-64a1-ca76-000000002b07 | OK | Ensure ansible_facts used by role | standalone 2026-01-22 12:47:15.951626 | fa163e0d-6f45-64a1-ca76-000000002b07 | TIMING | linux-system-roles.certificate : Ensure ansible_facts used by role | standalone | 0:08:09.916634 | 0.36s 2026-01-22 12:47:15.980121 | fa163e0d-6f45-64a1-ca76-000000002b08 | TASK | Set platform/version specific variables 2026-01-22 12:47:16.037870 | fa163e0d-6f45-64a1-ca76-000000002b08 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:47:16.055137 | fa163e0d-6f45-64a1-ca76-000000002b08 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:47:16.068691 | fa163e0d-6f45-64a1-ca76-000000002b08 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.yml 2026-01-22 12:47:16.079281 | fa163e0d-6f45-64a1-ca76-000000002b08 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.2.yml 2026-01-22 12:47:16.084583 | fa163e0d-6f45-64a1-ca76-000000002b08 | TIMING | linux-system-roles.certificate : Set platform/version specific variables | standalone | 0:08:10.049600 | 0.10s 2026-01-22 12:47:16.103222 | fa163e0d-6f45-64a1-ca76-000000002ad9 | TASK | Ensure certificate role dependencies are installed 2026-01-22 12:47:19.189012 | fa163e0d-6f45-64a1-ca76-000000002ad9 | OK | Ensure certificate role dependencies are installed | standalone 2026-01-22 12:47:19.190752 | fa163e0d-6f45-64a1-ca76-000000002ad9 | TIMING | linux-system-roles.certificate : Ensure certificate role dependencies are installed | standalone | 0:08:13.155754 | 3.09s 2026-01-22 12:47:19.228121 | fa163e0d-6f45-64a1-ca76-000000002adb | TASK | Ensure provider packages are installed 2026-01-22 12:47:22.131130 | fa163e0d-6f45-64a1-ca76-000000002adb | TIMING | linux-system-roles.certificate : Ensure 
provider packages are installed | standalone | 0:08:16.096128 | 2.90s 2026-01-22 12:47:22.137609 | fa163e0d-6f45-64a1-ca76-000000002adb | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:08:16.102618 | 2.91s 2026-01-22 12:47:22.170965 | fa163e0d-6f45-64a1-ca76-000000002add | TASK | Ensure pre-scripts hooks directory exists 2026-01-22 12:47:22.450368 | fa163e0d-6f45-64a1-ca76-000000002add | OK | Ensure pre-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:47:22.452309 | fa163e0d-6f45-64a1-ca76-000000002add | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:08:16.417316 | 0.28s 2026-01-22 12:47:22.462535 | fa163e0d-6f45-64a1-ca76-000000002add | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:08:16.427548 | 0.29s 2026-01-22 12:47:22.491314 | fa163e0d-6f45-64a1-ca76-000000002adf | TASK | Ensure post-scripts hooks directory exists 2026-01-22 12:47:22.773816 | fa163e0d-6f45-64a1-ca76-000000002adf | OK | Ensure post-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:47:22.775129 | fa163e0d-6f45-64a1-ca76-000000002adf | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:08:16.740135 | 0.28s 2026-01-22 12:47:22.786983 | fa163e0d-6f45-64a1-ca76-000000002adf | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:08:16.751995 | 0.29s 2026-01-22 12:47:22.817745 | fa163e0d-6f45-64a1-ca76-000000002ae1 | TASK | Ensure provider service is running 2026-01-22 12:47:23.290536 | fa163e0d-6f45-64a1-ca76-000000002ae1 | OK | Ensure provider service is running | standalone | item=certmonger 2026-01-22 12:47:23.293118 | fa163e0d-6f45-64a1-ca76-000000002ae1 | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:08:17.258122 | 0.47s 2026-01-22 12:47:23.303741 | fa163e0d-6f45-64a1-ca76-000000002ae1 | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:08:17.268744 | 0.48s 2026-01-22 12:47:23.333739 | fa163e0d-6f45-64a1-ca76-000000002ae4 | TASK | Ensure certificate requests 2026-01-22 12:47:25.415182 | fa163e0d-6f45-64a1-ca76-000000002ae4 | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': 'standalone.internalapi.ooo.test', 'key_size': '2048', 'name': 'ovn_octavia', 'principal': 'ovn_octavia/standalone.internalapi.ooo.test@OOO.TEST'} 2026-01-22 12:47:25.417723 | fa163e0d-6f45-64a1-ca76-000000002ae4 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:08:19.382724 | 2.08s 2026-01-22 12:47:25.428163 | fa163e0d-6f45-64a1-ca76-000000002ae4 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:08:19.393168 | 2.09s 2026-01-22 12:47:25.541419 | fa163e0d-6f45-64a1-ca76-0000000022a2 | TIMING | include_role : linux-system-roles.certificate | standalone | 0:08:19.506416 | 0.05s 2026-01-22 12:47:25.609639 | fa163e0d-6f45-64a1-ca76-000000002b4c | TASK | Set version specific variables 2026-01-22 12:47:25.657259 | fa163e0d-6f45-64a1-ca76-000000002b4c | TIMING | linux-system-roles.certificate : Set version specific variables | standalone | 0:08:19.622257 | 0.05s 2026-01-22 12:47:25.680617 | 945363ac-6a0e-4a50-82ca-672a62063487 | INCLUDED | /usr/share/ansible/roles/rhel-system-roles.certificate/tasks/set_vars.yml | standalone 2026-01-22 12:47:25.707805 | 
fa163e0d-6f45-64a1-ca76-000000002b7b | TASK | Ensure ansible_facts used by role 2026-01-22 12:47:26.104510 | fa163e0d-6f45-64a1-ca76-000000002b7b | OK | Ensure ansible_facts used by role | standalone 2026-01-22 12:47:26.106053 | fa163e0d-6f45-64a1-ca76-000000002b7b | TIMING | linux-system-roles.certificate : Ensure ansible_facts used by role | standalone | 0:08:20.071061 | 0.40s 2026-01-22 12:47:26.135225 | fa163e0d-6f45-64a1-ca76-000000002b7c | TASK | Set platform/version specific variables 2026-01-22 12:47:26.226873 | fa163e0d-6f45-64a1-ca76-000000002b7c | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:47:26.257745 | fa163e0d-6f45-64a1-ca76-000000002b7c | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:47:26.284332 | fa163e0d-6f45-64a1-ca76-000000002b7c | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.yml 2026-01-22 12:47:26.304902 | fa163e0d-6f45-64a1-ca76-000000002b7c | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.2.yml 2026-01-22 12:47:26.310861 | fa163e0d-6f45-64a1-ca76-000000002b7c | TIMING | linux-system-roles.certificate : Set platform/version specific variables | standalone | 0:08:20.275862 | 0.17s 2026-01-22 12:47:26.340209 | fa163e0d-6f45-64a1-ca76-000000002b4d | TASK | Ensure certificate role dependencies are installed 2026-01-22 12:47:29.236979 | fa163e0d-6f45-64a1-ca76-000000002b4d | OK | Ensure certificate role dependencies are installed | standalone 2026-01-22 12:47:29.238366 | fa163e0d-6f45-64a1-ca76-000000002b4d | TIMING | linux-system-roles.certificate : Ensure certificate role dependencies are installed | standalone | 0:08:23.203373 | 2.90s 2026-01-22 12:47:29.269132 | fa163e0d-6f45-64a1-ca76-000000002b4f | TASK | Ensure provider packages are installed 2026-01-22 12:47:32.265105 | fa163e0d-6f45-64a1-ca76-000000002b4f | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:08:26.230105 | 2.99s 2026-01-22 12:47:32.277353 | fa163e0d-6f45-64a1-ca76-000000002b4f | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:08:26.242365 | 3.01s 2026-01-22 12:47:32.307245 | fa163e0d-6f45-64a1-ca76-000000002b51 | TASK | Ensure pre-scripts hooks directory exists 2026-01-22 12:47:32.597093 | fa163e0d-6f45-64a1-ca76-000000002b51 | OK | Ensure pre-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:47:32.598568 | fa163e0d-6f45-64a1-ca76-000000002b51 | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:08:26.563576 | 0.29s 2026-01-22 12:47:32.604495 | fa163e0d-6f45-64a1-ca76-000000002b51 | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:08:26.569508 | 0.30s 2026-01-22 12:47:32.634768 | fa163e0d-6f45-64a1-ca76-000000002b53 | TASK | Ensure post-scripts hooks directory exists 2026-01-22 12:47:32.929154 | fa163e0d-6f45-64a1-ca76-000000002b53 | OK | Ensure post-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:47:32.930893 | fa163e0d-6f45-64a1-ca76-000000002b53 | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:08:26.895899 | 0.29s 2026-01-22 12:47:32.940258 | fa163e0d-6f45-64a1-ca76-000000002b53 | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:08:26.905279 | 0.30s 2026-01-22 12:47:32.959323 
| fa163e0d-6f45-64a1-ca76-000000002b55 | TASK | Ensure provider service is running 2026-01-22 12:47:33.375804 | fa163e0d-6f45-64a1-ca76-000000002b55 | OK | Ensure provider service is running | standalone | item=certmonger 2026-01-22 12:47:33.377541 | fa163e0d-6f45-64a1-ca76-000000002b55 | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:08:27.342549 | 0.42s 2026-01-22 12:47:33.391895 | fa163e0d-6f45-64a1-ca76-000000002b55 | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:08:27.356906 | 0.43s 2026-01-22 12:47:33.421832 | fa163e0d-6f45-64a1-ca76-000000002b58 | TASK | Ensure certificate requests 2026-01-22 12:47:35.641875 | fa163e0d-6f45-64a1-ca76-000000002b58 | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': 'standalone.internalapi.ooo.test', 'key_size': '2048', 'name': 'rabbitmq', 'principal': 'rabbitmq/standalone.internalapi.ooo.test@OOO.TEST', 'run_after': 'container_name=$(podman ps --format=\\{\\{.Names\\}\\} | grep -w -E \'rabbitmq(-bundle-.*-[0-9]+)?\')\nservice_crt="/etc/pki/tls/certs/rabbitmq.crt"\nservice_key="/etc/pki/tls/private/rabbitmq.key"\nif echo "$container_name" | grep -q "^rabbitmq-bundle"; then\n # lp#1917868: Do not use podman cp with HA containers as they get\n # frozen temporarily and that can make pacemaker operation fail.\n tar -c "$service_crt" "$service_key" | podman exec -i "$container_name" tar -C / -xv\n # no need to update the mount point, because pacemaker\n # recreates the container when it\'s restarted\nelse\n # Refresh the cert at the mount-point\n podman cp $service_crt "$container_name:/var/lib/kolla/config_files/src-tls/$service_crt"\n # Refresh the key at the mount-point\n podman cp $service_key "$container_name:/var/lib/kolla/config_files/src-tls/$service_key"\n # Copy the new cert from the mount-point to the real path\n podman exec -u root "$container_name" cp "/var/lib/kolla/config_files/src-tls$service_crt" "$service_crt"\n # Copy the new key from the mount-point to the real path\n podman exec -u root "$container_name" cp "/var/lib/kolla/config_files/src-tls$service_key" "$service_key"\nfi\n# Set appropriate permissions\npodman exec -u root "$container_name" chown rabbitmq:rabbitmq "$service_crt"\npodman exec -u root "$container_name" chown rabbitmq:rabbitmq "$service_key"\n# Trigger a pem cache clear in RabbitMQ to read the new certificates\npodman exec "$container_name" rabbitmqctl eval "ssl:clear_pem_cache()."\n'} 2026-01-22 12:47:35.643431 | fa163e0d-6f45-64a1-ca76-000000002b58 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:08:29.608436 | 2.22s 2026-01-22 12:47:35.652606 | fa163e0d-6f45-64a1-ca76-000000002b58 | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:08:29.617610 | 2.23s 2026-01-22 12:47:35.708340 | fa163e0d-6f45-64a1-ca76-0000000022a9 | TASK | Fail if container image is undefined 2026-01-22 12:47:35.772015 | fa163e0d-6f45-64a1-ca76-0000000022a9 | SKIPPED | Fail if container image is undefined | standalone 2026-01-22 12:47:35.773177 | fa163e0d-6f45-64a1-ca76-0000000022a9 | TIMING | tripleo_container_tag : Fail if container image is undefined | standalone | 0:08:29.738183 | 0.06s 2026-01-22 12:47:35.820190 | fa163e0d-6f45-64a1-ca76-0000000022aa | TASK | Pull registry.redhat.io/rhosp-rhel9/openstack-rabbitmq:17.1 image 2026-01-22 12:47:42.013803 | fa163e0d-6f45-64a1-ca76-0000000022aa | CHANGED | Pull 
registry.redhat.io/rhosp-rhel9/openstack-rabbitmq:17.1 image | standalone 2026-01-22 12:47:42.015072 | fa163e0d-6f45-64a1-ca76-0000000022aa | TIMING | tripleo_container_tag : Pull registry.redhat.io/rhosp-rhel9/openstack-rabbitmq:17.1 image | standalone | 0:08:35.980086 | 6.19s 2026-01-22 12:47:42.057487 | fa163e0d-6f45-64a1-ca76-0000000022ab | TASK | Tag cluster.common.tag/rabbitmq:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-rabbitmq:17.1 image 2026-01-22 12:47:42.401389 | fa163e0d-6f45-64a1-ca76-0000000022ab | CHANGED | Tag cluster.common.tag/rabbitmq:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-rabbitmq:17.1 image | standalone 2026-01-22 12:47:42.402714 | fa163e0d-6f45-64a1-ca76-0000000022ab | TIMING | tripleo_container_tag : Tag cluster.common.tag/rabbitmq:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-rabbitmq:17.1 image | standalone | 0:08:36.367723 | 0.34s 2026-01-22 12:47:42.454454 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TASK | Pre-fetch all the containers 2026-01-22 12:47:47.060452 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-ovn-nb-db-server:17.1 2026-01-22 12:47:47.061985 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:08:41.026999 | 4.61s 2026-01-22 12:47:51.663040 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-ovn-northd:17.1 2026-01-22 12:47:51.666007 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:08:45.631015 | 9.21s 2026-01-22 12:47:56.633553 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-ovn-sb-db-server:17.1 2026-01-22 12:47:56.635932 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:08:50.600949 | 14.18s 2026-01-22 12:48:02.100257 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-memcached:17.1 2026-01-22 12:48:02.103817 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:08:56.068825 | 19.65s 2026-01-22 12:48:04.265346 | fa163e0d-6f45-64a1-ca76-0000000022b6 | OK | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-mariadb:17.1 2026-01-22 12:48:04.269666 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:08:58.234658 | 21.81s 2026-01-22 12:48:06.569400 | fa163e0d-6f45-64a1-ca76-0000000022b6 | OK | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-rabbitmq:17.1 2026-01-22 12:48:06.572871 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:09:00.537878 | 24.12s 2026-01-22 12:48:11.740427 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-redis:17.1 2026-01-22 12:48:11.743082 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:09:05.708091 | 29.29s 2026-01-22 12:48:16.990380 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-aodh-api:17.1 2026-01-22 12:48:16.994153 | 
fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:09:10.959160 | 34.54s 2026-01-22 12:48:21.067344 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-cinder-api:17.1 2026-01-22 12:48:21.070175 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:09:15.035185 | 38.61s 2026-01-22 12:48:23.797575 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-cinder-scheduler:17.1 2026-01-22 12:48:23.800605 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:09:17.765614 | 41.34s 2026-01-22 12:48:38.756609 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-neutron-dhcp-agent:17.1 2026-01-22 12:48:38.760641 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:09:32.725649 | 56.30s 2026-01-22 12:48:47.872948 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-neutron-metadata-agent-ovn:17.1 2026-01-22 12:48:47.877737 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:09:41.842738 | 65.42s 2026-01-22 12:49:07.750940 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-nova-libvirt:17.1 2026-01-22 12:49:07.752767 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:10:01.717783 | 85.30s 2026-01-22 12:49:17.428555 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-glance-api:17.1 2026-01-22 12:49:17.434576 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:10:11.399579 | 94.98s 2026-01-22 12:49:26.506970 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-gnocchi-api:17.1 2026-01-22 12:49:26.512072 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:10:20.477076 | 104.06s 2026-01-22 12:49:36.471053 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-heat-engine:17.1 2026-01-22 12:49:36.475796 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:10:30.440803 | 114.02s 2026-01-22 12:49:43.942336 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-horizon:17.1 2026-01-22 12:49:43.945019 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:10:37.910028 | 121.49s 2026-01-22 12:49:49.020016 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-keystone:17.1 2026-01-22 12:49:49.021832 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:10:42.986847 | 126.57s 2026-01-22 12:49:52.638730 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | 
standalone | item=registry.redhat.io/rhosp-rhel9/openstack-manila-api:17.1 2026-01-22 12:49:52.641751 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:10:46.606759 | 130.19s 2026-01-22 12:49:56.501641 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-neutron-server:17.1 2026-01-22 12:49:56.505782 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:10:50.470791 | 134.05s 2026-01-22 12:50:01.919175 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-nova-api:17.1 2026-01-22 12:50:01.922435 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:10:55.887442 | 139.47s 2026-01-22 12:50:14.712499 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-nova-compute:17.1 2026-01-22 12:50:14.715547 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:11:08.680555 | 152.26s 2026-01-22 12:50:18.033813 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-nova-conductor:17.1 2026-01-22 12:50:18.037053 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:11:12.002061 | 155.58s 2026-01-22 12:50:32.220190 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-octavia-api:17.1 2026-01-22 12:50:32.223102 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:11:26.188110 | 169.77s 2026-01-22 12:50:35.300852 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-octavia-health-manager:17.1 2026-01-22 12:50:35.302710 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:11:29.267725 | 172.85s 2026-01-22 12:50:48.012132 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-octavia-housekeeping:17.1 2026-01-22 12:50:48.015131 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:11:41.980140 | 185.56s 2026-01-22 12:50:50.688434 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-octavia-worker:17.1 2026-01-22 12:50:50.692104 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:11:44.657114 | 188.24s 2026-01-22 12:50:54.511398 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-placement-api:17.1 2026-01-22 12:50:54.515247 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:11:48.480252 | 192.06s 2026-01-22 12:50:58.981754 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-ceilometer-notification:17.1 2026-01-22 12:50:58.984552 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 
0:11:52.949561 | 196.53s 2026-01-22 12:51:01.219062 | fa163e0d-6f45-64a1-ca76-0000000022b6 | OK | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-cinder-backup:17.1 2026-01-22 12:51:01.222870 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:11:55.187879 | 198.77s 2026-01-22 12:51:04.513905 | fa163e0d-6f45-64a1-ca76-0000000022b6 | OK | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-cinder-volume:17.1 2026-01-22 12:51:04.516087 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:11:58.481100 | 202.06s 2026-01-22 12:51:07.215812 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-iscsid:17.1 2026-01-22 12:51:07.219006 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:12:01.184011 | 204.76s 2026-01-22 12:51:09.661054 | fa163e0d-6f45-64a1-ca76-0000000022b6 | OK | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-manila-share:17.1 2026-01-22 12:51:09.662888 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:12:03.627901 | 207.21s 2026-01-22 12:51:13.809417 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-swift-proxy-server:17.1 2026-01-22 12:51:13.813468 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:12:07.778476 | 211.36s 2026-01-22 12:51:16.437526 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-swift-account:17.1 2026-01-22 12:51:16.440537 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:12:10.405546 | 213.98s 2026-01-22 12:51:19.694740 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-aodh-evaluator:17.1 2026-01-22 12:51:19.697792 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:12:13.662801 | 217.24s 2026-01-22 12:51:22.531521 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-aodh-listener:17.1 2026-01-22 12:51:22.534697 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:12:16.499703 | 220.08s 2026-01-22 12:51:25.434476 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-aodh-notifier:17.1 2026-01-22 12:51:25.439033 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:12:19.404039 | 222.98s 2026-01-22 12:51:29.065016 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-ceilometer-central:17.1 2026-01-22 12:51:29.069485 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:12:23.034485 | 226.61s 2026-01-22 12:51:31.999728 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-ceilometer-compute:17.1 2026-01-22 
12:51:32.003348 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:12:25.968357 | 229.55s 2026-01-22 12:51:34.687251 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-ovn-controller:17.1 2026-01-22 12:51:34.690785 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:12:28.655794 | 232.23s 2026-01-22 12:51:37.610042 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-heat-api:17.1 2026-01-22 12:51:37.614528 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:12:31.579514 | 235.16s 2026-01-22 12:51:40.532255 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-heat-api-cfn:17.1 2026-01-22 12:51:40.534617 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:12:34.499631 | 238.08s 2026-01-22 12:51:43.485551 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-cron:17.1 2026-01-22 12:51:43.488156 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:12:37.453166 | 241.03s 2026-01-22 12:51:45.702391 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-manila-scheduler:17.1 2026-01-22 12:51:45.706950 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:12:39.671952 | 243.25s 2026-01-22 12:51:48.833388 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-neutron-sriov-agent:17.1 2026-01-22 12:51:48.835273 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:12:42.800288 | 246.38s 2026-01-22 12:51:51.998889 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-nova-scheduler:17.1 2026-01-22 12:51:52.002900 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:12:45.967908 | 249.55s 2026-01-22 12:51:55.610145 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-nova-novncproxy:17.1 2026-01-22 12:51:55.613058 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:12:49.578066 | 253.16s 2026-01-22 12:51:58.612807 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-swift-container:17.1 2026-01-22 12:51:58.616625 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:12:52.581634 | 256.16s 2026-01-22 12:52:02.810091 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-swift-object:17.1 2026-01-22 12:52:02.813939 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:12:56.778948 | 260.36s 2026-01-22 12:52:20.717950 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch 
all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-gnocchi-metricd:17.1 2026-01-22 12:52:20.720934 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:13:14.685941 | 278.26s 2026-01-22 12:52:23.474489 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-gnocchi-statsd:17.1 2026-01-22 12:52:23.478696 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:13:17.443704 | 281.02s 2026-01-22 12:52:26.917894 | fa163e0d-6f45-64a1-ca76-0000000022b6 | CHANGED | Pre-fetch all the containers | standalone | item=registry.redhat.io/rhosp-rhel9/openstack-rsyslog:17.1 2026-01-22 12:52:26.920401 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:13:20.885404 | 284.46s 2026-01-22 12:52:27.245938 | fa163e0d-6f45-64a1-ca76-0000000022b6 | TIMING | Pre-fetch all the containers | standalone | 0:13:21.210946 | 284.79s 2026-01-22 12:52:27.471059 | fa163e0d-6f45-64a1-ca76-0000000022b9 | TIMING | include_role : linux-system-roles.certificate | standalone | 0:13:21.436069 | 0.08s 2026-01-22 12:52:27.522778 | fa163e0d-6f45-64a1-ca76-000000002c32 | TASK | Set version specific variables 2026-01-22 12:52:27.572304 | fa163e0d-6f45-64a1-ca76-000000002c32 | TIMING | linux-system-roles.certificate : Set version specific variables | standalone | 0:13:21.537317 | 0.05s 2026-01-22 12:52:27.583290 | 95a97eb6-0f9f-46c2-8795-06f69e54a7da | INCLUDED | /usr/share/ansible/roles/rhel-system-roles.certificate/tasks/set_vars.yml | standalone 2026-01-22 12:52:27.603517 | fa163e0d-6f45-64a1-ca76-000000002c61 | TASK | Ensure ansible_facts used by role 2026-01-22 12:52:27.989969 | fa163e0d-6f45-64a1-ca76-000000002c61 | OK | Ensure ansible_facts used by role | standalone 2026-01-22 12:52:27.991601 | fa163e0d-6f45-64a1-ca76-000000002c61 | TIMING | linux-system-roles.certificate : Ensure ansible_facts used by role | standalone | 0:13:21.956609 | 0.39s 2026-01-22 12:52:28.078874 | fa163e0d-6f45-64a1-ca76-000000002c62 | TASK | Set platform/version specific variables 2026-01-22 12:52:28.176019 | fa163e0d-6f45-64a1-ca76-000000002c62 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:52:28.206242 | fa163e0d-6f45-64a1-ca76-000000002c62 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat.yml 2026-01-22 12:52:28.234481 | fa163e0d-6f45-64a1-ca76-000000002c62 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.yml 2026-01-22 12:52:28.254374 | fa163e0d-6f45-64a1-ca76-000000002c62 | SKIPPED | Set platform/version specific variables | standalone | item=RedHat_9.2.yml 2026-01-22 12:52:28.260336 | fa163e0d-6f45-64a1-ca76-000000002c62 | TIMING | linux-system-roles.certificate : Set platform/version specific variables | standalone | 0:13:22.225337 | 0.18s 2026-01-22 12:52:28.290064 | fa163e0d-6f45-64a1-ca76-000000002c33 | TASK | Ensure certificate role dependencies are installed 2026-01-22 12:52:31.274869 | fa163e0d-6f45-64a1-ca76-000000002c33 | OK | Ensure certificate role dependencies are installed | standalone 2026-01-22 12:52:31.276605 | fa163e0d-6f45-64a1-ca76-000000002c33 | TIMING | linux-system-roles.certificate : Ensure certificate role dependencies are installed | standalone | 0:13:25.241605 | 2.98s 2026-01-22 12:52:31.305950 | fa163e0d-6f45-64a1-ca76-000000002c35 | TASK | Ensure provider packages are 
installed 2026-01-22 12:52:34.245710 | fa163e0d-6f45-64a1-ca76-000000002c35 | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:13:28.210708 | 2.94s 2026-01-22 12:52:34.257403 | fa163e0d-6f45-64a1-ca76-000000002c35 | TIMING | linux-system-roles.certificate : Ensure provider packages are installed | standalone | 0:13:28.222412 | 2.95s 2026-01-22 12:52:34.280478 | fa163e0d-6f45-64a1-ca76-000000002c37 | TASK | Ensure pre-scripts hooks directory exists 2026-01-22 12:52:34.579393 | fa163e0d-6f45-64a1-ca76-000000002c37 | OK | Ensure pre-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:52:34.581637 | fa163e0d-6f45-64a1-ca76-000000002c37 | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:13:28.546644 | 0.30s 2026-01-22 12:52:34.591874 | fa163e0d-6f45-64a1-ca76-000000002c37 | TIMING | linux-system-roles.certificate : Ensure pre-scripts hooks directory exists | standalone | 0:13:28.556877 | 0.31s 2026-01-22 12:52:34.622025 | fa163e0d-6f45-64a1-ca76-000000002c39 | TASK | Ensure post-scripts hooks directory exists 2026-01-22 12:52:34.918041 | fa163e0d-6f45-64a1-ca76-000000002c39 | OK | Ensure post-scripts hooks directory exists | standalone | item=certmonger 2026-01-22 12:52:34.920151 | fa163e0d-6f45-64a1-ca76-000000002c39 | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:13:28.885156 | 0.30s 2026-01-22 12:52:34.930230 | fa163e0d-6f45-64a1-ca76-000000002c39 | TIMING | linux-system-roles.certificate : Ensure post-scripts hooks directory exists | standalone | 0:13:28.895232 | 0.31s 2026-01-22 12:52:34.959689 | fa163e0d-6f45-64a1-ca76-000000002c3b | TASK | Ensure provider service is running 2026-01-22 12:52:36.410144 | fa163e0d-6f45-64a1-ca76-000000002c3b | OK | Ensure provider service is running | standalone | item=certmonger 2026-01-22 12:52:36.413502 | fa163e0d-6f45-64a1-ca76-000000002c3b | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:13:30.378497 | 1.45s 2026-01-22 12:52:36.423303 | fa163e0d-6f45-64a1-ca76-000000002c3b | TIMING | linux-system-roles.certificate : Ensure provider service is running | standalone | 0:13:30.388314 | 1.46s 2026-01-22 12:52:36.453101 | fa163e0d-6f45-64a1-ca76-000000002c3e | TASK | Ensure certificate requests 2026-01-22 12:52:38.878108 | fa163e0d-6f45-64a1-ca76-000000002c3e | CHANGED | Ensure certificate requests | standalone | item={'ca': 'ipa', 'dns': ['standalone.internalapi.ooo.test', 'overcloud.internalapi.ooo.test'], 'key_size': '2048', 'name': 'redis', 'principal': 'redis/standalone.internalapi.ooo.test@OOO.TEST', 'run_after': 'container_name=$(podman ps --format=\\{\\{.Names\\}\\} | grep redis_tls_proxy)\nservice_crt="/etc/pki/tls/certs/redis.crt"\nservice_key="/etc/pki/tls/private/redis.key"\n# Copy the new cert from the mount-point to the real path\npodman exec "$container_name" cp "/var/lib/kolla/config_files/src-tls$service_crt" "$service_crt"\n# Copy the new cert from the mount-point to the real path\npodman exec "$container_name" cp "/var/lib/kolla/config_files/src-tls$service_key" "$service_key"\n# Set appropriate permissions\npodman exec "$container_name" chown memcached:memcached "$service_crt"\npodman exec "$container_name" chown memcached:memcached "$service_key"\n# Trigger a reload for stunnel to read the new certificate\npodman exec pkill -o -HUP stunnel\n'} 2026-01-22 12:52:38.880443 | fa163e0d-6f45-64a1-ca76-000000002c3e | 
TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:13:32.845457 | 2.43s 2026-01-22 12:52:38.891359 | fa163e0d-6f45-64a1-ca76-000000002c3e | TIMING | linux-system-roles.certificate : Ensure certificate requests | standalone | 0:13:32.856359 | 2.44s 2026-01-22 12:52:38.949577 | fa163e0d-6f45-64a1-ca76-0000000022c0 | TASK | Fail if container image is undefined 2026-01-22 12:52:39.014209 | fa163e0d-6f45-64a1-ca76-0000000022c0 | SKIPPED | Fail if container image is undefined | standalone 2026-01-22 12:52:39.015433 | fa163e0d-6f45-64a1-ca76-0000000022c0 | TIMING | tripleo_container_tag : Fail if container image is undefined | standalone | 0:13:32.980435 | 0.06s 2026-01-22 12:52:39.070601 | fa163e0d-6f45-64a1-ca76-0000000022c1 | TASK | Pull registry.redhat.io/rhosp-rhel9/openstack-redis:17.1 image 2026-01-22 12:52:41.509246 | fa163e0d-6f45-64a1-ca76-0000000022c1 | CHANGED | Pull registry.redhat.io/rhosp-rhel9/openstack-redis:17.1 image | standalone 2026-01-22 12:52:41.510673 | fa163e0d-6f45-64a1-ca76-0000000022c1 | TIMING | tripleo_container_tag : Pull registry.redhat.io/rhosp-rhel9/openstack-redis:17.1 image | standalone | 0:13:35.475656 | 2.44s 2026-01-22 12:52:41.568188 | fa163e0d-6f45-64a1-ca76-0000000022c2 | TASK | Tag cluster.common.tag/redis:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-redis:17.1 image 2026-01-22 12:52:41.913383 | fa163e0d-6f45-64a1-ca76-0000000022c2 | CHANGED | Tag cluster.common.tag/redis:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-redis:17.1 image | standalone 2026-01-22 12:52:41.914809 | fa163e0d-6f45-64a1-ca76-0000000022c2 | TIMING | tripleo_container_tag : Tag cluster.common.tag/redis:pcmklatest to latest registry.redhat.io/rhosp-rhel9/openstack-redis:17.1 image | standalone | 0:13:35.879816 | 0.35s 2026-01-22 12:52:41.970022 | fa163e0d-6f45-64a1-ca76-0000000022cd | TASK | Check if rsyslog exists 2026-01-22 12:52:42.212526 | fa163e0d-6f45-64a1-ca76-0000000022cd | CHANGED | Check if rsyslog exists | standalone 2026-01-22 12:52:42.213953 | fa163e0d-6f45-64a1-ca76-0000000022cd | TIMING | Check if rsyslog exists | standalone | 0:13:36.178962 | 0.24s 2026-01-22 12:52:42.236587 | fa163e0d-6f45-64a1-ca76-0000000022ce | TASK | Forward logging to swift.log file 2026-01-22 12:52:42.655616 | fa163e0d-6f45-64a1-ca76-0000000022ce | CHANGED | Forward logging to swift.log file | standalone 2026-01-22 12:52:42.657676 | fa163e0d-6f45-64a1-ca76-0000000022ce | TIMING | Forward logging to swift.log file | standalone | 0:13:36.622660 | 0.42s 2026-01-22 12:52:42.685998 | fa163e0d-6f45-64a1-ca76-0000000022cf | TASK | Restart rsyslogd service after logging conf change 2026-01-22 12:52:44.399787 | fa163e0d-6f45-64a1-ca76-0000000022cf | CHANGED | Restart rsyslogd service after logging conf change | standalone 2026-01-22 12:52:44.401674 | fa163e0d-6f45-64a1-ca76-0000000022cf | TIMING | Restart rsyslogd service after logging conf change | standalone | 0:13:38.366659 | 1.71s 2026-01-22 12:52:44.431185 | fa163e0d-6f45-64a1-ca76-0000000000c2 | TASK | Check if /var/lib/tripleo-config/container-startup-config/step_1 already exists 2026-01-22 12:52:44.668747 | fa163e0d-6f45-64a1-ca76-0000000000c2 | OK | Check if /var/lib/tripleo-config/container-startup-config/step_1 already exists | standalone 2026-01-22 12:52:44.670194 | fa163e0d-6f45-64a1-ca76-0000000000c2 | TIMING | Check if /var/lib/tripleo-config/container-startup-config/step_1 already exists | standalone | 0:13:38.635202 | 0.24s 2026-01-22 12:52:44.761562 | 
fa163e0d-6f45-64a1-ca76-0000000000c3 | TIMING | include_tasks | standalone | 0:13:38.726558 | 0.06s 2026-01-22 12:52:44.803462 | 6636a21f-545f-4dd8-85ff-b58bd5bd48a7 | INCLUDED | /root/standalone-ansible-mz1ymllk/common_deploy_steps_tasks.yaml | standalone 2026-01-22 12:52:44.831011 | fa163e0d-6f45-64a1-ca76-000000002ca9 | TASK | Write the config_step hieradata 2026-01-22 12:52:45.315145 | fa163e0d-6f45-64a1-ca76-000000002ca9 | OK | Write the config_step hieradata | standalone 2026-01-22 12:52:45.316535 | fa163e0d-6f45-64a1-ca76-000000002ca9 | TIMING | Write the config_step hieradata | standalone | 0:13:39.281544 | 0.48s 2026-01-22 12:52:45.346218 | fa163e0d-6f45-64a1-ca76-000000002caa | TASK | Run puppet host configuration for step 1 2026-01-22 12:52:45.672586 | fa163e0d-6f45-64a1-ca76-000000002caa | CHANGED | Run puppet host configuration for step 1 | standalone 2026-01-22 12:52:45.673869 | fa163e0d-6f45-64a1-ca76-000000002caa | TIMING | Run puppet host configuration for step 1 | standalone | 0:13:39.638877 | 0.33s 2026-01-22 12:52:45.702896 | fa163e0d-6f45-64a1-ca76-000000002cab | TASK | Wait for puppet host configuration to finish 2026-01-22 12:52:45.955803 | fa163e0d-6f45-64a1-ca76-000000002cab | WAITING | Wait for puppet host configuration to finish | standalone | 360 retries left 2026-01-22 12:52:56.168776 | fa163e0d-6f45-64a1-ca76-000000002cab | WAITING | Wait for puppet host configuration to finish | standalone | 359 retries left 2026-01-22 12:53:06.378258 | fa163e0d-6f45-64a1-ca76-000000002cab | WAITING | Wait for puppet host configuration to finish | standalone | 358 retries left 2026-01-22 12:53:16.604732 | fa163e0d-6f45-64a1-ca76-000000002cab | WAITING | Wait for puppet host configuration to finish | standalone | 357 retries left 2026-01-22 12:53:26.826450 | fa163e0d-6f45-64a1-ca76-000000002cab | WAITING | Wait for puppet host configuration to finish | standalone | 356 retries left 2026-01-22 12:53:37.037740 | fa163e0d-6f45-64a1-ca76-000000002cab | WAITING | Wait for puppet host configuration to finish | standalone | 355 retries left 2026-01-22 12:53:47.268354 | fa163e0d-6f45-64a1-ca76-000000002cab | CHANGED | Wait for puppet host configuration to finish | standalone 2026-01-22 12:53:47.269681 | fa163e0d-6f45-64a1-ca76-000000002cab | TIMING | Wait for puppet host configuration to finish | standalone | 0:14:41.234663 | 61.56s 2026-01-22 12:53:47.302625 | fa163e0d-6f45-64a1-ca76-000000002cac | TASK | Debug output for task: Run puppet host configuration for step 1 2026-01-22 12:53:47.374977 | fa163e0d-6f45-64a1-ca76-000000002cac | CHANGED | Debug output for task: Run puppet host configuration for step 1 | standalone | result={ "changed": true, "failed_when_result": false, "puppet_host_outputs.stdout_lines | default([]) | union(puppet_host_outputs.stderr_lines | default([]))": [ "<13>Jan 22 12:52:45 puppet-user: Warning: /etc/puppet/hiera.yaml: Use of 'hiera.yaml' version 3 is deprecated. It should be converted to version 5", "<13>Jan 22 12:52:51 puppet-user: (file: /etc/puppet/hiera.yaml)", "<13>Jan 22 12:52:51 puppet-user: Warning: Undefined variable '::deploy_config_name'; ", "<13>Jan 22 12:52:51 puppet-user: (file & line not available)", "<13>Jan 22 12:52:51 puppet-user: Warning: The function 'hiera' is deprecated in favor of using 'lookup'. See https://puppet.com/docs/puppet/7.10/deprecated_language.html", "<13>Jan 22 12:52:51 puppet-user: Warning: Unknown variable: '::deployment_type'. 
(file: /etc/puppet/modules/tripleo/manifests/profile/base/database/mysql/client.pp, line: 89, column: 8)", "<13>Jan 22 12:52:51 puppet-user: Warning: Unknown variable: '::deployment_type'. (file: /etc/puppet/modules/tripleo/manifests/packages.pp, line: 39, column: 69)", "<13>Jan 22 12:52:51 puppet-user: Notice: Compiled catalog for standalone.ooo.test in environment production in 0.38 seconds", "<13>Jan 22 12:52:51 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Database::Mysql::Client/Exec[directory-create-etc-my.cnf.d]/returns: executed successfully", "<13>Jan 22 12:52:51 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Database::Mysql::Client/File[/etc/my.cnf.d/tripleo.cnf]/ensure: created", "<13>Jan 22 12:52:51 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Database::Mysql::Client/Augeas[tripleo-mysql-client-conf]/returns: executed successfully", "<13>Jan 22 12:52:59 puppet-user: Notice: /Stage[main]/Pacemaker::Install/Package[pacemaker]/ensure: created", "<13>Jan 22 12:53:07 puppet-user: Notice: /Stage[main]/Pacemaker::Install/Package[pcs]/ensure: created", "<13>Jan 22 12:53:07 puppet-user: Notice: /Stage[main]/Pacemaker::Corosync/File_line[pcsd_bind_addr]/ensure: created", "<13>Jan 22 12:53:07 puppet-user: Notice: /Stage[main]/Pacemaker::Corosync/User[hacluster]/password: ch**********ed [redacted] to [redacted]", "<13>Jan 22 12:53:07 puppet-user: Notice: /Stage[main]/Pacemaker::Corosync/User[hacluster]/groups: groups changed to ['haclient']", "<13>Jan 22 12:53:10 puppet-user: Notice: /Stage[main]/Pacemaker::Service/Service[pcsd]/ensure: ensure changed 'stopped' to 'running'", "<13>Jan 22 12:53:11 puppet-user: Notice: /Stage[main]/Pacemaker::Corosync/Exec[check-for-local-authentication]/returns: executed successfully", "<13>Jan 22 12:53:12 puppet-user: Notice: /Stage[main]/Pacemaker::Corosync/Exec[reauthenticate-across-all-nodes]: Triggered 'refresh' from 3 events", "<13>Jan 22 12:53:15 puppet-user: Notice: /Stage[main]/Pacemaker::Corosync/Exec[Create Cluster tripleo_cluster]/returns: executed successfully", "<13>Jan 22 12:53:17 puppet-user: Notice: /Stage[main]/Pacemaker::Corosync/Exec[Start Cluster tripleo_cluster]/returns: executed successfully", "<13>Jan 22 12:53:18 puppet-user: Notice: /Stage[main]/Pacemaker::Service/Service[corosync]/enable: enable changed 'false' to 'true'", "<13>Jan 22 12:53:18 puppet-user: Notice: /Stage[main]/Pacemaker::Service/Service[pacemaker]/enable: enable changed 'false' to 'true'", "<13>Jan 22 12:53:41 puppet-user: Notice: /Stage[main]/Pacemaker::Corosync/Exec[wait-for-settle]/returns: executed successfully", "<13>Jan 22 12:53:42 puppet-user: Deprecation Warning: This command is deprecated and will be removed. 
Please use 'pcs property config' instead.", "<13>Jan 22 12:53:44 puppet-user: Notice: /Stage[main]/Pacemaker::Stonith/Pacemaker::Property[Disable STONITH]/Pcmk_property[property--stonith-enabled]/ensure: created", "<13>Jan 22 12:53:44 puppet-user: Notice: Applied catalog in 53.14 seconds", "<13>Jan 22 12:53:44 puppet-user: Application:", "<13>Jan 22 12:53:44 puppet-user: Initial environment: production", "<13>Jan 22 12:53:44 puppet-user: Converged environment: production", "<13>Jan 22 12:53:44 puppet-user: Run mode: user", "<13>Jan 22 12:53:44 puppet-user: Changes:", "<13>Jan 22 12:53:44 puppet-user: Total: 17", "<13>Jan 22 12:53:44 puppet-user: Events:", "<13>Jan 22 12:53:44 puppet-user: Success: 17", "<13>Jan 22 12:53:44 puppet-user: Resources:", "<13>Jan 22 12:53:44 puppet-user: Restarted: 1", "<13>Jan 22 12:53:44 puppet-user: Changed: 16", "<13>Jan 22 12:53:44 puppet-user: Out of sync: 16", "<13>Jan 22 12:53:44 puppet-user: Total: 26", "<13>Jan 22 12:53:44 puppet-user: Time:", "<13>Jan 22 12:53:44 puppet-user: Filebucket: 0.00", "<13>Jan 22 12:53:44 puppet-user: Schedule: 0.00", "<13>Jan 22 12:53:44 puppet-user: File line: 0.00", "<13>Jan 22 12:53:44 puppet-user: File: 0.00", "<13>Jan 22 12:53:44 puppet-user: Augeas: 0.08", "<13>Jan 22 12:53:44 puppet-user: Config retrieval: 0.43", "<13>Jan 22 12:53:44 puppet-user: User: 0.59", "<13>Jan 22 12:53:44 puppet-user: Package: 15.14", "<13>Jan 22 12:53:44 puppet-user: Last run: 1769086424", "<13>Jan 22 12:53:44 puppet-user: Exec: 28.70", "<13>Jan 22 12:53:44 puppet-user: Pcmk property: 3.55", "<13>Jan 22 12:53:44 puppet-user: Service: 3.83", "<13>Jan 22 12:53:44 puppet-user: Transaction evaluation: 53.13", "<13>Jan 22 12:53:44 puppet-user: Catalog application: 53.14", "<13>Jan 22 12:53:44 puppet-user: Total: 53.14", "<13>Jan 22 12:53:44 puppet-user: Version:", "<13>Jan 22 12:53:44 puppet-user: Config: 1769086371", "<13>Jan 22 12:53:44 puppet-user: Puppet: 7.10.0" ] } 2026-01-22 12:53:47.376841 | fa163e0d-6f45-64a1-ca76-000000002cac | TIMING | Debug output for task: Run puppet host configuration for step 1 | standalone | 0:14:41.341849 | 0.07s 2026-01-22 12:53:47.407389 | fa163e0d-6f45-64a1-ca76-000000002cad | TASK | Pre-cache facts for puppet containers 2026-01-22 12:53:47.429797 | fa163e0d-6f45-64a1-ca76-000000002cad | TIMING | Pre-cache facts for puppet containers | standalone | 0:14:41.394802 | 0.02s 2026-01-22 12:53:47.501171 | fa163e0d-6f45-64a1-ca76-000000002cf4 | TASK | Gather variables for each operating system 2026-01-22 12:53:47.581256 | fa163e0d-6f45-64a1-ca76-000000002cf4 | TIMING | tripleo_puppet_cache : Gather variables for each operating system | standalone | 0:14:41.546260 | 0.08s 2026-01-22 12:53:47.610672 | fa163e0d-6f45-64a1-ca76-000000002cf5 | TASK | Create puppet caching structures 2026-01-22 12:53:47.856444 | fa163e0d-6f45-64a1-ca76-000000002cf5 | CHANGED | Create puppet caching structures | standalone 2026-01-22 12:53:47.857630 | fa163e0d-6f45-64a1-ca76-000000002cf5 | TIMING | tripleo_puppet_cache : Create puppet caching structures | standalone | 0:14:41.822639 | 0.25s 2026-01-22 12:53:47.886388 | fa163e0d-6f45-64a1-ca76-000000002cf6 | TASK | Check for facter.conf 2026-01-22 12:53:48.118104 | fa163e0d-6f45-64a1-ca76-000000002cf6 | OK | Check for facter.conf | standalone 2026-01-22 12:53:48.119266 | fa163e0d-6f45-64a1-ca76-000000002cf6 | TIMING | tripleo_puppet_cache : Check for facter.conf | standalone | 0:14:42.084274 | 0.23s 2026-01-22 12:53:48.147893 | fa163e0d-6f45-64a1-ca76-000000002cf7 | TASK | Remove facter.conf 
if directory 2026-01-22 12:53:48.197364 | fa163e0d-6f45-64a1-ca76-000000002cf7 | SKIPPED | Remove facter.conf if directory | standalone 2026-01-22 12:53:48.198453 | fa163e0d-6f45-64a1-ca76-000000002cf7 | TIMING | tripleo_puppet_cache : Remove facter.conf if directory | standalone | 0:14:42.163460 | 0.05s 2026-01-22 12:53:48.227551 | fa163e0d-6f45-64a1-ca76-000000002cf8 | TASK | Write facter cache config 2026-01-22 12:53:48.725434 | fa163e0d-6f45-64a1-ca76-000000002cf8 | CHANGED | Write facter cache config | standalone 2026-01-22 12:53:48.726752 | fa163e0d-6f45-64a1-ca76-000000002cf8 | TIMING | tripleo_puppet_cache : Write facter cache config | standalone | 0:14:42.691760 | 0.50s 2026-01-22 12:53:48.755167 | fa163e0d-6f45-64a1-ca76-000000002cf9 | TASK | Cleanup facter cache if exists 2026-01-22 12:53:48.998357 | fa163e0d-6f45-64a1-ca76-000000002cf9 | OK | Cleanup facter cache if exists | standalone 2026-01-22 12:53:48.999893 | fa163e0d-6f45-64a1-ca76-000000002cf9 | TIMING | tripleo_puppet_cache : Cleanup facter cache if exists | standalone | 0:14:42.964896 | 0.24s 2026-01-22 12:53:49.028195 | fa163e0d-6f45-64a1-ca76-000000002cfa | TASK | Pre-cache facts 2026-01-22 12:53:49.580120 | fa163e0d-6f45-64a1-ca76-000000002cfa | CHANGED | Pre-cache facts | standalone 2026-01-22 12:53:49.581407 | fa163e0d-6f45-64a1-ca76-000000002cfa | TIMING | tripleo_puppet_cache : Pre-cache facts | standalone | 0:14:43.546415 | 0.55s 2026-01-22 12:53:49.611430 | fa163e0d-6f45-64a1-ca76-000000002cfb | TASK | Failed deployment if facter fails 2026-01-22 12:53:49.644506 | fa163e0d-6f45-64a1-ca76-000000002cfb | SKIPPED | Failed deployment if facter fails | standalone 2026-01-22 12:53:49.645769 | fa163e0d-6f45-64a1-ca76-000000002cfb | TIMING | tripleo_puppet_cache : Failed deployment if facter fails | standalone | 0:14:43.610777 | 0.03s 2026-01-22 12:53:49.695509 | fa163e0d-6f45-64a1-ca76-000000002cfc | TASK | Sync cached facts 2026-01-22 12:53:50.186273 | fa163e0d-6f45-64a1-ca76-000000002cfc | CHANGED | Sync cached facts | standalone -> 192.168.122.100 [WARNING]: ('standalone -> 192.168.122.100', 'fa163e0d-6f45-64a1-ca76-000000002cfc') missing from stats 2026-01-22 12:53:50.240946 | fa163e0d-6f45-64a1-ca76-000000002cae | TASK | Include container-puppet tasks (generate config) during step 1 2026-01-22 12:53:50.290162 | fa163e0d-6f45-64a1-ca76-000000002cae | TIMING | Include container-puppet tasks (generate config) during step 1 | standalone | 0:14:44.255166 | 0.05s 2026-01-22 12:53:50.319979 | a444e076-7f71-4835-98a8-e675c0dcef71 | INCLUDED | /root/standalone-ansible-mz1ymllk/generate-config-tasks.yaml | standalone 2026-01-22 12:53:50.356882 | fa163e0d-6f45-64a1-ca76-000000002d46 | TASK | Create base directory puppet configs 2026-01-22 12:53:50.572382 | fa163e0d-6f45-64a1-ca76-000000002d46 | CHANGED | Create base directory puppet configs | standalone 2026-01-22 12:53:50.573635 | fa163e0d-6f45-64a1-ca76-000000002d46 | TIMING | Create base directory puppet configs | standalone | 0:14:44.538644 | 0.22s 2026-01-22 12:53:50.602900 | fa163e0d-6f45-64a1-ca76-000000002d47 | TASK | Generate container puppet configs for step 1 2026-01-22 12:53:50.881223 | fa163e0d-6f45-64a1-ca76-000000002d47 | CHANGED | Generate container puppet configs for step 1 | standalone 2026-01-22 12:53:50.882548 | fa163e0d-6f45-64a1-ca76-000000002d47 | TIMING | Generate container puppet configs for step 1 | standalone | 0:14:44.847557 | 0.28s 2026-01-22 12:53:50.913460 | fa163e0d-6f45-64a1-ca76-000000002d48 | TASK | Manage Puppet containers (generate 
config) for step 1 with tripleo-ansible 2026-01-22 12:53:50.942866 | fa163e0d-6f45-64a1-ca76-000000002d48 | TIMING | Manage Puppet containers (generate config) for step 1 with tripleo-ansible | standalone | 0:14:44.907869 | 0.03s 2026-01-22 12:53:51.019669 | fa163e0d-6f45-64a1-ca76-000000002dd2 | TASK | Gather variables for each operating system 2026-01-22 12:53:51.130454 | fa163e0d-6f45-64a1-ca76-000000002dd2 | TIMING | tripleo_container_manage : Gather variables for each operating system | standalone | 0:14:45.095457 | 0.11s 2026-01-22 12:53:51.159470 | fa163e0d-6f45-64a1-ca76-000000002dd3 | TASK | Create container logs path 2026-01-22 12:53:51.392991 | fa163e0d-6f45-64a1-ca76-000000002dd3 | OK | Create container logs path | standalone 2026-01-22 12:53:51.394523 | fa163e0d-6f45-64a1-ca76-000000002dd3 | TIMING | tripleo_container_manage : Create container logs path | standalone | 0:14:45.359529 | 0.23s 2026-01-22 12:53:51.428279 | fa163e0d-6f45-64a1-ca76-000000002dd5 | TASK | Find all matching configs configs for in /var/lib/tripleo-config/container-puppet-config/step_1 2026-01-22 12:53:51.864547 | fa163e0d-6f45-64a1-ca76-000000002dd5 | OK | Find all matching configs configs for in /var/lib/tripleo-config/container-puppet-config/step_1 | standalone 2026-01-22 12:53:51.866009 | fa163e0d-6f45-64a1-ca76-000000002dd5 | TIMING | tripleo_container_manage : Find all matching configs configs for in /var/lib/tripleo-config/container-puppet-config/step_1 | standalone | 0:14:45.831018 | 0.44s 2026-01-22 12:53:51.897216 | fa163e0d-6f45-64a1-ca76-000000002dd6 | TASK | Finalise hashes for all containers 2026-01-22 12:53:51.979451 | fa163e0d-6f45-64a1-ca76-000000002dd6 | OK | Finalise hashes for all containers | standalone 2026-01-22 12:53:51.980592 | fa163e0d-6f45-64a1-ca76-000000002dd6 | TIMING | tripleo_container_manage : Finalise hashes for all containers | standalone | 0:14:45.945602 | 0.08s 2026-01-22 12:53:52.009431 | fa163e0d-6f45-64a1-ca76-000000002dd8 | TASK | Manage systemd shutdown files 2026-01-22 12:53:52.049296 | fa163e0d-6f45-64a1-ca76-000000002dd8 | SKIPPED | Manage systemd shutdown files | standalone 2026-01-22 12:53:52.050487 | fa163e0d-6f45-64a1-ca76-000000002dd8 | TIMING | tripleo_container_manage : Manage systemd shutdown files | standalone | 0:14:46.015495 | 0.04s 2026-01-22 12:53:52.080177 | fa163e0d-6f45-64a1-ca76-000000002dda | TASK | Update container configs with new config hashes 2026-01-22 12:53:52.140957 | fa163e0d-6f45-64a1-ca76-000000002dda | SKIPPED | Update container configs with new config hashes | standalone 2026-01-22 12:53:52.142083 | fa163e0d-6f45-64a1-ca76-000000002dda | TIMING | tripleo_container_manage : Update container configs with new config hashes | standalone | 0:14:46.107089 | 0.06s 2026-01-22 12:53:52.173533 | fa163e0d-6f45-64a1-ca76-000000002ddb | TASK | Delete orphan containers from /var/lib/tripleo-config/container-puppet-config/step_1 2026-01-22 12:53:52.226092 | fa163e0d-6f45-64a1-ca76-000000002ddb | TIMING | tripleo_container_manage : Delete orphan containers from /var/lib/tripleo-config/container-puppet-config/step_1 | standalone | 0:14:46.191087 | 0.05s 2026-01-22 12:53:52.251852 | a0b2898a-89e5-4234-9291-279d1ed6dfea | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/delete_orphan.yml | standalone 2026-01-22 12:53:52.287743 | fa163e0d-6f45-64a1-ca76-000000002e1f | TASK | Gather podman infos 2026-01-22 12:53:52.872117 | fa163e0d-6f45-64a1-ca76-000000002e1f | OK | Gather podman infos | standalone 2026-01-22 12:53:52.873787 | 
fa163e0d-6f45-64a1-ca76-000000002e1f | TIMING | tripleo_container_manage : Gather podman infos | standalone | 0:14:46.838789 | 0.58s 2026-01-22 12:53:52.908025 | fa163e0d-6f45-64a1-ca76-000000002e20 | TASK | Delete orphan containers managed by Podman for /var/lib/tripleo-config/container-puppet-config/step_1 2026-01-22 12:53:52.969847 | fa163e0d-6f45-64a1-ca76-000000002e20 | SKIPPED | Delete orphan containers managed by Podman for /var/lib/tripleo-config/container-puppet-config/step_1 | standalone 2026-01-22 12:53:52.971028 | fa163e0d-6f45-64a1-ca76-000000002e20 | TIMING | Delete orphan containers managed by Podman for /var/lib/tripleo-config/container-puppet-config/step_1 | standalone | 0:14:46.936035 | 0.06s 2026-01-22 12:53:53.003170 | fa163e0d-6f45-64a1-ca76-000000002ddc | TASK | Create containers from /var/lib/tripleo-config/container-puppet-config/step_1 2026-01-22 12:53:53.066679 | fa163e0d-6f45-64a1-ca76-000000002ddc | TIMING | tripleo_container_manage : Create containers from /var/lib/tripleo-config/container-puppet-config/step_1 | standalone | 0:14:47.031660 | 0.06s 2026-01-22 12:53:53.089932 | 31fa2161-76ca-4ba0-8d0f-f6ca9a627458 | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/create.yml | standalone 2026-01-22 12:53:53.132160 | fa163e0d-6f45-64a1-ca76-000000002e49 | TASK | Create containers managed by Podman for /var/lib/tripleo-config/container-puppet-config/step_1 2026-01-22 12:54:23.987580 | fa163e0d-6f45-64a1-ca76-000000002e49 | CHANGED | Create containers managed by Podman for /var/lib/tripleo-config/container-puppet-config/step_1 | standalone 2026-01-22 12:54:23.989324 | fa163e0d-6f45-64a1-ca76-000000002e49 | TIMING | tripleo_container_manage : Create containers managed by Podman for /var/lib/tripleo-config/container-puppet-config/step_1 | standalone | 0:15:17.954331 | 30.86s 2026-01-22 12:54:24.064552 | fa163e0d-6f45-64a1-ca76-000000002e4a | TASK | Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-puppet-config/step_1 2026-01-22 12:54:24.150521 | fa163e0d-6f45-64a1-ca76-000000002e4a | SKIPPED | Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-puppet-config/step_1 | standalone 2026-01-22 12:54:24.151504 | fa163e0d-6f45-64a1-ca76-000000002e4a | TIMING | tripleo_container_manage : Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-puppet-config/step_1 | standalone | 0:15:18.116518 | 0.09s 2026-01-22 12:54:24.201487 | fa163e0d-6f45-64a1-ca76-000000002d4a | TASK | Diff puppet-generated changes for check mode 2026-01-22 12:54:24.231851 | fa163e0d-6f45-64a1-ca76-000000002d4a | SKIPPED | Diff puppet-generated changes for check mode | standalone 2026-01-22 12:54:24.232751 | fa163e0d-6f45-64a1-ca76-000000002d4a | TIMING | Diff puppet-generated changes for check mode | standalone | 0:15:18.197765 | 0.03s 2026-01-22 12:54:24.255079 | fa163e0d-6f45-64a1-ca76-000000002d4b | TASK | Diff puppet-generated changes for check mode 2026-01-22 12:54:24.285921 | fa163e0d-6f45-64a1-ca76-000000002d4b | SKIPPED | Diff puppet-generated changes for check mode | standalone 2026-01-22 12:54:24.286674 | fa163e0d-6f45-64a1-ca76-000000002d4b | TIMING | Diff puppet-generated changes for check mode | standalone | 0:15:18.251673 | 0.03s 2026-01-22 12:54:24.309966 | fa163e0d-6f45-64a1-ca76-000000002cb0 | TASK | Manage containers for step 1 with tripleo-ansible 2026-01-22 12:54:24.336376 | 
fa163e0d-6f45-64a1-ca76-000000002cb0 | TIMING | Manage containers for step 1 with tripleo-ansible | standalone | 0:15:18.301388 | 0.03s 2026-01-22 12:54:24.392382 | fa163e0d-6f45-64a1-ca76-000000002e9f | TASK | Gather variables for each operating system 2026-01-22 12:54:24.515414 | fa163e0d-6f45-64a1-ca76-000000002e9f | TIMING | tripleo_container_manage : Gather variables for each operating system | standalone | 0:15:18.480416 | 0.12s 2026-01-22 12:54:24.544565 | fa163e0d-6f45-64a1-ca76-000000002ea0 | TASK | Create container logs path 2026-01-22 12:54:24.809813 | fa163e0d-6f45-64a1-ca76-000000002ea0 | OK | Create container logs path | standalone 2026-01-22 12:54:24.811176 | fa163e0d-6f45-64a1-ca76-000000002ea0 | TIMING | tripleo_container_manage : Create container logs path | standalone | 0:15:18.776185 | 0.27s 2026-01-22 12:54:24.842386 | fa163e0d-6f45-64a1-ca76-000000002ea2 | TASK | Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_1 2026-01-22 12:54:25.094344 | fa163e0d-6f45-64a1-ca76-000000002ea2 | OK | Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_1 | standalone 2026-01-22 12:54:25.096102 | fa163e0d-6f45-64a1-ca76-000000002ea2 | TIMING | tripleo_container_manage : Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_1 | standalone | 0:15:19.061103 | 0.25s 2026-01-22 12:54:25.125067 | fa163e0d-6f45-64a1-ca76-000000002ea3 | TASK | Finalise hashes for all containers 2026-01-22 12:54:25.186341 | fa163e0d-6f45-64a1-ca76-000000002ea3 | OK | Finalise hashes for all containers | standalone 2026-01-22 12:54:25.187529 | fa163e0d-6f45-64a1-ca76-000000002ea3 | TIMING | tripleo_container_manage : Finalise hashes for all containers | standalone | 0:15:19.152538 | 0.06s 2026-01-22 12:54:25.216730 | fa163e0d-6f45-64a1-ca76-000000002ea5 | TASK | Manage systemd shutdown files 2026-01-22 12:54:25.249531 | fa163e0d-6f45-64a1-ca76-000000002ea5 | TIMING | tripleo_container_manage : Manage systemd shutdown files | standalone | 0:15:19.214536 | 0.03s 2026-01-22 12:54:25.284518 | ec6124cc-cff7-4432-90d3-d7e65ae8a673 | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/shutdown.yml | standalone 2026-01-22 12:54:25.312741 | fa163e0d-6f45-64a1-ca76-000000002ed8 | TASK | Check if /etc/sysconfig/podman_drop_in exists 2026-01-22 12:54:25.566104 | fa163e0d-6f45-64a1-ca76-000000002ed8 | OK | Check if /etc/sysconfig/podman_drop_in exists | standalone 2026-01-22 12:54:25.567178 | fa163e0d-6f45-64a1-ca76-000000002ed8 | TIMING | tripleo_container_manage : Check if /etc/sysconfig/podman_drop_in exists | standalone | 0:15:19.532193 | 0.25s 2026-01-22 12:54:25.589744 | fa163e0d-6f45-64a1-ca76-000000002ed9 | TASK | Set podman_drop_in fact 2026-01-22 12:54:25.651903 | fa163e0d-6f45-64a1-ca76-000000002ed9 | OK | Set podman_drop_in fact | standalone 2026-01-22 12:54:25.652900 | fa163e0d-6f45-64a1-ca76-000000002ed9 | TIMING | tripleo_container_manage : Set podman_drop_in fact | standalone | 0:15:19.617916 | 0.06s 2026-01-22 12:54:25.675503 | fa163e0d-6f45-64a1-ca76-000000002edb | TASK | Deploy tripleo-container-shutdown and tripleo-start-podman-container 2026-01-22 12:54:26.195672 | fa163e0d-6f45-64a1-ca76-000000002edb | CHANGED | Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | item=tripleo-container-shutdown 2026-01-22 12:54:26.197228 | fa163e0d-6f45-64a1-ca76-000000002edb | TIMING | tripleo_container_manage : Deploy tripleo-container-shutdown 
and tripleo-start-podman-container | standalone | 0:15:20.162230 | 0.52s 2026-01-22 12:54:26.710156 | fa163e0d-6f45-64a1-ca76-000000002edb | CHANGED | Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | item=tripleo-start-podman-container 2026-01-22 12:54:26.711258 | fa163e0d-6f45-64a1-ca76-000000002edb | TIMING | tripleo_container_manage : Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | 0:15:20.676269 | 1.03s 2026-01-22 12:54:26.718002 | fa163e0d-6f45-64a1-ca76-000000002edb | TIMING | tripleo_container_manage : Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | 0:15:20.683014 | 1.04s 2026-01-22 12:54:26.747115 | fa163e0d-6f45-64a1-ca76-000000002edc | TASK | Create /usr/lib/systemd/system/tripleo-container-shutdown.service 2026-01-22 12:54:27.268247 | fa163e0d-6f45-64a1-ca76-000000002edc | CHANGED | Create /usr/lib/systemd/system/tripleo-container-shutdown.service | standalone 2026-01-22 12:54:27.269200 | fa163e0d-6f45-64a1-ca76-000000002edc | TIMING | tripleo_container_manage : Create /usr/lib/systemd/system/tripleo-container-shutdown.service | standalone | 0:15:21.234215 | 0.52s 2026-01-22 12:54:27.291615 | fa163e0d-6f45-64a1-ca76-000000002edd | TASK | Create /usr/lib/systemd/system-preset/91-tripleo-container-shutdown.preset 2026-01-22 12:54:27.747753 | fa163e0d-6f45-64a1-ca76-000000002edd | CHANGED | Create /usr/lib/systemd/system-preset/91-tripleo-container-shutdown.preset | standalone 2026-01-22 12:54:27.748609 | fa163e0d-6f45-64a1-ca76-000000002edd | TIMING | tripleo_container_manage : Create /usr/lib/systemd/system-preset/91-tripleo-container-shutdown.preset | standalone | 0:15:21.713623 | 0.46s 2026-01-22 12:54:27.777534 | fa163e0d-6f45-64a1-ca76-000000002ede | TASK | Enable and start tripleo-container-shutdown 2026-01-22 12:54:28.662740 | fa163e0d-6f45-64a1-ca76-000000002ede | CHANGED | Enable and start tripleo-container-shutdown | standalone 2026-01-22 12:54:28.664711 | fa163e0d-6f45-64a1-ca76-000000002ede | TIMING | tripleo_container_manage : Enable and start tripleo-container-shutdown | standalone | 0:15:22.629713 | 0.89s 2026-01-22 12:54:28.694847 | fa163e0d-6f45-64a1-ca76-000000002edf | TASK | Create /usr/lib/systemd/system/netns-placeholder.service 2026-01-22 12:54:29.149511 | fa163e0d-6f45-64a1-ca76-000000002edf | CHANGED | Create /usr/lib/systemd/system/netns-placeholder.service | standalone 2026-01-22 12:54:29.150281 | fa163e0d-6f45-64a1-ca76-000000002edf | TIMING | tripleo_container_manage : Create /usr/lib/systemd/system/netns-placeholder.service | standalone | 0:15:23.115285 | 0.45s 2026-01-22 12:54:29.169474 | fa163e0d-6f45-64a1-ca76-000000002ee0 | TASK | Create /usr/lib/systemd/system-preset/91-netns-placeholder.preset 2026-01-22 12:54:29.613240 | fa163e0d-6f45-64a1-ca76-000000002ee0 | CHANGED | Create /usr/lib/systemd/system-preset/91-netns-placeholder.preset | standalone 2026-01-22 12:54:29.614576 | fa163e0d-6f45-64a1-ca76-000000002ee0 | TIMING | tripleo_container_manage : Create /usr/lib/systemd/system-preset/91-netns-placeholder.preset | standalone | 0:15:23.579585 | 0.44s 2026-01-22 12:54:29.643945 | fa163e0d-6f45-64a1-ca76-000000002ee1 | TASK | Enable and start netns-placeholder 2026-01-22 12:54:30.589921 | fa163e0d-6f45-64a1-ca76-000000002ee1 | CHANGED | Enable and start netns-placeholder | standalone 2026-01-22 12:54:30.591991 | fa163e0d-6f45-64a1-ca76-000000002ee1 | TIMING | tripleo_container_manage : Enable and start netns-placeholder | standalone | 
0:15:24.556997 | 0.95s 2026-01-22 12:54:30.622605 | fa163e0d-6f45-64a1-ca76-000000002ea7 | TASK | Update container configs with new config hashes 2026-01-22 12:54:30.677110 | fa163e0d-6f45-64a1-ca76-000000002ea7 | TIMING | tripleo_container_manage : Update container configs with new config hashes | standalone | 0:15:24.642112 | 0.05s 2026-01-22 12:54:30.697595 | 6105b357-7646-4e78-990b-80f3628fb422 | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/puppet_config.yml | standalone 2026-01-22 12:54:30.733362 | fa163e0d-6f45-64a1-ca76-000000002f09 | TASK | Update config hashes for container startup configs 2026-01-22 12:54:31.040679 | | WARNING | Config change detected for memcached, new hash: a226265c45722b4c809790869a4b047f 2026-01-22 12:54:31.041884 | | WARNING | Config change detected for mysql_bootstrap, new hash: 95afe505be5c96904b4ad6ce9fce2f06 2026-01-22 12:54:31.042679 | | WARNING | Config change detected for rabbitmq_bootstrap, new hash: a88464c3f0a0cb7bcf03278f8b57b6bd 2026-01-22 12:54:31.043432 | | WARNING | Config change detected for redis_tls_proxy, new hash: 95b959c17471758f0a78d56163826eb0 2026-01-22 12:54:31.044247 | | WARNING | Config change detected for clustercheck, new hash: 197f26607d8b7ea30e3428e4a0230fe2 2026-01-22 12:54:31.045118 | | WARNING | Config change detected for horizon_fix_perms, new hash: f9e89f2e27f10f2e44ba2434121fb451 2026-01-22 12:54:31.045973 | | WARNING | Config change detected for mysql_wait_bundle, new hash: 95afe505be5c96904b4ad6ce9fce2f06 2026-01-22 12:54:31.046752 | | WARNING | Config change detected for octavia_api_init_dirs, new hash: ce661c529311f0a10f93380e3f5fe3b5 2026-01-22 12:54:31.047501 | | WARNING | Config change detected for octavia_health_manager_init_dirs, new hash: ce661c529311f0a10f93380e3f5fe3b5 2026-01-22 12:54:31.048275 | | WARNING | Config change detected for octavia_housekeeping_init_dirs, new hash: ce661c529311f0a10f93380e3f5fe3b5 2026-01-22 12:54:31.049032 | | WARNING | Config change detected for octavia_worker_init_dirs, new hash: ce661c529311f0a10f93380e3f5fe3b5 2026-01-22 12:54:31.049833 | | WARNING | Config change detected for aodh_db_sync, new hash: 1558a1f244ae20f0808d18153486a46b 2026-01-22 12:54:31.050592 | | WARNING | Config change detected for cinder_api_db_sync, new hash: 60452fc827fb92ac24d84c1a0cdc4b74 2026-01-22 12:54:31.051377 | | WARNING | Config change detected for glance_api_db_sync, new hash: 1ee20222d3d15cd67fee270464696f83-13534690ca65620ec2f13f46f80a2327 2026-01-22 12:54:31.052164 | | WARNING | Config change detected for heat_engine_db_sync, new hash: ab84b136791271c7292d01010ba61821 2026-01-22 12:54:31.052936 | | WARNING | Config change detected for horizon, new hash: f9e89f2e27f10f2e44ba2434121fb451 2026-01-22 12:54:31.053717 | | WARNING | Config change detected for iscsid, new hash: 13534690ca65620ec2f13f46f80a2327 2026-01-22 12:54:31.054432 | | WARNING | Config change detected for keystone, new hash: 63f940018bc3791f7b28543657522f72 2026-01-22 12:54:31.055185 | | WARNING | Config change detected for keystone_cron, new hash: 63f940018bc3791f7b28543657522f72 2026-01-22 12:54:31.055958 | | WARNING | Config change detected for keystone_db_sync, new hash: 63f940018bc3791f7b28543657522f72 2026-01-22 12:54:31.056725 | | WARNING | Config change detected for manila_api_db_sync, new hash: 0cb612dca50f949326e3fa54048cbd58 2026-01-22 12:54:31.057566 | | WARNING | Config change detected for neutron_db_sync, new hash: 816cae9aa98c879ab61893234af2fc76 2026-01-22 12:54:31.058405 | | WARNING | Config 
change detected for nova_api_db_sync, new hash: edc2c2c3517396fe9992c9d2a646d9a6 2026-01-22 12:54:31.059222 | | WARNING | Config change detected for nova_api_ensure_default_cells, new hash: edc2c2c3517396fe9992c9d2a646d9a6 2026-01-22 12:54:31.060055 | | WARNING | Config change detected for nova_db_sync, new hash: edc2c2c3517396fe9992c9d2a646d9a6 2026-01-22 12:54:31.060963 | | WARNING | Config change detected for nova_virtlogd_wrapper, new hash: f0692603e6075aae6a915735d50dad21 2026-01-22 12:54:31.061828 | | WARNING | Config change detected for nova_virtnodedevd, new hash: f0692603e6075aae6a915735d50dad21 2026-01-22 12:54:31.062625 | | WARNING | Config change detected for nova_virtproxyd, new hash: f0692603e6075aae6a915735d50dad21 2026-01-22 12:54:31.063501 | | WARNING | Config change detected for nova_virtqemud, new hash: f0692603e6075aae6a915735d50dad21 2026-01-22 12:54:31.064318 | | WARNING | Config change detected for nova_virtsecretd, new hash: f0692603e6075aae6a915735d50dad21 2026-01-22 12:54:31.065182 | | WARNING | Config change detected for nova_virtstoraged, new hash: f0692603e6075aae6a915735d50dad21 2026-01-22 12:54:31.066109 | | WARNING | Config change detected for octavia_db_sync, new hash: ce661c529311f0a10f93380e3f5fe3b5 2026-01-22 12:54:31.066919 | | WARNING | Config change detected for placement_api_db_sync, new hash: 416e71903f0bfceff1acc01cfffc9748 2026-01-22 12:54:31.067777 | | WARNING | Config change detected for swift_copy_rings, new hash: 013f1efdff6f9bbbf3ecd4b887aadd1f-09e5ddb5f23f110c2ea3b1166b93460e 2026-01-22 12:54:31.068576 | | WARNING | Config change detected for aodh_api, new hash: 1558a1f244ae20f0808d18153486a46b 2026-01-22 12:54:31.069426 | | WARNING | Config change detected for aodh_api_cron, new hash: 1558a1f244ae20f0808d18153486a46b 2026-01-22 12:54:31.070258 | | WARNING | Config change detected for aodh_evaluator, new hash: 1558a1f244ae20f0808d18153486a46b 2026-01-22 12:54:31.071042 | | WARNING | Config change detected for aodh_listener, new hash: 1558a1f244ae20f0808d18153486a46b 2026-01-22 12:54:31.082597 | | WARNING | Config change detected for aodh_notifier, new hash: 1558a1f244ae20f0808d18153486a46b 2026-01-22 12:54:31.083999 | | WARNING | Config change detected for ceilometer_agent_central, new hash: c5ac86781614228c8f4c9cc39f8e6287 2026-01-22 12:54:31.084578 | | WARNING | Config change detected for ceilometer_agent_compute, new hash: c5ac86781614228c8f4c9cc39f8e6287 2026-01-22 12:54:31.085180 | | WARNING | Config change detected for ceilometer_agent_notification, new hash: c5ac86781614228c8f4c9cc39f8e6287 2026-01-22 12:54:31.085851 | | WARNING | Config change detected for cinder_api, new hash: 60452fc827fb92ac24d84c1a0cdc4b74 2026-01-22 12:54:31.086387 | | WARNING | Config change detected for cinder_api_cron, new hash: 60452fc827fb92ac24d84c1a0cdc4b74 2026-01-22 12:54:31.086920 | | WARNING | Config change detected for cinder_scheduler, new hash: 60452fc827fb92ac24d84c1a0cdc4b74 2026-01-22 12:54:31.087397 | | WARNING | Config change detected for glance_api, new hash: 1ee20222d3d15cd67fee270464696f83-13534690ca65620ec2f13f46f80a2327 2026-01-22 12:54:31.088591 | | WARNING | Config change detected for glance_api_cron, new hash: 1ee20222d3d15cd67fee270464696f83 2026-01-22 12:54:31.089122 | | WARNING | Config change detected for glance_api_internal, new hash: 902a3b58209a8f31b26ac1b371dc0559-13534690ca65620ec2f13f46f80a2327 2026-01-22 12:54:31.089676 | | WARNING | Config change detected for glance_api_internal_tls_proxy, new hash: 
902a3b58209a8f31b26ac1b371dc0559 2026-01-22 12:54:31.090173 | | WARNING | Config change detected for glance_api_tls_proxy, new hash: 1ee20222d3d15cd67fee270464696f83 2026-01-22 12:54:31.090680 | | WARNING | Config change detected for heat_api, new hash: 0389ae7a287d419176161ca691a66e9b 2026-01-22 12:54:31.091233 | | WARNING | Config change detected for heat_api_cfn, new hash: 8fb36315a00d8d46013c682103593a5c 2026-01-22 12:54:31.091793 | | WARNING | Config change detected for heat_api_cron, new hash: 0389ae7a287d419176161ca691a66e9b 2026-01-22 12:54:31.092311 | | WARNING | Config change detected for heat_engine, new hash: ab84b136791271c7292d01010ba61821 2026-01-22 12:54:31.092823 | | WARNING | Config change detected for logrotate_crond, new hash: 53ed83bb0cae779ff95edb2002262c6f 2026-01-22 12:54:31.093292 | | WARNING | Config change detected for manila_api, new hash: 0cb612dca50f949326e3fa54048cbd58 2026-01-22 12:54:31.093867 | | WARNING | Config change detected for manila_api_cron, new hash: 0cb612dca50f949326e3fa54048cbd58 2026-01-22 12:54:31.094338 | | WARNING | Config change detected for manila_scheduler, new hash: 0cb612dca50f949326e3fa54048cbd58 2026-01-22 12:54:31.094844 | | WARNING | Config change detected for neutron_api, new hash: 816cae9aa98c879ab61893234af2fc76 2026-01-22 12:54:31.095337 | | WARNING | Config change detected for neutron_dhcp, new hash: 816cae9aa98c879ab61893234af2fc76 2026-01-22 12:54:31.096053 | | WARNING | Config change detected for neutron_server_tls_proxy, new hash: 816cae9aa98c879ab61893234af2fc76 2026-01-22 12:54:31.096788 | | WARNING | Config change detected for neutron_sriov_agent, new hash: 816cae9aa98c879ab61893234af2fc76 2026-01-22 12:54:31.097501 | | WARNING | Config change detected for nova_api, new hash: edc2c2c3517396fe9992c9d2a646d9a6 2026-01-22 12:54:31.098223 | | WARNING | Config change detected for nova_api_cron, new hash: edc2c2c3517396fe9992c9d2a646d9a6 2026-01-22 12:54:31.098958 | | WARNING | Config change detected for nova_conductor, new hash: edc2c2c3517396fe9992c9d2a646d9a6 2026-01-22 12:54:31.099579 | | WARNING | Config change detected for nova_metadata, new hash: 3ea10c4b65dc846f0d40a96be5f9d1ff 2026-01-22 12:54:31.100148 | | WARNING | Config change detected for nova_migration_target, new hash: f0692603e6075aae6a915735d50dad21 2026-01-22 12:54:31.100721 | | WARNING | Config change detected for nova_scheduler, new hash: edc2c2c3517396fe9992c9d2a646d9a6 2026-01-22 12:54:31.101248 | | WARNING | Config change detected for nova_vnc_proxy, new hash: edc2c2c3517396fe9992c9d2a646d9a6 2026-01-22 12:54:31.101839 | | WARNING | Config change detected for nova_wait_for_api_service, new hash: edc2c2c3517396fe9992c9d2a646d9a6 2026-01-22 12:54:31.102365 | | WARNING | Config change detected for octavia_api, new hash: ce661c529311f0a10f93380e3f5fe3b5 2026-01-22 12:54:31.102927 | | WARNING | Config change detected for octavia_driver_agent, new hash: ce661c529311f0a10f93380e3f5fe3b5 2026-01-22 12:54:31.103449 | | WARNING | Config change detected for ovn_metadata_agent, new hash: 816cae9aa98c879ab61893234af2fc76 2026-01-22 12:54:31.104020 | | WARNING | Config change detected for placement_api, new hash: 416e71903f0bfceff1acc01cfffc9748 2026-01-22 12:54:31.104536 | | WARNING | Config change detected for placement_wait_for_service, new hash: 416e71903f0bfceff1acc01cfffc9748 2026-01-22 12:54:31.105126 | | WARNING | Config change detected for swift_account_reaper, new hash: 013f1efdff6f9bbbf3ecd4b887aadd1f 2026-01-22 12:54:31.105705 | | WARNING | Config change 
detected for swift_account_server, new hash: 013f1efdff6f9bbbf3ecd4b887aadd1f 2026-01-22 12:54:31.106239 | | WARNING | Config change detected for swift_container_server, new hash: 013f1efdff6f9bbbf3ecd4b887aadd1f 2026-01-22 12:54:31.106782 | | WARNING | Config change detected for swift_container_updater, new hash: 013f1efdff6f9bbbf3ecd4b887aadd1f 2026-01-22 12:54:31.107300 | | WARNING | Config change detected for swift_object_expirer, new hash: 013f1efdff6f9bbbf3ecd4b887aadd1f 2026-01-22 12:54:31.107848 | | WARNING | Config change detected for swift_object_server, new hash: 013f1efdff6f9bbbf3ecd4b887aadd1f 2026-01-22 12:54:31.108363 | | WARNING | Config change detected for swift_object_updater, new hash: 013f1efdff6f9bbbf3ecd4b887aadd1f 2026-01-22 12:54:31.108918 | | WARNING | Config change detected for swift_proxy, new hash: 013f1efdff6f9bbbf3ecd4b887aadd1f 2026-01-22 12:54:31.109464 | | WARNING | Config change detected for swift_proxy_tls_proxy, new hash: 013f1efdff6f9bbbf3ecd4b887aadd1f 2026-01-22 12:54:31.110016 | | WARNING | Config change detected for ceilometer_gnocchi_upgrade, new hash: c5ac86781614228c8f4c9cc39f8e6287 2026-01-22 12:54:31.142869 | | WARNING | Config change detected for gnocchi_api, new hash: 657b23108a7aecb1e4d4d320f81f7a15 2026-01-22 12:54:31.143707 | | WARNING | Config change detected for gnocchi_db_sync, new hash: 657b23108a7aecb1e4d4d320f81f7a15 2026-01-22 12:54:31.144166 | | WARNING | Config change detected for gnocchi_metricd, new hash: 657b23108a7aecb1e4d4d320f81f7a15 2026-01-22 12:54:31.144526 | | WARNING | Config change detected for gnocchi_statsd, new hash: 657b23108a7aecb1e4d4d320f81f7a15 2026-01-22 12:54:31.144911 | | WARNING | Config change detected for nova_compute, new hash: 13534690ca65620ec2f13f46f80a2327-f0692603e6075aae6a915735d50dad21 2026-01-22 12:54:31.145443 | | WARNING | Config change detected for nova_wait_for_compute_service, new hash: f0692603e6075aae6a915735d50dad21 2026-01-22 12:54:31.145892 | | WARNING | Config change detected for octavia_health_manager, new hash: ce661c529311f0a10f93380e3f5fe3b5 2026-01-22 12:54:31.146295 | | WARNING | Config change detected for octavia_housekeeping, new hash: ce661c529311f0a10f93380e3f5fe3b5 2026-01-22 12:54:31.146639 | | WARNING | Config change detected for octavia_rsyslog, new hash: ce661c529311f0a10f93380e3f5fe3b5 2026-01-22 12:54:31.147001 | | WARNING | Config change detected for octavia_worker, new hash: ce661c529311f0a10f93380e3f5fe3b5 2026-01-22 12:54:31.147444 | fa163e0d-6f45-64a1-ca76-000000002f09 | CHANGED | Update config hashes for container startup configs | standalone 2026-01-22 12:54:31.148168 | fa163e0d-6f45-64a1-ca76-000000002f09 | TIMING | tripleo_container_manage : Update config hashes for container startup configs | standalone | 0:15:25.113172 | 0.41s 2026-01-22 12:54:31.178740 | fa163e0d-6f45-64a1-ca76-000000002ea8 | TASK | Delete orphan containers from /var/lib/tripleo-config/container-startup-config/step_1 2026-01-22 12:54:31.228545 | fa163e0d-6f45-64a1-ca76-000000002ea8 | TIMING | tripleo_container_manage : Delete orphan containers from /var/lib/tripleo-config/container-startup-config/step_1 | standalone | 0:15:25.193545 | 0.05s 2026-01-22 12:54:31.245241 | b62e75b2-e7f6-41ed-bfa2-3b452e14349a | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/delete_orphan.yml | standalone 2026-01-22 12:54:31.275445 | fa163e0d-6f45-64a1-ca76-000000002f28 | TASK | Gather podman infos 2026-01-22 12:54:32.155321 | fa163e0d-6f45-64a1-ca76-000000002f28 | OK | Gather podman infos | 
standalone 2026-01-22 12:54:32.156490 | fa163e0d-6f45-64a1-ca76-000000002f28 | TIMING | tripleo_container_manage : Gather podman infos | standalone | 0:15:26.121495 | 0.88s 2026-01-22 12:54:32.221449 | fa163e0d-6f45-64a1-ca76-000000002f29 | TASK | Delete orphan containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_1 2026-01-22 12:54:32.284248 | fa163e0d-6f45-64a1-ca76-000000002f29 | TIMING | Delete orphan containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_1 | standalone | 0:15:26.249249 | 0.06s 2026-01-22 12:54:32.408268 | fa163e0d-6f45-64a1-ca76-000000002f72 | TIMING | tripleo_container_rm : include_tasks | standalone | 0:15:26.373264 | 0.06s 2026-01-22 12:54:32.469810 | fa163e0d-6f45-64a1-ca76-000000002ea9 | TASK | Create containers from /var/lib/tripleo-config/container-startup-config/step_1 2026-01-22 12:54:32.522168 | fa163e0d-6f45-64a1-ca76-000000002ea9 | TIMING | tripleo_container_manage : Create containers from /var/lib/tripleo-config/container-startup-config/step_1 | standalone | 0:15:26.487165 | 0.05s 2026-01-22 12:54:32.544810 | 96c3680b-b197-403c-8f6b-c181298a3b4d | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/create.yml | standalone 2026-01-22 12:54:32.581759 | fa163e0d-6f45-64a1-ca76-000000002f98 | TASK | Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_1 2026-01-22 12:54:40.501796 | fa163e0d-6f45-64a1-ca76-000000002f98 | CHANGED | Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_1 | standalone 2026-01-22 12:54:40.503469 | fa163e0d-6f45-64a1-ca76-000000002f98 | TIMING | tripleo_container_manage : Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_1 | standalone | 0:15:34.468476 | 7.92s 2026-01-22 12:54:40.534785 | fa163e0d-6f45-64a1-ca76-000000002f99 | TASK | Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_1 2026-01-22 12:54:46.066392 | fa163e0d-6f45-64a1-ca76-000000002f99 | CHANGED | Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_1 | standalone 2026-01-22 12:54:46.068135 | fa163e0d-6f45-64a1-ca76-000000002f99 | TIMING | tripleo_container_manage : Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_1 | standalone | 0:15:40.033143 | 5.53s 2026-01-22 12:54:46.142779 | fa163e0d-6f45-64a1-ca76-000000002cb2 | TASK | Clean container_puppet_tasks for standalone step 1 2026-01-22 12:54:46.388467 | fa163e0d-6f45-64a1-ca76-000000002cb2 | OK | Clean container_puppet_tasks for standalone step 1 | standalone 2026-01-22 12:54:46.389883 | fa163e0d-6f45-64a1-ca76-000000002cb2 | TIMING | Clean container_puppet_tasks for standalone step 1 | standalone | 0:15:40.354890 | 0.25s 2026-01-22 12:54:46.438007 | fa163e0d-6f45-64a1-ca76-000000002cb3 | TASK | Calculate container_puppet_tasks for standalone step 1 2026-01-22 12:54:46.488066 | fa163e0d-6f45-64a1-ca76-000000002cb3 | TIMING | Calculate container_puppet_tasks for standalone step 1 | standalone | 0:15:40.453072 | 0.05s 2026-01-22 12:54:46.511144 | fa163e0d-6f45-64a1-ca76-000000002cb4 | TASK | Include container-puppet tasks for step 1 2026-01-22 12:54:46.557977 | fa163e0d-6f45-64a1-ca76-000000002cb4 | TIMING | Include container-puppet tasks for step 1 | standalone | 0:15:40.522985 | 0.05s 
2026-01-22 12:54:46.583374 | 650e5626-cb14-441e-a2de-21136f895e20 | INCLUDED | /root/standalone-ansible-mz1ymllk/host-container-puppet-tasks.yaml | standalone 2026-01-22 12:54:46.629674 | fa163e0d-6f45-64a1-ca76-000000002fe4 | TASK | Write container-puppet-tasks json file for standalone step 1 2026-01-22 12:54:47.139183 | fa163e0d-6f45-64a1-ca76-000000002fe4 | CHANGED | Write container-puppet-tasks json file for standalone step 1 | standalone 2026-01-22 12:54:47.140406 | fa163e0d-6f45-64a1-ca76-000000002fe4 | TIMING | Write container-puppet-tasks json file for standalone step 1 | standalone | 0:15:41.105416 | 0.51s 2026-01-22 12:54:47.170229 | fa163e0d-6f45-64a1-ca76-000000002fe6 | TASK | Generate container puppet configs for step 1 2026-01-22 12:54:47.451823 | fa163e0d-6f45-64a1-ca76-000000002fe6 | OK | Generate container puppet configs for step 1 | standalone 2026-01-22 12:54:47.453113 | fa163e0d-6f45-64a1-ca76-000000002fe6 | TIMING | Generate container puppet configs for step 1 | standalone | 0:15:41.418121 | 0.28s 2026-01-22 12:54:47.483420 | fa163e0d-6f45-64a1-ca76-000000002fe7 | TASK | Manage Puppet containers (bootstrap tasks) for step 1 with tripleo-ansible 2026-01-22 12:54:47.512272 | fa163e0d-6f45-64a1-ca76-000000002fe7 | TIMING | Manage Puppet containers (bootstrap tasks) for step 1 with tripleo-ansible | standalone | 0:15:41.477275 | 0.03s 2026-01-22 12:54:47.570382 | fa163e0d-6f45-64a1-ca76-000000002dd2 | TASK | Gather variables for each operating system 2026-01-22 12:54:47.691428 | fa163e0d-6f45-64a1-ca76-000000002dd2 | TIMING | tripleo_container_manage : Gather variables for each operating system | standalone | 0:15:41.656430 | 0.12s 2026-01-22 12:54:47.722406 | fa163e0d-6f45-64a1-ca76-000000002dd3 | TASK | Create container logs path 2026-01-22 12:54:47.954949 | fa163e0d-6f45-64a1-ca76-000000002dd3 | OK | Create container logs path | standalone 2026-01-22 12:54:47.956294 | fa163e0d-6f45-64a1-ca76-000000002dd3 | TIMING | tripleo_container_manage : Create container logs path | standalone | 0:15:41.921302 | 0.23s 2026-01-22 12:54:47.987989 | fa163e0d-6f45-64a1-ca76-000000002dd5 | TASK | Find all matching configs configs for in /var/lib/tripleo-config/container-puppet-config/step_1 2026-01-22 12:54:48.234241 | fa163e0d-6f45-64a1-ca76-000000002dd5 | OK | Find all matching configs configs for in /var/lib/tripleo-config/container-puppet-config/step_1 | standalone 2026-01-22 12:54:48.235431 | fa163e0d-6f45-64a1-ca76-000000002dd5 | TIMING | tripleo_container_manage : Find all matching configs configs for in /var/lib/tripleo-config/container-puppet-config/step_1 | standalone | 0:15:42.200438 | 0.25s 2026-01-22 12:54:48.264407 | fa163e0d-6f45-64a1-ca76-000000002dd6 | TASK | Finalise hashes for all containers 2026-01-22 12:54:48.307017 | fa163e0d-6f45-64a1-ca76-000000002dd6 | OK | Finalise hashes for all containers | standalone 2026-01-22 12:54:48.308243 | fa163e0d-6f45-64a1-ca76-000000002dd6 | TIMING | tripleo_container_manage : Finalise hashes for all containers | standalone | 0:15:42.273250 | 0.04s 2026-01-22 12:54:48.337839 | fa163e0d-6f45-64a1-ca76-000000002dd8 | TASK | Manage systemd shutdown files 2026-01-22 12:54:48.370183 | fa163e0d-6f45-64a1-ca76-000000002dd8 | SKIPPED | Manage systemd shutdown files | standalone 2026-01-22 12:54:48.371329 | fa163e0d-6f45-64a1-ca76-000000002dd8 | TIMING | tripleo_container_manage : Manage systemd shutdown files | standalone | 0:15:42.336336 | 0.03s 2026-01-22 12:54:48.400380 | fa163e0d-6f45-64a1-ca76-000000002dda | TASK | Update container 
configs with new config hashes 2026-01-22 12:54:48.450179 | fa163e0d-6f45-64a1-ca76-000000002dda | SKIPPED | Update container configs with new config hashes | standalone 2026-01-22 12:54:48.451383 | fa163e0d-6f45-64a1-ca76-000000002dda | TIMING | tripleo_container_manage : Update container configs with new config hashes | standalone | 0:15:42.416391 | 0.05s 2026-01-22 12:54:48.482684 | fa163e0d-6f45-64a1-ca76-000000002ddb | TASK | Delete orphan containers from /var/lib/tripleo-config/container-puppet-config/step_1 2026-01-22 12:54:48.532726 | fa163e0d-6f45-64a1-ca76-000000002ddb | SKIPPED | Delete orphan containers from /var/lib/tripleo-config/container-puppet-config/step_1 | standalone 2026-01-22 12:54:48.533938 | fa163e0d-6f45-64a1-ca76-000000002ddb | TIMING | tripleo_container_manage : Delete orphan containers from /var/lib/tripleo-config/container-puppet-config/step_1 | standalone | 0:15:42.498946 | 0.05s 2026-01-22 12:54:48.565346 | fa163e0d-6f45-64a1-ca76-000000002ddc | TASK | Create containers from /var/lib/tripleo-config/container-puppet-config/step_1 2026-01-22 12:54:48.616771 | fa163e0d-6f45-64a1-ca76-000000002ddc | SKIPPED | Create containers from /var/lib/tripleo-config/container-puppet-config/step_1 | standalone 2026-01-22 12:54:48.618025 | fa163e0d-6f45-64a1-ca76-000000002ddc | TIMING | tripleo_container_manage : Create containers from /var/lib/tripleo-config/container-puppet-config/step_1 | standalone | 0:15:42.583033 | 0.05s PLAY [External deployment step 2] ********************************************** 2026-01-22 12:54:48.798234 | fa163e0d-6f45-64a1-ca76-0000000000c6 | TASK | External deployment step 2 2026-01-22 12:54:48.827676 | fa163e0d-6f45-64a1-ca76-0000000000c6 | OK | External deployment step 2 | undercloud -> localhost | result={ "changed": false, "msg": "Use --start-at-task 'External deployment step 2' to resume from this task" } [WARNING]: ('undercloud -> localhost', 'fa163e0d-6f45-64a1-ca76-0000000000c6') missing from stats 2026-01-22 12:54:48.885800 | fa163e0d-6f45-64a1-ca76-0000000000c7 | TIMING | include_tasks | undercloud | 0:15:42.850801 | 0.04s 2026-01-22 12:54:48.892204 | 1df0dbc4-c5aa-49af-9804-812a9d35723b | INCLUDED | /root/standalone-ansible-mz1ymllk/external_deploy_steps_tasks_step2.yaml | undercloud PLAY [Deploy step tasks for 2] ************************************************* 2026-01-22 12:54:49.028509 | fa163e0d-6f45-64a1-ca76-0000000000ca | TASK | Deploy step tasks for 2 2026-01-22 12:54:49.054829 | fa163e0d-6f45-64a1-ca76-0000000000ca | OK | Deploy step tasks for 2 | standalone -> localhost | result={ "changed": false, "msg": "Use --start-at-task 'Deploy step tasks for 2' to resume from this task" } [WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-0000000000ca') missing from stats 2026-01-22 12:54:49.077159 | fa163e0d-6f45-64a1-ca76-0000000000cb | TASK | Write the config_step hieradata for the deploy step 2 tasks 2026-01-22 12:54:49.601928 | fa163e0d-6f45-64a1-ca76-0000000000cb | CHANGED | Write the config_step hieradata for the deploy step 2 tasks | standalone 2026-01-22 12:54:49.603293 | fa163e0d-6f45-64a1-ca76-0000000000cb | TIMING | Write the config_step hieradata for the deploy step 2 tasks | standalone | 0:15:43.568299 | 0.53s [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. 
Found: '{{ playbook_dir }}/{{ _task_file_path }}' is exists 2026-01-22 12:54:49.671484 | fa163e0d-6f45-64a1-ca76-0000000000cc | TIMING | include_tasks | standalone | 0:15:43.636488 | 0.04s 2026-01-22 12:54:49.766254 | 5ebb3356-bbff-48ba-a1a6-501d6daa702c | INCLUDED | /root/standalone-ansible-mz1ymllk/Standalone/deploy_steps_tasks_step2.yaml | standalone 2026-01-22 12:54:49.801164 | fa163e0d-6f45-64a1-ca76-000000003075 | TASK | Push script 2026-01-22 12:54:50.268022 | fa163e0d-6f45-64a1-ca76-000000003075 | CHANGED | Push script | standalone 2026-01-22 12:54:50.268935 | fa163e0d-6f45-64a1-ca76-000000003075 | TIMING | Push script | standalone | 0:15:44.233950 | 0.47s 2026-01-22 12:54:50.290867 | fa163e0d-6f45-64a1-ca76-000000003076 | TASK | Insert cronjob in root crontab 2026-01-22 12:54:50.723402 | fa163e0d-6f45-64a1-ca76-000000003076 | CHANGED | Insert cronjob in root crontab | standalone 2026-01-22 12:54:50.724814 | fa163e0d-6f45-64a1-ca76-000000003076 | TIMING | Insert cronjob in root crontab | standalone | 0:15:44.689816 | 0.43s 2026-01-22 12:54:50.755022 | fa163e0d-6f45-64a1-ca76-00000000307d | TASK | Gather variables for each operating system 2026-01-22 12:54:50.879481 | fa163e0d-6f45-64a1-ca76-00000000307d | TIMING | tripleo_ha_wrapper : Gather variables for each operating system | standalone | 0:15:44.844483 | 0.12s 2026-01-22 12:54:50.908243 | fa163e0d-6f45-64a1-ca76-00000000307e | TASK | Detect if resource is being created or already exists 2026-01-22 12:54:51.672052 | fa163e0d-6f45-64a1-ca76-00000000307e | CHANGED | Detect if resource is being created or already exists | standalone 2026-01-22 12:54:51.673409 | fa163e0d-6f45-64a1-ca76-00000000307e | TIMING | tripleo_ha_wrapper : Detect if resource is being created or already exists | standalone | 0:15:45.638418 | 0.76s 2026-01-22 12:54:51.703688 | fa163e0d-6f45-64a1-ca76-00000000307f | TASK | Run init bundle puppet on the host for haproxy 2026-01-22 12:56:38.598736 | fa163e0d-6f45-64a1-ca76-00000000307f | CHANGED | Run init bundle puppet on the host for haproxy | standalone 2026-01-22 12:56:38.601730 | fa163e0d-6f45-64a1-ca76-00000000307f | TIMING | tripleo_ha_wrapper : Run init bundle puppet on the host for haproxy | standalone | 0:17:32.566736 | 106.90s 2026-01-22 12:56:38.634291 | fa163e0d-6f45-64a1-ca76-000000003080 | TASK | Check if /var/lib/config-data/puppet-generated/haproxy exists 2026-01-22 12:56:38.900107 | fa163e0d-6f45-64a1-ca76-000000003080 | OK | Check if /var/lib/config-data/puppet-generated/haproxy exists | standalone 2026-01-22 12:56:38.901508 | fa163e0d-6f45-64a1-ca76-000000003080 | TIMING | tripleo_ha_wrapper : Check if /var/lib/config-data/puppet-generated/haproxy exists | standalone | 0:17:32.866516 | 0.26s 2026-01-22 12:56:38.930740 | fa163e0d-6f45-64a1-ca76-000000003081 | TASK | Run pacemaker restart if the config file for the service changed 2026-01-22 12:56:39.524488 | fa163e0d-6f45-64a1-ca76-000000003081 | CHANGED | Run pacemaker restart if the config file for the service changed | standalone 2026-01-22 12:56:39.525839 | fa163e0d-6f45-64a1-ca76-000000003081 | TIMING | tripleo_ha_wrapper : Run pacemaker restart if the config file for the service changed | standalone | 0:17:33.490848 | 0.59s 2026-01-22 12:56:39.556310 | fa163e0d-6f45-64a1-ca76-000000003082 | TASK | Check if an image update marker exists for bundle haproxy-bundle 2026-01-22 12:56:39.801459 | fa163e0d-6f45-64a1-ca76-000000003082 | OK | Check if an image update marker exists for bundle haproxy-bundle | standalone 2026-01-22 12:56:39.802733 | 
fa163e0d-6f45-64a1-ca76-000000003082 | TIMING | tripleo_ha_wrapper : Check if an image update marker exists for bundle haproxy-bundle | standalone | 0:17:33.767741 | 0.24s 2026-01-22 12:56:39.833813 | fa163e0d-6f45-64a1-ca76-000000003084 | TASK | Get container image tag from marker for bundle haproxy-bundle 2026-01-22 12:56:39.887160 | fa163e0d-6f45-64a1-ca76-000000003084 | SKIPPED | Get container image tag from marker for bundle haproxy-bundle | standalone 2026-01-22 12:56:39.888223 | fa163e0d-6f45-64a1-ca76-000000003084 | TIMING | tripleo_ha_wrapper : Get container image tag from marker for bundle haproxy-bundle | standalone | 0:17:33.853230 | 0.05s 2026-01-22 12:56:39.918321 | fa163e0d-6f45-64a1-ca76-000000003085 | TASK | Get container image tag from bundle haproxy-bundle 2026-01-22 12:56:39.972382 | fa163e0d-6f45-64a1-ca76-000000003085 | SKIPPED | Get container image tag from bundle haproxy-bundle | standalone 2026-01-22 12:56:39.973948 | fa163e0d-6f45-64a1-ca76-000000003085 | TIMING | tripleo_ha_wrapper : Get container image tag from bundle haproxy-bundle | standalone | 0:17:33.938956 | 0.05s 2026-01-22 12:56:40.003794 | fa163e0d-6f45-64a1-ca76-000000003086 | TASK | Compare tags between marker and bundle haproxy-bundle 2026-01-22 12:56:40.056270 | fa163e0d-6f45-64a1-ca76-000000003086 | SKIPPED | Compare tags between marker and bundle haproxy-bundle | standalone 2026-01-22 12:56:40.057514 | fa163e0d-6f45-64a1-ca76-000000003086 | TIMING | tripleo_ha_wrapper : Compare tags between marker and bundle haproxy-bundle | standalone | 0:17:34.022522 | 0.05s 2026-01-22 12:56:40.089699 | fa163e0d-6f45-64a1-ca76-000000003087 | TASK | Remove old image tag {{ old_image }} for bundle {{ tripleo_ha_wrapper_bundle_name }} 2026-01-22 12:56:40.141878 | fa163e0d-6f45-64a1-ca76-000000003087 | SKIPPED | Remove old image tag {{ old_image }} for bundle {{ tripleo_ha_wrapper_bundle_name }} | standalone 2026-01-22 12:56:40.143136 | fa163e0d-6f45-64a1-ca76-000000003087 | TIMING | tripleo_ha_wrapper : Remove old image tag {{ old_image }} for bundle {{ tripleo_ha_wrapper_bundle_name }} | standalone | 0:17:34.108143 | 0.05s 2026-01-22 12:56:40.173770 | fa163e0d-6f45-64a1-ca76-000000003088 | TASK | Remove update marker for bundle haproxy-bundle 2026-01-22 12:56:40.226930 | fa163e0d-6f45-64a1-ca76-000000003088 | SKIPPED | Remove update marker for bundle haproxy-bundle | standalone 2026-01-22 12:56:40.228240 | fa163e0d-6f45-64a1-ca76-000000003088 | TIMING | tripleo_ha_wrapper : Remove update marker for bundle haproxy-bundle | standalone | 0:17:34.193248 | 0.05s 2026-01-22 12:56:40.284002 | fa163e0d-6f45-64a1-ca76-0000000030a9 | TASK | Gather variables for each operating system 2026-01-22 12:56:40.400153 | fa163e0d-6f45-64a1-ca76-0000000030a9 | TIMING | tripleo_ha_wrapper : Gather variables for each operating system | standalone | 0:17:34.365151 | 0.11s 2026-01-22 12:56:40.429803 | fa163e0d-6f45-64a1-ca76-0000000030aa | TASK | Detect if resource is being created or already exists 2026-01-22 12:56:41.164842 | fa163e0d-6f45-64a1-ca76-0000000030aa | CHANGED | Detect if resource is being created or already exists | standalone 2026-01-22 12:56:41.166229 | fa163e0d-6f45-64a1-ca76-0000000030aa | TIMING | tripleo_ha_wrapper : Detect if resource is being created or already exists | standalone | 0:17:35.131238 | 0.73s 2026-01-22 12:56:41.196460 | fa163e0d-6f45-64a1-ca76-0000000030ab | TASK | Run init bundle puppet on the host for mysql 2026-01-22 12:57:10.496455 | fa163e0d-6f45-64a1-ca76-0000000030ab | CHANGED | Run init bundle 
puppet on the host for mysql | standalone 2026-01-22 12:57:10.498374 | fa163e0d-6f45-64a1-ca76-0000000030ab | TIMING | tripleo_ha_wrapper : Run init bundle puppet on the host for mysql | standalone | 0:18:04.463380 | 29.30s 2026-01-22 12:57:10.534811 | fa163e0d-6f45-64a1-ca76-0000000030ac | TASK | Check if /var/lib/config-data/puppet-generated/mysql exists 2026-01-22 12:57:10.764600 | fa163e0d-6f45-64a1-ca76-0000000030ac | OK | Check if /var/lib/config-data/puppet-generated/mysql exists | standalone 2026-01-22 12:57:10.765593 | fa163e0d-6f45-64a1-ca76-0000000030ac | TIMING | tripleo_ha_wrapper : Check if /var/lib/config-data/puppet-generated/mysql exists | standalone | 0:18:04.730605 | 0.23s 2026-01-22 12:57:10.789043 | fa163e0d-6f45-64a1-ca76-0000000030ad | TASK | Run pacemaker restart if the config file for the service changed 2026-01-22 12:57:11.233744 | fa163e0d-6f45-64a1-ca76-0000000030ad | CHANGED | Run pacemaker restart if the config file for the service changed | standalone 2026-01-22 12:57:11.234445 | fa163e0d-6f45-64a1-ca76-0000000030ad | TIMING | tripleo_ha_wrapper : Run pacemaker restart if the config file for the service changed | standalone | 0:18:05.199463 | 0.44s 2026-01-22 12:57:11.254143 | fa163e0d-6f45-64a1-ca76-0000000030ae | TASK | Check if an image update marker exists for bundle galera-bundle 2026-01-22 12:57:11.502758 | fa163e0d-6f45-64a1-ca76-0000000030ae | OK | Check if an image update marker exists for bundle galera-bundle | standalone 2026-01-22 12:57:11.504008 | fa163e0d-6f45-64a1-ca76-0000000030ae | TIMING | tripleo_ha_wrapper : Check if an image update marker exists for bundle galera-bundle | standalone | 0:18:05.469017 | 0.25s 2026-01-22 12:57:11.535832 | fa163e0d-6f45-64a1-ca76-0000000030b0 | TASK | Get container image tag from marker for bundle galera-bundle 2026-01-22 12:57:11.568285 | fa163e0d-6f45-64a1-ca76-0000000030b0 | SKIPPED | Get container image tag from marker for bundle galera-bundle | standalone 2026-01-22 12:57:11.569375 | fa163e0d-6f45-64a1-ca76-0000000030b0 | TIMING | tripleo_ha_wrapper : Get container image tag from marker for bundle galera-bundle | standalone | 0:18:05.534381 | 0.03s 2026-01-22 12:57:11.600143 | fa163e0d-6f45-64a1-ca76-0000000030b1 | TASK | Get container image tag from bundle galera-bundle 2026-01-22 12:57:11.654280 | fa163e0d-6f45-64a1-ca76-0000000030b1 | SKIPPED | Get container image tag from bundle galera-bundle | standalone 2026-01-22 12:57:11.655532 | fa163e0d-6f45-64a1-ca76-0000000030b1 | TIMING | tripleo_ha_wrapper : Get container image tag from bundle galera-bundle | standalone | 0:18:05.620539 | 0.05s 2026-01-22 12:57:11.686336 | fa163e0d-6f45-64a1-ca76-0000000030b2 | TASK | Compare tags between marker and bundle galera-bundle 2026-01-22 12:57:11.734276 | fa163e0d-6f45-64a1-ca76-0000000030b2 | SKIPPED | Compare tags between marker and bundle galera-bundle | standalone 2026-01-22 12:57:11.735091 | fa163e0d-6f45-64a1-ca76-0000000030b2 | TIMING | tripleo_ha_wrapper : Compare tags between marker and bundle galera-bundle | standalone | 0:18:05.700105 | 0.05s 2026-01-22 12:57:11.759244 | fa163e0d-6f45-64a1-ca76-0000000030b3 | TASK | Remove old image tag {{ old_image }} for bundle {{ tripleo_ha_wrapper_bundle_name }} 2026-01-22 12:57:11.809273 | fa163e0d-6f45-64a1-ca76-0000000030b3 | SKIPPED | Remove old image tag {{ old_image }} for bundle {{ tripleo_ha_wrapper_bundle_name }} | standalone 2026-01-22 12:57:11.811177 | fa163e0d-6f45-64a1-ca76-0000000030b3 | TIMING | tripleo_ha_wrapper : Remove old image tag {{ old_image 
}} for bundle {{ tripleo_ha_wrapper_bundle_name }} | standalone | 0:18:05.776187 | 0.05s 2026-01-22 12:57:11.836510 | fa163e0d-6f45-64a1-ca76-0000000030b4 | TASK | Remove update marker for bundle galera-bundle 2026-01-22 12:57:11.882450 | fa163e0d-6f45-64a1-ca76-0000000030b4 | SKIPPED | Remove update marker for bundle galera-bundle | standalone 2026-01-22 12:57:11.884961 | fa163e0d-6f45-64a1-ca76-0000000030b4 | TIMING | tripleo_ha_wrapper : Remove update marker for bundle galera-bundle | standalone | 0:18:05.849872 | 0.05s 2026-01-22 12:57:11.947374 | fa163e0d-6f45-64a1-ca76-0000000030d5 | TASK | Gather variables for each operating system 2026-01-22 12:57:12.070971 | fa163e0d-6f45-64a1-ca76-0000000030d5 | TIMING | tripleo_ha_wrapper : Gather variables for each operating system | standalone | 0:18:06.035979 | 0.12s 2026-01-22 12:57:12.098981 | fa163e0d-6f45-64a1-ca76-0000000030d6 | TASK | Detect if resource is being created or already exists 2026-01-22 12:57:12.857926 | fa163e0d-6f45-64a1-ca76-0000000030d6 | CHANGED | Detect if resource is being created or already exists | standalone 2026-01-22 12:57:12.858749 | fa163e0d-6f45-64a1-ca76-0000000030d6 | TIMING | tripleo_ha_wrapper : Detect if resource is being created or already exists | standalone | 0:18:06.823765 | 0.76s 2026-01-22 12:57:12.878827 | fa163e0d-6f45-64a1-ca76-0000000030d7 | TASK | Run init bundle puppet on the host for oslo_messaging_rpc 2026-01-22 12:57:39.942619 | fa163e0d-6f45-64a1-ca76-0000000030d7 | CHANGED | Run init bundle puppet on the host for oslo_messaging_rpc | standalone 2026-01-22 12:57:39.944430 | fa163e0d-6f45-64a1-ca76-0000000030d7 | TIMING | tripleo_ha_wrapper : Run init bundle puppet on the host for oslo_messaging_rpc | standalone | 0:18:33.909436 | 27.06s 2026-01-22 12:57:39.976843 | fa163e0d-6f45-64a1-ca76-0000000030d8 | TASK | Check if /var/lib/config-data/puppet-generated/rabbitmq exists 2026-01-22 12:57:40.270747 | fa163e0d-6f45-64a1-ca76-0000000030d8 | OK | Check if /var/lib/config-data/puppet-generated/rabbitmq exists | standalone 2026-01-22 12:57:40.271875 | fa163e0d-6f45-64a1-ca76-0000000030d8 | TIMING | tripleo_ha_wrapper : Check if /var/lib/config-data/puppet-generated/rabbitmq exists | standalone | 0:18:34.236889 | 0.29s 2026-01-22 12:57:40.294396 | fa163e0d-6f45-64a1-ca76-0000000030d9 | TASK | Run pacemaker restart if the config file for the service changed 2026-01-22 12:57:40.719461 | fa163e0d-6f45-64a1-ca76-0000000030d9 | CHANGED | Run pacemaker restart if the config file for the service changed | standalone 2026-01-22 12:57:40.721923 | fa163e0d-6f45-64a1-ca76-0000000030d9 | TIMING | tripleo_ha_wrapper : Run pacemaker restart if the config file for the service changed | standalone | 0:18:34.686926 | 0.43s 2026-01-22 12:57:40.753828 | fa163e0d-6f45-64a1-ca76-0000000030da | TASK | Check if an image update marker exists for bundle rabbitmq-bundle 2026-01-22 12:57:40.994627 | fa163e0d-6f45-64a1-ca76-0000000030da | OK | Check if an image update marker exists for bundle rabbitmq-bundle | standalone 2026-01-22 12:57:40.995982 | fa163e0d-6f45-64a1-ca76-0000000030da | TIMING | tripleo_ha_wrapper : Check if an image update marker exists for bundle rabbitmq-bundle | standalone | 0:18:34.960989 | 0.24s 2026-01-22 12:57:41.027027 | fa163e0d-6f45-64a1-ca76-0000000030dc | TASK | Get container image tag from marker for bundle rabbitmq-bundle 2026-01-22 12:57:41.056566 | fa163e0d-6f45-64a1-ca76-0000000030dc | SKIPPED | Get container image tag from marker for bundle rabbitmq-bundle | standalone 2026-01-22 
12:57:41.058124 | fa163e0d-6f45-64a1-ca76-0000000030dc | TIMING | tripleo_ha_wrapper : Get container image tag from marker for bundle rabbitmq-bundle | standalone | 0:18:35.023130 | 0.03s 2026-01-22 12:57:41.092126 | fa163e0d-6f45-64a1-ca76-0000000030dd | TASK | Get container image tag from bundle rabbitmq-bundle 2026-01-22 12:57:41.143091 | fa163e0d-6f45-64a1-ca76-0000000030dd | SKIPPED | Get container image tag from bundle rabbitmq-bundle | standalone 2026-01-22 12:57:41.144616 | fa163e0d-6f45-64a1-ca76-0000000030dd | TIMING | tripleo_ha_wrapper : Get container image tag from bundle rabbitmq-bundle | standalone | 0:18:35.109622 | 0.05s 2026-01-22 12:57:41.177121 | fa163e0d-6f45-64a1-ca76-0000000030de | TASK | Compare tags between marker and bundle rabbitmq-bundle 2026-01-22 12:57:41.238559 | fa163e0d-6f45-64a1-ca76-0000000030de | SKIPPED | Compare tags between marker and bundle rabbitmq-bundle | standalone 2026-01-22 12:57:41.240069 | fa163e0d-6f45-64a1-ca76-0000000030de | TIMING | tripleo_ha_wrapper : Compare tags between marker and bundle rabbitmq-bundle | standalone | 0:18:35.205075 | 0.06s 2026-01-22 12:57:41.276469 | fa163e0d-6f45-64a1-ca76-0000000030df | TASK | Remove old image tag {{ old_image }} for bundle {{ tripleo_ha_wrapper_bundle_name }} 2026-01-22 12:57:41.327201 | fa163e0d-6f45-64a1-ca76-0000000030df | SKIPPED | Remove old image tag {{ old_image }} for bundle {{ tripleo_ha_wrapper_bundle_name }} | standalone 2026-01-22 12:57:41.328625 | fa163e0d-6f45-64a1-ca76-0000000030df | TIMING | tripleo_ha_wrapper : Remove old image tag {{ old_image }} for bundle {{ tripleo_ha_wrapper_bundle_name }} | standalone | 0:18:35.293631 | 0.05s 2026-01-22 12:57:41.361035 | fa163e0d-6f45-64a1-ca76-0000000030e0 | TASK | Remove update marker for bundle rabbitmq-bundle 2026-01-22 12:57:41.416256 | fa163e0d-6f45-64a1-ca76-0000000030e0 | SKIPPED | Remove update marker for bundle rabbitmq-bundle | standalone 2026-01-22 12:57:41.417582 | fa163e0d-6f45-64a1-ca76-0000000030e0 | TIMING | tripleo_ha_wrapper : Remove update marker for bundle rabbitmq-bundle | standalone | 0:18:35.382588 | 0.05s 2026-01-22 12:57:41.476298 | fa163e0d-6f45-64a1-ca76-000000003101 | TASK | Gather variables for each operating system 2026-01-22 12:57:41.595350 | fa163e0d-6f45-64a1-ca76-000000003101 | TIMING | tripleo_ha_wrapper : Gather variables for each operating system | standalone | 0:18:35.560348 | 0.12s 2026-01-22 12:57:41.630783 | fa163e0d-6f45-64a1-ca76-000000003102 | TASK | Detect if resource is being created or already exists 2026-01-22 12:57:42.373386 | fa163e0d-6f45-64a1-ca76-000000003102 | CHANGED | Detect if resource is being created or already exists | standalone 2026-01-22 12:57:42.374216 | fa163e0d-6f45-64a1-ca76-000000003102 | TIMING | tripleo_ha_wrapper : Detect if resource is being created or already exists | standalone | 0:18:36.339225 | 0.74s 2026-01-22 12:57:42.398863 | fa163e0d-6f45-64a1-ca76-000000003103 | TASK | Run init bundle puppet on the host for redis 2026-01-22 12:58:10.475638 | fa163e0d-6f45-64a1-ca76-000000003103 | CHANGED | Run init bundle puppet on the host for redis | standalone 2026-01-22 12:58:10.477384 | fa163e0d-6f45-64a1-ca76-000000003103 | TIMING | tripleo_ha_wrapper : Run init bundle puppet on the host for redis | standalone | 0:19:04.442391 | 28.08s 2026-01-22 12:58:10.509228 | fa163e0d-6f45-64a1-ca76-000000003104 | TASK | Check if /var/lib/config-data/puppet-generated/redis exists 2026-01-22 12:58:10.742111 | fa163e0d-6f45-64a1-ca76-000000003104 | OK | Check if 
/var/lib/config-data/puppet-generated/redis exists | standalone 2026-01-22 12:58:10.743513 | fa163e0d-6f45-64a1-ca76-000000003104 | TIMING | tripleo_ha_wrapper : Check if /var/lib/config-data/puppet-generated/redis exists | standalone | 0:19:04.708520 | 0.23s 2026-01-22 12:58:10.774706 | fa163e0d-6f45-64a1-ca76-000000003105 | TASK | Run pacemaker restart if the config file for the service changed 2026-01-22 12:58:11.136218 | fa163e0d-6f45-64a1-ca76-000000003105 | CHANGED | Run pacemaker restart if the config file for the service changed | standalone 2026-01-22 12:58:11.137512 | fa163e0d-6f45-64a1-ca76-000000003105 | TIMING | tripleo_ha_wrapper : Run pacemaker restart if the config file for the service changed | standalone | 0:19:05.102519 | 0.36s 2026-01-22 12:58:11.164635 | fa163e0d-6f45-64a1-ca76-000000003106 | TASK | Check if an image update marker exists for bundle redis-bundle 2026-01-22 12:58:11.383614 | fa163e0d-6f45-64a1-ca76-000000003106 | OK | Check if an image update marker exists for bundle redis-bundle | standalone 2026-01-22 12:58:11.385146 | fa163e0d-6f45-64a1-ca76-000000003106 | TIMING | tripleo_ha_wrapper : Check if an image update marker exists for bundle redis-bundle | standalone | 0:19:05.350153 | 0.22s 2026-01-22 12:58:11.409939 | fa163e0d-6f45-64a1-ca76-000000003108 | TASK | Get container image tag from marker for bundle redis-bundle 2026-01-22 12:58:11.438951 | fa163e0d-6f45-64a1-ca76-000000003108 | SKIPPED | Get container image tag from marker for bundle redis-bundle | standalone 2026-01-22 12:58:11.439736 | fa163e0d-6f45-64a1-ca76-000000003108 | TIMING | tripleo_ha_wrapper : Get container image tag from marker for bundle redis-bundle | standalone | 0:19:05.404750 | 0.03s 2026-01-22 12:58:11.463736 | fa163e0d-6f45-64a1-ca76-000000003109 | TASK | Get container image tag from bundle redis-bundle 2026-01-22 12:58:11.527325 | fa163e0d-6f45-64a1-ca76-000000003109 | SKIPPED | Get container image tag from bundle redis-bundle | standalone 2026-01-22 12:58:11.528135 | fa163e0d-6f45-64a1-ca76-000000003109 | TIMING | tripleo_ha_wrapper : Get container image tag from bundle redis-bundle | standalone | 0:19:05.493149 | 0.06s 2026-01-22 12:58:11.554448 | fa163e0d-6f45-64a1-ca76-00000000310a | TASK | Compare tags between marker and bundle redis-bundle 2026-01-22 12:58:11.594204 | fa163e0d-6f45-64a1-ca76-00000000310a | SKIPPED | Compare tags between marker and bundle redis-bundle | standalone 2026-01-22 12:58:11.596070 | fa163e0d-6f45-64a1-ca76-00000000310a | TIMING | tripleo_ha_wrapper : Compare tags between marker and bundle redis-bundle | standalone | 0:19:05.561014 | 0.04s 2026-01-22 12:58:11.619655 | fa163e0d-6f45-64a1-ca76-00000000310b | TASK | Remove old image tag {{ old_image }} for bundle {{ tripleo_ha_wrapper_bundle_name }} 2026-01-22 12:58:11.653028 | fa163e0d-6f45-64a1-ca76-00000000310b | SKIPPED | Remove old image tag {{ old_image }} for bundle {{ tripleo_ha_wrapper_bundle_name }} | standalone 2026-01-22 12:58:11.654780 | fa163e0d-6f45-64a1-ca76-00000000310b | TIMING | tripleo_ha_wrapper : Remove old image tag {{ old_image }} for bundle {{ tripleo_ha_wrapper_bundle_name }} | standalone | 0:19:05.619792 | 0.03s 2026-01-22 12:58:11.678708 | fa163e0d-6f45-64a1-ca76-00000000310c | TASK | Remove update marker for bundle redis-bundle 2026-01-22 12:58:11.709948 | fa163e0d-6f45-64a1-ca76-00000000310c | SKIPPED | Remove update marker for bundle redis-bundle | standalone 2026-01-22 12:58:11.710770 | fa163e0d-6f45-64a1-ca76-00000000310c | TIMING | tripleo_ha_wrapper : Remove 
update marker for bundle redis-bundle | standalone | 0:19:05.675782 | 0.03s 2026-01-22 12:58:11.747170 | fa163e0d-6f45-64a1-ca76-0000000000ce | TASK | Check if /var/lib/tripleo-config/container-startup-config/step_2 already exists 2026-01-22 12:58:11.976297 | fa163e0d-6f45-64a1-ca76-0000000000ce | OK | Check if /var/lib/tripleo-config/container-startup-config/step_2 already exists | standalone 2026-01-22 12:58:11.978202 | fa163e0d-6f45-64a1-ca76-0000000000ce | TIMING | Check if /var/lib/tripleo-config/container-startup-config/step_2 already exists | standalone | 0:19:05.943203 | 0.23s 2026-01-22 12:58:12.086213 | fa163e0d-6f45-64a1-ca76-0000000000cf | TIMING | include_tasks | standalone | 0:19:06.051193 | 0.08s 2026-01-22 12:58:12.127195 | 719942ed-3486-4151-a92a-846551685dc1 | INCLUDED | /root/standalone-ansible-mz1ymllk/common_deploy_steps_tasks.yaml | standalone 2026-01-22 12:58:12.154994 | fa163e0d-6f45-64a1-ca76-0000000031a1 | TASK | Write the config_step hieradata 2026-01-22 12:58:12.648279 | fa163e0d-6f45-64a1-ca76-0000000031a1 | OK | Write the config_step hieradata | standalone 2026-01-22 12:58:12.649065 | fa163e0d-6f45-64a1-ca76-0000000031a1 | TIMING | Write the config_step hieradata | standalone | 0:19:06.614080 | 0.49s 2026-01-22 12:58:12.669086 | fa163e0d-6f45-64a1-ca76-0000000031a2 | TASK | Run puppet host configuration for step 2 2026-01-22 12:58:12.918605 | fa163e0d-6f45-64a1-ca76-0000000031a2 | CHANGED | Run puppet host configuration for step 2 | standalone 2026-01-22 12:58:12.919976 | fa163e0d-6f45-64a1-ca76-0000000031a2 | TIMING | Run puppet host configuration for step 2 | standalone | 0:19:06.884981 | 0.25s 2026-01-22 12:58:12.948883 | fa163e0d-6f45-64a1-ca76-0000000031a3 | TASK | Wait for puppet host configuration to finish 2026-01-22 12:58:13.203910 | fa163e0d-6f45-64a1-ca76-0000000031a3 | WAITING | Wait for puppet host configuration to finish | standalone | 360 retries left 2026-01-22 12:58:23.409435 | fa163e0d-6f45-64a1-ca76-0000000031a3 | WAITING | Wait for puppet host configuration to finish | standalone | 359 retries left 2026-01-22 12:58:33.626696 | fa163e0d-6f45-64a1-ca76-0000000031a3 | CHANGED | Wait for puppet host configuration to finish | standalone 2026-01-22 12:58:33.628988 | fa163e0d-6f45-64a1-ca76-0000000031a3 | TIMING | Wait for puppet host configuration to finish | standalone | 0:19:27.593986 | 20.68s 2026-01-22 12:58:33.659153 | fa163e0d-6f45-64a1-ca76-0000000031a4 | TASK | Debug output for task: Run puppet host configuration for step 2 2026-01-22 12:58:33.742370 | fa163e0d-6f45-64a1-ca76-0000000031a4 | CHANGED | Debug output for task: Run puppet host configuration for step 2 | standalone | result={ "changed": true, "failed_when_result": false, "puppet_host_outputs.stdout_lines | default([]) | union(puppet_host_outputs.stderr_lines | default([]))": [ "<13>Jan 22 12:58:13 puppet-user: Warning: /etc/puppet/hiera.yaml: Use of 'hiera.yaml' version 3 is deprecated. It should be converted to version 5", "<13>Jan 22 12:58:20 puppet-user: (file: /etc/puppet/hiera.yaml)", "<13>Jan 22 12:58:20 puppet-user: Warning: Undefined variable '::deploy_config_name'; ", "<13>Jan 22 12:58:20 puppet-user: (file & line not available)", "<13>Jan 22 12:58:20 puppet-user: Warning: The function 'hiera' is deprecated in favor of using 'lookup'. See https://puppet.com/docs/puppet/7.10/deprecated_language.html", "<13>Jan 22 12:58:20 puppet-user: Warning: Unknown variable: '::deployment_type'. 
(file: /etc/puppet/modules/tripleo/manifests/profile/base/database/mysql/client.pp, line: 89, column: 8)", "<13>Jan 22 12:58:20 puppet-user: Warning: Unknown variable: '::deployment_type'. (file: /etc/puppet/modules/tripleo/manifests/packages.pp, line: 39, column: 69)", "<13>Jan 22 12:58:20 puppet-user: Notice: Compiled catalog for standalone.ooo.test in environment production in 0.48 seconds", "<13>Jan 22 12:58:25 puppet-user: Notice: /Stage[main]/Pacemaker::Resource_defaults/Pcmk_resource_default[resource-stickiness]/ensure: created", "<13>Jan 22 12:58:29 puppet-user: Notice: /Stage[main]/Pacemaker::Resource_op_defaults/Pcmk_resource_op_default[bundle]/ensure: created", "<13>Jan 22 12:58:30 puppet-user: Deprecation Warning: This command is deprecated and will be removed. Please use 'pcs property config' instead.", "<13>Jan 22 12:58:30 puppet-user: Notice: Applied catalog in 9.98 seconds", "<13>Jan 22 12:58:30 puppet-user: Application:", "<13>Jan 22 12:58:30 puppet-user: Initial environment: production", "<13>Jan 22 12:58:30 puppet-user: Converged environment: production", "<13>Jan 22 12:58:30 puppet-user: Run mode: user", "<13>Jan 22 12:58:30 puppet-user: Changes:", "<13>Jan 22 12:58:30 puppet-user: Total: 2", "<13>Jan 22 12:58:30 puppet-user: Events:", "<13>Jan 22 12:58:30 puppet-user: Success: 2", "<13>Jan 22 12:58:30 puppet-user: Resources:", "<13>Jan 22 12:58:30 puppet-user: Changed: 2", "<13>Jan 22 12:58:30 puppet-user: Out of sync: 2", "<13>Jan 22 12:58:30 puppet-user: Total: 28", "<13>Jan 22 12:58:30 puppet-user: Time:", "<13>Jan 22 12:58:30 puppet-user: Filebucket: 0.00", "<13>Jan 22 12:58:30 puppet-user: Schedule: 0.00", "<13>Jan 22 12:58:30 puppet-user: Package: 0.00", "<13>Jan 22 12:58:30 puppet-user: File line: 0.00", "<13>Jan 22 12:58:30 puppet-user: File: 0.00", "<13>Jan 22 12:58:30 puppet-user: Augeas: 0.01", "<13>Jan 22 12:58:30 puppet-user: User: 0.01", "<13>Jan 22 12:58:30 puppet-user: Service: 0.09", "<13>Jan 22 12:58:30 puppet-user: Config retrieval: 0.53", "<13>Jan 22 12:58:30 puppet-user: Pcmk property: 1.46", "<13>Jan 22 12:58:30 puppet-user: Exec: 1.90", "<13>Jan 22 12:58:30 puppet-user: Last run: 1769086710", "<13>Jan 22 12:58:30 puppet-user: Pcmk resource default: 3.11", "<13>Jan 22 12:58:30 puppet-user: Pcmk resource op default: 3.15", "<13>Jan 22 12:58:30 puppet-user: Transaction evaluation: 9.97", "<13>Jan 22 12:58:30 puppet-user: Catalog application: 9.98", "<13>Jan 22 12:58:30 puppet-user: Total: 9.99", "<13>Jan 22 12:58:30 puppet-user: Version:", "<13>Jan 22 12:58:30 puppet-user: Config: 1769086700", "<13>Jan 22 12:58:30 puppet-user: Puppet: 7.10.0" ] } 2026-01-22 12:58:33.743604 | fa163e0d-6f45-64a1-ca76-0000000031a4 | TIMING | Debug output for task: Run puppet host configuration for step 2 | standalone | 0:19:27.708612 | 0.08s 2026-01-22 12:58:33.772433 | fa163e0d-6f45-64a1-ca76-0000000031a5 | TASK | Pre-cache facts for puppet containers 2026-01-22 12:58:33.801369 | fa163e0d-6f45-64a1-ca76-0000000031a5 | TIMING | Pre-cache facts for puppet containers | standalone | 0:19:27.766374 | 0.03s 2026-01-22 12:58:33.868601 | fa163e0d-6f45-64a1-ca76-0000000031e4 | TASK | Gather variables for each operating system 2026-01-22 12:58:33.994112 | fa163e0d-6f45-64a1-ca76-0000000031e4 | TIMING | tripleo_puppet_cache : Gather variables for each operating system | standalone | 0:19:27.959091 | 0.12s 2026-01-22 12:58:34.023386 | fa163e0d-6f45-64a1-ca76-0000000031e5 | TASK | Create puppet caching structures 2026-01-22 12:58:34.268633 | fa163e0d-6f45-64a1-ca76-0000000031e5 | 
CHANGED | Create puppet caching structures | standalone 2026-01-22 12:58:34.270005 | fa163e0d-6f45-64a1-ca76-0000000031e5 | TIMING | tripleo_puppet_cache : Create puppet caching structures | standalone | 0:19:28.235014 | 0.24s 2026-01-22 12:58:34.299060 | fa163e0d-6f45-64a1-ca76-0000000031e6 | TASK | Check for facter.conf 2026-01-22 12:58:34.543822 | fa163e0d-6f45-64a1-ca76-0000000031e6 | OK | Check for facter.conf | standalone 2026-01-22 12:58:34.545181 | fa163e0d-6f45-64a1-ca76-0000000031e6 | TIMING | tripleo_puppet_cache : Check for facter.conf | standalone | 0:19:28.510190 | 0.24s 2026-01-22 12:58:34.574019 | fa163e0d-6f45-64a1-ca76-0000000031e7 | TASK | Remove facter.conf if directory 2026-01-22 12:58:34.624189 | fa163e0d-6f45-64a1-ca76-0000000031e7 | SKIPPED | Remove facter.conf if directory | standalone 2026-01-22 12:58:34.625403 | fa163e0d-6f45-64a1-ca76-0000000031e7 | TIMING | tripleo_puppet_cache : Remove facter.conf if directory | standalone | 0:19:28.590411 | 0.05s 2026-01-22 12:58:34.653766 | fa163e0d-6f45-64a1-ca76-0000000031e8 | TASK | Write facter cache config 2026-01-22 12:58:35.149923 | fa163e0d-6f45-64a1-ca76-0000000031e8 | CHANGED | Write facter cache config | standalone 2026-01-22 12:58:35.151292 | fa163e0d-6f45-64a1-ca76-0000000031e8 | TIMING | tripleo_puppet_cache : Write facter cache config | standalone | 0:19:29.116301 | 0.50s 2026-01-22 12:58:35.180166 | fa163e0d-6f45-64a1-ca76-0000000031e9 | TASK | Cleanup facter cache if exists 2026-01-22 12:58:35.411116 | fa163e0d-6f45-64a1-ca76-0000000031e9 | CHANGED | Cleanup facter cache if exists | standalone 2026-01-22 12:58:35.412500 | fa163e0d-6f45-64a1-ca76-0000000031e9 | TIMING | tripleo_puppet_cache : Cleanup facter cache if exists | standalone | 0:19:29.377509 | 0.23s 2026-01-22 12:58:35.443099 | fa163e0d-6f45-64a1-ca76-0000000031ea | TASK | Pre-cache facts 2026-01-22 12:58:36.068311 | fa163e0d-6f45-64a1-ca76-0000000031ea | CHANGED | Pre-cache facts | standalone 2026-01-22 12:58:36.069625 | fa163e0d-6f45-64a1-ca76-0000000031ea | TIMING | tripleo_puppet_cache : Pre-cache facts | standalone | 0:19:30.034631 | 0.62s 2026-01-22 12:58:36.100429 | fa163e0d-6f45-64a1-ca76-0000000031eb | TASK | Failed deployment if facter fails 2026-01-22 12:58:36.132630 | fa163e0d-6f45-64a1-ca76-0000000031eb | SKIPPED | Failed deployment if facter fails | standalone 2026-01-22 12:58:36.133919 | fa163e0d-6f45-64a1-ca76-0000000031eb | TIMING | tripleo_puppet_cache : Failed deployment if facter fails | standalone | 0:19:30.098923 | 0.03s 2026-01-22 12:58:36.178054 | fa163e0d-6f45-64a1-ca76-0000000031ec | TASK | Sync cached facts 2026-01-22 12:58:36.487760 | fa163e0d-6f45-64a1-ca76-0000000031ec | CHANGED | Sync cached facts | standalone -> 192.168.122.100 [WARNING]: ('standalone -> 192.168.122.100', 'fa163e0d-6f45-64a1-ca76-0000000031ec') missing from stats 2026-01-22 12:58:36.543350 | fa163e0d-6f45-64a1-ca76-0000000031a6 | TASK | Include container-puppet tasks (generate config) during step 1 2026-01-22 12:58:36.583139 | fa163e0d-6f45-64a1-ca76-0000000031a6 | SKIPPED | Include container-puppet tasks (generate config) during step 1 | standalone 2026-01-22 12:58:36.584279 | fa163e0d-6f45-64a1-ca76-0000000031a6 | TIMING | Include container-puppet tasks (generate config) during step 1 | standalone | 0:19:30.549285 | 0.04s 2026-01-22 12:58:36.614786 | fa163e0d-6f45-64a1-ca76-0000000031a8 | TASK | Manage containers for step 2 with tripleo-ansible 2026-01-22 12:58:36.646801 | fa163e0d-6f45-64a1-ca76-0000000031a8 | TIMING | Manage containers for step 2 
with tripleo-ansible | standalone | 0:19:30.611804 | 0.03s 2026-01-22 12:58:36.712255 | fa163e0d-6f45-64a1-ca76-000000003243 | TASK | Gather variables for each operating system 2026-01-22 12:58:36.829375 | fa163e0d-6f45-64a1-ca76-000000003243 | TIMING | tripleo_container_manage : Gather variables for each operating system | standalone | 0:19:30.794370 | 0.12s 2026-01-22 12:58:36.858735 | fa163e0d-6f45-64a1-ca76-000000003244 | TASK | Create container logs path 2026-01-22 12:58:37.112046 | fa163e0d-6f45-64a1-ca76-000000003244 | OK | Create container logs path | standalone 2026-01-22 12:58:37.113441 | fa163e0d-6f45-64a1-ca76-000000003244 | TIMING | tripleo_container_manage : Create container logs path | standalone | 0:19:31.078448 | 0.25s 2026-01-22 12:58:37.144769 | fa163e0d-6f45-64a1-ca76-000000003246 | TASK | Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_2 2026-01-22 12:58:37.420165 | fa163e0d-6f45-64a1-ca76-000000003246 | OK | Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_2 | standalone 2026-01-22 12:58:37.421427 | fa163e0d-6f45-64a1-ca76-000000003246 | TIMING | tripleo_container_manage : Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_2 | standalone | 0:19:31.386433 | 0.28s 2026-01-22 12:58:37.451243 | fa163e0d-6f45-64a1-ca76-000000003247 | TASK | Finalise hashes for all containers 2026-01-22 12:58:37.522863 | fa163e0d-6f45-64a1-ca76-000000003247 | OK | Finalise hashes for all containers | standalone 2026-01-22 12:58:37.523922 | fa163e0d-6f45-64a1-ca76-000000003247 | TIMING | tripleo_container_manage : Finalise hashes for all containers | standalone | 0:19:31.488930 | 0.07s 2026-01-22 12:58:37.552051 | fa163e0d-6f45-64a1-ca76-000000003249 | TASK | Manage systemd shutdown files 2026-01-22 12:58:37.584759 | fa163e0d-6f45-64a1-ca76-000000003249 | TIMING | tripleo_container_manage : Manage systemd shutdown files | standalone | 0:19:31.549763 | 0.03s 2026-01-22 12:58:37.617506 | 4cafdeee-058a-4b15-82ec-aaf2fc5db6e5 | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/shutdown.yml | standalone 2026-01-22 12:58:37.646430 | fa163e0d-6f45-64a1-ca76-00000000327c | TASK | Check if /etc/sysconfig/podman_drop_in exists 2026-01-22 12:58:37.907527 | fa163e0d-6f45-64a1-ca76-00000000327c | OK | Check if /etc/sysconfig/podman_drop_in exists | standalone 2026-01-22 12:58:37.908907 | fa163e0d-6f45-64a1-ca76-00000000327c | TIMING | tripleo_container_manage : Check if /etc/sysconfig/podman_drop_in exists | standalone | 0:19:31.873913 | 0.26s 2026-01-22 12:58:37.971802 | fa163e0d-6f45-64a1-ca76-00000000327d | TASK | Set podman_drop_in fact 2026-01-22 12:58:38.032908 | fa163e0d-6f45-64a1-ca76-00000000327d | OK | Set podman_drop_in fact | standalone 2026-01-22 12:58:38.034037 | fa163e0d-6f45-64a1-ca76-00000000327d | TIMING | tripleo_container_manage : Set podman_drop_in fact | standalone | 0:19:31.999046 | 0.06s 2026-01-22 12:58:38.062548 | fa163e0d-6f45-64a1-ca76-00000000327f | TASK | Deploy tripleo-container-shutdown and tripleo-start-podman-container 2026-01-22 12:58:38.579113 | fa163e0d-6f45-64a1-ca76-00000000327f | OK | Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | item=tripleo-container-shutdown 2026-01-22 12:58:38.580019 | fa163e0d-6f45-64a1-ca76-00000000327f | TIMING | tripleo_container_manage : Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | 0:19:32.545032 | 0.52s 
2026-01-22 12:58:38.993919 | fa163e0d-6f45-64a1-ca76-00000000327f | OK | Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | item=tripleo-start-podman-container 2026-01-22 12:58:38.995010 | fa163e0d-6f45-64a1-ca76-00000000327f | TIMING | tripleo_container_manage : Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | 0:19:32.960019 | 0.93s 2026-01-22 12:58:39.002303 | fa163e0d-6f45-64a1-ca76-00000000327f | TIMING | tripleo_container_manage : Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | 0:19:32.967303 | 0.94s 2026-01-22 12:58:39.031408 | fa163e0d-6f45-64a1-ca76-000000003280 | TASK | Create /usr/lib/systemd/system/tripleo-container-shutdown.service 2026-01-22 12:58:39.489635 | fa163e0d-6f45-64a1-ca76-000000003280 | OK | Create /usr/lib/systemd/system/tripleo-container-shutdown.service | standalone 2026-01-22 12:58:39.491143 | fa163e0d-6f45-64a1-ca76-000000003280 | TIMING | tripleo_container_manage : Create /usr/lib/systemd/system/tripleo-container-shutdown.service | standalone | 0:19:33.456150 | 0.46s 2026-01-22 12:58:39.521896 | fa163e0d-6f45-64a1-ca76-000000003281 | TASK | Create /usr/lib/systemd/system-preset/91-tripleo-container-shutdown.preset 2026-01-22 12:58:39.968603 | fa163e0d-6f45-64a1-ca76-000000003281 | OK | Create /usr/lib/systemd/system-preset/91-tripleo-container-shutdown.preset | standalone 2026-01-22 12:58:39.970234 | fa163e0d-6f45-64a1-ca76-000000003281 | TIMING | tripleo_container_manage : Create /usr/lib/systemd/system-preset/91-tripleo-container-shutdown.preset | standalone | 0:19:33.935241 | 0.45s 2026-01-22 12:58:39.998546 | fa163e0d-6f45-64a1-ca76-000000003282 | TASK | Enable and start tripleo-container-shutdown 2026-01-22 12:58:41.647594 | fa163e0d-6f45-64a1-ca76-000000003282 | OK | Enable and start tripleo-container-shutdown | standalone 2026-01-22 12:58:41.649755 | fa163e0d-6f45-64a1-ca76-000000003282 | TIMING | tripleo_container_manage : Enable and start tripleo-container-shutdown | standalone | 0:19:35.614758 | 1.65s 2026-01-22 12:58:41.684390 | fa163e0d-6f45-64a1-ca76-000000003283 | TASK | Create /usr/lib/systemd/system/netns-placeholder.service 2026-01-22 12:58:42.111259 | fa163e0d-6f45-64a1-ca76-000000003283 | OK | Create /usr/lib/systemd/system/netns-placeholder.service | standalone 2026-01-22 12:58:42.112878 | fa163e0d-6f45-64a1-ca76-000000003283 | TIMING | tripleo_container_manage : Create /usr/lib/systemd/system/netns-placeholder.service | standalone | 0:19:36.077883 | 0.43s 2026-01-22 12:58:42.145991 | fa163e0d-6f45-64a1-ca76-000000003284 | TASK | Create /usr/lib/systemd/system-preset/91-netns-placeholder.preset 2026-01-22 12:58:42.625547 | fa163e0d-6f45-64a1-ca76-000000003284 | OK | Create /usr/lib/systemd/system-preset/91-netns-placeholder.preset | standalone 2026-01-22 12:58:42.627246 | fa163e0d-6f45-64a1-ca76-000000003284 | TIMING | tripleo_container_manage : Create /usr/lib/systemd/system-preset/91-netns-placeholder.preset | standalone | 0:19:36.592247 | 0.48s 2026-01-22 12:58:42.657155 | fa163e0d-6f45-64a1-ca76-000000003285 | TASK | Enable and start netns-placeholder 2026-01-22 12:58:43.317822 | fa163e0d-6f45-64a1-ca76-000000003285 | CHANGED | Enable and start netns-placeholder | standalone 2026-01-22 12:58:43.320061 | fa163e0d-6f45-64a1-ca76-000000003285 | TIMING | tripleo_container_manage : Enable and start netns-placeholder | standalone | 0:19:37.285067 | 0.66s 2026-01-22 12:58:43.352612 | fa163e0d-6f45-64a1-ca76-00000000324b | TASK | Update 
container configs with new config hashes 2026-01-22 12:58:43.415025 | fa163e0d-6f45-64a1-ca76-00000000324b | TIMING | tripleo_container_manage : Update container configs with new config hashes | standalone | 0:19:37.380023 | 0.06s 2026-01-22 12:58:43.436629 | 0120ec9d-557e-4ab0-afd2-c65db39c81fa | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/puppet_config.yml | standalone 2026-01-22 12:58:43.475156 | fa163e0d-6f45-64a1-ca76-0000000032ad | TASK | Update config hashes for container startup configs 2026-01-22 12:58:43.732348 | fa163e0d-6f45-64a1-ca76-0000000032ad | OK | Update config hashes for container startup configs | standalone 2026-01-22 12:58:43.733766 | fa163e0d-6f45-64a1-ca76-0000000032ad | TIMING | tripleo_container_manage : Update config hashes for container startup configs | standalone | 0:19:37.698767 | 0.26s 2026-01-22 12:58:43.764414 | fa163e0d-6f45-64a1-ca76-00000000324c | TASK | Delete orphan containers from /var/lib/tripleo-config/container-startup-config/step_2 2026-01-22 12:58:43.802724 | fa163e0d-6f45-64a1-ca76-00000000324c | TIMING | tripleo_container_manage : Delete orphan containers from /var/lib/tripleo-config/container-startup-config/step_2 | standalone | 0:19:37.767727 | 0.04s 2026-01-22 12:58:43.826306 | 36508d8e-a45b-464b-8195-3c441a1a2025 | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/delete_orphan.yml | standalone 2026-01-22 12:58:43.863769 | fa163e0d-6f45-64a1-ca76-0000000032cc | TASK | Gather podman infos 2026-01-22 12:58:44.855323 | fa163e0d-6f45-64a1-ca76-0000000032cc | OK | Gather podman infos | standalone 2026-01-22 12:58:44.856469 | fa163e0d-6f45-64a1-ca76-0000000032cc | TIMING | tripleo_container_manage : Gather podman infos | standalone | 0:19:38.821478 | 0.99s 2026-01-22 12:58:44.928938 | fa163e0d-6f45-64a1-ca76-0000000032cd | TASK | Delete orphan containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_2 2026-01-22 12:58:44.994271 | fa163e0d-6f45-64a1-ca76-0000000032cd | TIMING | Delete orphan containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_2 | standalone | 0:19:38.959268 | 0.06s 2026-01-22 12:58:45.131706 | fa163e0d-6f45-64a1-ca76-000000003316 | TIMING | tripleo_container_rm : include_tasks | standalone | 0:19:39.096699 | 0.07s 2026-01-22 12:58:45.189559 | fa163e0d-6f45-64a1-ca76-00000000324d | TASK | Create containers from /var/lib/tripleo-config/container-startup-config/step_2 2026-01-22 12:58:45.242396 | fa163e0d-6f45-64a1-ca76-00000000324d | TIMING | tripleo_container_manage : Create containers from /var/lib/tripleo-config/container-startup-config/step_2 | standalone | 0:19:39.207395 | 0.05s 2026-01-22 12:58:45.266401 | f3ad2982-6063-4e7d-b5dd-025d3c443f81 | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/create.yml | standalone 2026-01-22 12:58:45.304928 | fa163e0d-6f45-64a1-ca76-00000000333c | TASK | Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_2 2026-01-22 12:59:08.469009 | fa163e0d-6f45-64a1-ca76-00000000333c | CHANGED | Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_2 | standalone 2026-01-22 12:59:08.470148 | fa163e0d-6f45-64a1-ca76-00000000333c | TIMING | tripleo_container_manage : Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_2 | standalone | 0:20:02.435163 | 23.16s 2026-01-22 12:59:08.495069 | fa163e0d-6f45-64a1-ca76-00000000333d | TASK | Manage container systemd 
services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_2 2026-01-22 12:59:11.167739 | fa163e0d-6f45-64a1-ca76-00000000333d | CHANGED | Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_2 | standalone 2026-01-22 12:59:11.169356 | fa163e0d-6f45-64a1-ca76-00000000333d | TIMING | tripleo_container_manage : Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_2 | standalone | 0:20:05.134363 | 2.67s 2026-01-22 12:59:11.241905 | fa163e0d-6f45-64a1-ca76-0000000031aa | TASK | Clean container_puppet_tasks for standalone step 2 2026-01-22 12:59:11.480503 | fa163e0d-6f45-64a1-ca76-0000000031aa | OK | Clean container_puppet_tasks for standalone step 2 | standalone 2026-01-22 12:59:11.481884 | fa163e0d-6f45-64a1-ca76-0000000031aa | TIMING | Clean container_puppet_tasks for standalone step 2 | standalone | 0:20:05.446892 | 0.24s 2026-01-22 12:59:11.528784 | fa163e0d-6f45-64a1-ca76-0000000031ab | TASK | Calculate container_puppet_tasks for standalone step 2 2026-01-22 12:59:11.580038 | fa163e0d-6f45-64a1-ca76-0000000031ab | TIMING | Calculate container_puppet_tasks for standalone step 2 | standalone | 0:20:05.545035 | 0.05s 2026-01-22 12:59:11.611056 | fa163e0d-6f45-64a1-ca76-0000000031ac | TASK | Include container-puppet tasks for step 2 2026-01-22 12:59:11.653584 | fa163e0d-6f45-64a1-ca76-0000000031ac | TIMING | Include container-puppet tasks for step 2 | standalone | 0:20:05.618580 | 0.04s 2026-01-22 12:59:11.678176 | ac2ba6f0-1d54-4a16-9709-2e7029aef5a8 | INCLUDED | /root/standalone-ansible-mz1ymllk/host-container-puppet-tasks.yaml | standalone 2026-01-22 12:59:11.726723 | fa163e0d-6f45-64a1-ca76-000000003388 | TASK | Write container-puppet-tasks json file for standalone step 2 2026-01-22 12:59:12.246345 | fa163e0d-6f45-64a1-ca76-000000003388 | CHANGED | Write container-puppet-tasks json file for standalone step 2 | standalone 2026-01-22 12:59:12.247632 | fa163e0d-6f45-64a1-ca76-000000003388 | TIMING | Write container-puppet-tasks json file for standalone step 2 | standalone | 0:20:06.212640 | 0.52s 2026-01-22 12:59:12.279406 | fa163e0d-6f45-64a1-ca76-00000000338a | TASK | Generate container puppet configs for step 2 2026-01-22 12:59:12.555504 | fa163e0d-6f45-64a1-ca76-00000000338a | OK | Generate container puppet configs for step 2 | standalone 2026-01-22 12:59:12.556956 | fa163e0d-6f45-64a1-ca76-00000000338a | TIMING | Generate container puppet configs for step 2 | standalone | 0:20:06.521962 | 0.28s 2026-01-22 12:59:12.587355 | fa163e0d-6f45-64a1-ca76-00000000338b | TASK | Manage Puppet containers (bootstrap tasks) for step 2 with tripleo-ansible 2026-01-22 12:59:12.619568 | fa163e0d-6f45-64a1-ca76-00000000338b | TIMING | Manage Puppet containers (bootstrap tasks) for step 2 with tripleo-ansible | standalone | 0:20:06.584569 | 0.03s 2026-01-22 12:59:12.689123 | fa163e0d-6f45-64a1-ca76-0000000033ba | TASK | Gather variables for each operating system 2026-01-22 12:59:12.811756 | fa163e0d-6f45-64a1-ca76-0000000033ba | TIMING | tripleo_container_manage : Gather variables for each operating system | standalone | 0:20:06.776757 | 0.12s 2026-01-22 12:59:12.841784 | fa163e0d-6f45-64a1-ca76-0000000033bb | TASK | Create container logs path 2026-01-22 12:59:13.107508 | fa163e0d-6f45-64a1-ca76-0000000033bb | OK | Create container logs path | standalone 2026-01-22 12:59:13.108927 | 
fa163e0d-6f45-64a1-ca76-0000000033bb | TIMING | tripleo_container_manage : Create container logs path | standalone | 0:20:07.073936 | 0.27s
2026-01-22 12:59:13.142562 | fa163e0d-6f45-64a1-ca76-0000000033bd | TASK | Find all matching configs configs for in /var/lib/tripleo-config/container-puppet-config/step_2
2026-01-22 12:59:13.399263 | fa163e0d-6f45-64a1-ca76-0000000033bd | OK | Find all matching configs configs for in /var/lib/tripleo-config/container-puppet-config/step_2 | standalone
2026-01-22 12:59:13.400473 | fa163e0d-6f45-64a1-ca76-0000000033bd | TIMING | tripleo_container_manage : Find all matching configs configs for in /var/lib/tripleo-config/container-puppet-config/step_2 | standalone | 0:20:07.365481 | 0.26s
2026-01-22 12:59:13.429749 | fa163e0d-6f45-64a1-ca76-0000000033be | TASK | Finalise hashes for all containers
2026-01-22 12:59:13.479629 | fa163e0d-6f45-64a1-ca76-0000000033be | OK | Finalise hashes for all containers | standalone
2026-01-22 12:59:13.480736 | fa163e0d-6f45-64a1-ca76-0000000033be | TIMING | tripleo_container_manage : Finalise hashes for all containers | standalone | 0:20:07.445743 | 0.05s
2026-01-22 12:59:13.509955 | fa163e0d-6f45-64a1-ca76-0000000033c0 | TASK | Manage systemd shutdown files
2026-01-22 12:59:13.544568 | fa163e0d-6f45-64a1-ca76-0000000033c0 | SKIPPED | Manage systemd shutdown files | standalone
2026-01-22 12:59:13.545441 | fa163e0d-6f45-64a1-ca76-0000000033c0 | TIMING | tripleo_container_manage : Manage systemd shutdown files | standalone | 0:20:07.510451 | 0.03s
2026-01-22 12:59:13.574296 | fa163e0d-6f45-64a1-ca76-0000000033c2 | TASK | Update container configs with new config hashes
2026-01-22 12:59:13.627549 | fa163e0d-6f45-64a1-ca76-0000000033c2 | SKIPPED | Update container configs with new config hashes | standalone
2026-01-22 12:59:13.628846 | fa163e0d-6f45-64a1-ca76-0000000033c2 | TIMING | tripleo_container_manage : Update container configs with new config hashes | standalone | 0:20:07.593852 | 0.05s
2026-01-22 12:59:13.660746 | fa163e0d-6f45-64a1-ca76-0000000033c3 | TASK | Delete orphan containers from /var/lib/tripleo-config/container-puppet-config/step_2
2026-01-22 12:59:13.712849 | fa163e0d-6f45-64a1-ca76-0000000033c3 | SKIPPED | Delete orphan containers from /var/lib/tripleo-config/container-puppet-config/step_2 | standalone
2026-01-22 12:59:13.714074 | fa163e0d-6f45-64a1-ca76-0000000033c3 | TIMING | tripleo_container_manage : Delete orphan containers from /var/lib/tripleo-config/container-puppet-config/step_2 | standalone | 0:20:07.679079 | 0.05s
2026-01-22 12:59:13.745732 | fa163e0d-6f45-64a1-ca76-0000000033c4 | TASK | Create containers from /var/lib/tripleo-config/container-puppet-config/step_2
2026-01-22 12:59:13.796062 | fa163e0d-6f45-64a1-ca76-0000000033c4 | SKIPPED | Create containers from /var/lib/tripleo-config/container-puppet-config/step_2 | standalone
2026-01-22 12:59:13.797115 | fa163e0d-6f45-64a1-ca76-0000000033c4 | TIMING | tripleo_container_manage : Create containers from /var/lib/tripleo-config/container-puppet-config/step_2 | standalone | 0:20:07.762123 | 0.05s
PLAY [External deployment step 3] **********************************************
2026-01-22 12:59:13.959846 | fa163e0d-6f45-64a1-ca76-0000000000d2 | TASK | External deployment step 3
2026-01-22 12:59:13.997085 | fa163e0d-6f45-64a1-ca76-0000000000d2 | OK | External deployment step 3 | undercloud -> localhost | result={ "changed": false, "msg": "Use --start-at-task 'External deployment step 3' to resume from this task" }
[WARNING]: ('undercloud -> localhost', 'fa163e0d-6f45-64a1-ca76-0000000000d2') missing from stats
2026-01-22 12:59:14.056413 | fa163e0d-6f45-64a1-ca76-0000000000d3 | TIMING | include_tasks | undercloud | 0:20:08.021415 | 0.04s
2026-01-22 12:59:14.062930 | aaec768c-e7a5-4364-8341-e8f5a320d9d3 | INCLUDED | /root/standalone-ansible-mz1ymllk/external_deploy_steps_tasks_step3.yaml | undercloud
PLAY [Deploy step tasks for 3] *************************************************
2026-01-22 12:59:14.199370 | fa163e0d-6f45-64a1-ca76-0000000000d6 | TASK | Deploy step tasks for 3
2026-01-22 12:59:14.227951 | fa163e0d-6f45-64a1-ca76-0000000000d6 | OK | Deploy step tasks for 3 | standalone -> localhost | result={ "changed": false, "msg": "Use --start-at-task 'Deploy step tasks for 3' to resume from this task" }
[WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-0000000000d6') missing from stats
2026-01-22 12:59:14.251244 | fa163e0d-6f45-64a1-ca76-0000000000d7 | TASK | Write the config_step hieradata for the deploy step 3 tasks
2026-01-22 12:59:14.717916 | fa163e0d-6f45-64a1-ca76-0000000000d7 | CHANGED | Write the config_step hieradata for the deploy step 3 tasks | standalone
2026-01-22 12:59:14.719712 | fa163e0d-6f45-64a1-ca76-0000000000d7 | TIMING | Write the config_step hieradata for the deploy step 3 tasks | standalone | 0:20:08.684669 | 0.47s
[WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. Found: '{{ playbook_dir }}/{{ _task_file_path }}' is exists
2026-01-22 12:59:14.792596 | fa163e0d-6f45-64a1-ca76-0000000000d8 | TIMING | include_tasks | standalone | 0:20:08.757601 | 0.04s
2026-01-22 12:59:14.820864 | 5a066288-8ee5-411d-a37c-14ace5df116a | INCLUDED | /root/standalone-ansible-mz1ymllk/Standalone/deploy_steps_tasks_step3.yaml | standalone
2026-01-22 12:59:14.860956 | fa163e0d-6f45-64a1-ca76-000000003428 | TASK | Start OVN container
2026-01-22 12:59:14.945066 | fa163e0d-6f45-64a1-ca76-000000003428 | TIMING | Start OVN container | standalone | 0:20:08.910045 | 0.08s
2026-01-22 12:59:14.977989 | fa163e0d-6f45-64a1-ca76-000000003428 | TIMING | Start OVN container | standalone | 0:20:08.942995 | 0.11s
2026-01-22 12:59:15.005163 | fa163e0d-6f45-64a1-ca76-000000003428 | TIMING | Start OVN container | standalone | 0:20:08.970170 | 0.14s
2026-01-22 12:59:15.011511 | fa163e0d-6f45-64a1-ca76-000000003428 | TIMING | Start OVN container | standalone | 0:20:08.976532 | 0.15s
2026-01-22 12:59:15.104621 | fa163e0d-6f45-64a1-ca76-000000003460 | TASK | Gather variables for each operating system
2026-01-22 12:59:15.225682 | fa163e0d-6f45-64a1-ca76-000000003460 | TIMING | tripleo_container_manage : Gather variables for each operating system | standalone | 0:20:09.190657 | 0.12s
2026-01-22 12:59:15.250129 | fa163e0d-6f45-64a1-ca76-000000003461 | TASK | Create container logs path
2026-01-22 12:59:15.508365 | fa163e0d-6f45-64a1-ca76-000000003461 | OK | Create container logs path | standalone
2026-01-22 12:59:15.509791 | fa163e0d-6f45-64a1-ca76-000000003461 | TIMING | tripleo_container_manage : Create container logs path | standalone | 0:20:09.474798 | 0.26s
2026-01-22 12:59:15.542711 | fa163e0d-6f45-64a1-ca76-000000003463 | TASK | Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_0
2026-01-22 12:59:15.840230 | fa163e0d-6f45-64a1-ca76-000000003463 | OK | Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_0 | standalone
2026-01-22 12:59:15.841509 | fa163e0d-6f45-64a1-ca76-000000003463 | TIMING |
tripleo_container_manage : Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_0 | standalone | 0:20:09.806515 | 0.30s 2026-01-22 12:59:15.875507 | fa163e0d-6f45-64a1-ca76-000000003464 | TASK | Finalise hashes for all containers 2026-01-22 12:59:15.980414 | fa163e0d-6f45-64a1-ca76-000000003464 | OK | Finalise hashes for all containers | standalone 2026-01-22 12:59:15.981576 | fa163e0d-6f45-64a1-ca76-000000003464 | TIMING | tripleo_container_manage : Finalise hashes for all containers | standalone | 0:20:09.946582 | 0.10s 2026-01-22 12:59:16.010722 | fa163e0d-6f45-64a1-ca76-000000003466 | TASK | Manage systemd shutdown files 2026-01-22 12:59:16.051341 | fa163e0d-6f45-64a1-ca76-000000003466 | SKIPPED | Manage systemd shutdown files | standalone 2026-01-22 12:59:16.052516 | fa163e0d-6f45-64a1-ca76-000000003466 | TIMING | tripleo_container_manage : Manage systemd shutdown files | standalone | 0:20:10.017523 | 0.04s 2026-01-22 12:59:16.083167 | fa163e0d-6f45-64a1-ca76-000000003468 | TASK | Update container configs with new config hashes 2026-01-22 12:59:16.138214 | fa163e0d-6f45-64a1-ca76-000000003468 | SKIPPED | Update container configs with new config hashes | standalone 2026-01-22 12:59:16.139510 | fa163e0d-6f45-64a1-ca76-000000003468 | TIMING | tripleo_container_manage : Update container configs with new config hashes | standalone | 0:20:10.104516 | 0.05s 2026-01-22 12:59:16.171631 | fa163e0d-6f45-64a1-ca76-000000003469 | TASK | Delete orphan containers from /var/lib/tripleo-config/container-startup-config/step_0 2026-01-22 12:59:16.226866 | fa163e0d-6f45-64a1-ca76-000000003469 | TIMING | tripleo_container_manage : Delete orphan containers from /var/lib/tripleo-config/container-startup-config/step_0 | standalone | 0:20:10.191865 | 0.05s 2026-01-22 12:59:16.251805 | fdb25f4a-e9df-49b5-8b09-b8a4a0252f4a | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/delete_orphan.yml | standalone 2026-01-22 12:59:16.288505 | fa163e0d-6f45-64a1-ca76-0000000034f5 | TASK | Gather podman infos 2026-01-22 12:59:17.388562 | fa163e0d-6f45-64a1-ca76-0000000034f5 | OK | Gather podman infos | standalone 2026-01-22 12:59:17.389707 | fa163e0d-6f45-64a1-ca76-0000000034f5 | TIMING | tripleo_container_manage : Gather podman infos | standalone | 0:20:11.354715 | 1.10s 2026-01-22 12:59:17.475777 | fa163e0d-6f45-64a1-ca76-0000000034f6 | TASK | Delete orphan containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_0 2026-01-22 12:59:17.564756 | fa163e0d-6f45-64a1-ca76-0000000034f6 | TIMING | Delete orphan containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_0 | standalone | 0:20:11.529755 | 0.08s 2026-01-22 12:59:17.724070 | fa163e0d-6f45-64a1-ca76-000000003537 | TIMING | tripleo_container_rm : include_tasks | standalone | 0:20:11.689068 | 0.09s 2026-01-22 12:59:17.781053 | fa163e0d-6f45-64a1-ca76-00000000346a | TASK | Create containers from /var/lib/tripleo-config/container-startup-config/step_0 2026-01-22 12:59:17.848077 | fa163e0d-6f45-64a1-ca76-00000000346a | TIMING | tripleo_container_manage : Create containers from /var/lib/tripleo-config/container-startup-config/step_0 | standalone | 0:20:11.813079 | 0.07s 2026-01-22 12:59:17.870620 | 1ef75778-56a8-4a08-a54f-b452264ae993 | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/create.yml | standalone 2026-01-22 12:59:17.911076 | fa163e0d-6f45-64a1-ca76-000000003560 | TASK | Create containers managed by Podman for 
/var/lib/tripleo-config/container-startup-config/step_0 2026-01-22 12:59:18.830333 | fa163e0d-6f45-64a1-ca76-000000003560 | CHANGED | Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_0 | standalone 2026-01-22 12:59:18.831384 | fa163e0d-6f45-64a1-ca76-000000003560 | TIMING | tripleo_container_manage : Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_0 | standalone | 0:20:12.796399 | 0.92s 2026-01-22 12:59:18.855278 | fa163e0d-6f45-64a1-ca76-000000003561 | TASK | Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_0 2026-01-22 12:59:21.609531 | fa163e0d-6f45-64a1-ca76-000000003561 | CHANGED | Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_0 | standalone 2026-01-22 12:59:21.611187 | fa163e0d-6f45-64a1-ca76-000000003561 | TIMING | tripleo_container_manage : Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_0 | standalone | 0:20:15.576193 | 2.75s 2026-01-22 12:59:21.661055 | fa163e0d-6f45-64a1-ca76-00000000348b | TASK | Gather variables for each operating system 2026-01-22 12:59:21.765093 | fa163e0d-6f45-64a1-ca76-00000000348b | TIMING | tripleo_container_manage : Gather variables for each operating system | standalone | 0:20:15.730098 | 0.10s 2026-01-22 12:59:21.788765 | fa163e0d-6f45-64a1-ca76-00000000348c | TASK | Create container logs path 2026-01-22 12:59:22.029376 | fa163e0d-6f45-64a1-ca76-00000000348c | OK | Create container logs path | standalone 2026-01-22 12:59:22.030442 | fa163e0d-6f45-64a1-ca76-00000000348c | TIMING | tripleo_container_manage : Create container logs path | standalone | 0:20:15.995454 | 0.24s 2026-01-22 12:59:22.054756 | fa163e0d-6f45-64a1-ca76-00000000348e | TASK | Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_0 2026-01-22 12:59:22.367212 | fa163e0d-6f45-64a1-ca76-00000000348e | OK | Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_0 | standalone 2026-01-22 12:59:22.368261 | fa163e0d-6f45-64a1-ca76-00000000348e | TIMING | tripleo_container_manage : Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_0 | standalone | 0:20:16.333273 | 0.31s 2026-01-22 12:59:22.391154 | fa163e0d-6f45-64a1-ca76-00000000348f | TASK | Finalise hashes for all containers 2026-01-22 12:59:22.491838 | fa163e0d-6f45-64a1-ca76-00000000348f | OK | Finalise hashes for all containers | standalone 2026-01-22 12:59:22.492794 | fa163e0d-6f45-64a1-ca76-00000000348f | TIMING | tripleo_container_manage : Finalise hashes for all containers | standalone | 0:20:16.457808 | 0.10s 2026-01-22 12:59:22.515753 | fa163e0d-6f45-64a1-ca76-000000003491 | TASK | Manage systemd shutdown files 2026-01-22 12:59:22.574434 | fa163e0d-6f45-64a1-ca76-000000003491 | SKIPPED | Manage systemd shutdown files | standalone 2026-01-22 12:59:22.575629 | fa163e0d-6f45-64a1-ca76-000000003491 | TIMING | tripleo_container_manage : Manage systemd shutdown files | standalone | 0:20:16.540637 | 0.06s 2026-01-22 12:59:22.606756 | fa163e0d-6f45-64a1-ca76-000000003493 | TASK | Update container configs with new config hashes 2026-01-22 12:59:22.679689 | fa163e0d-6f45-64a1-ca76-000000003493 | SKIPPED | Update container configs with new config hashes | standalone 2026-01-22 
12:59:22.681126 | fa163e0d-6f45-64a1-ca76-000000003493 | TIMING | tripleo_container_manage : Update container configs with new config hashes | standalone | 0:20:16.646129 | 0.07s 2026-01-22 12:59:22.713904 | fa163e0d-6f45-64a1-ca76-000000003494 | TASK | Delete orphan containers from /var/lib/tripleo-config/container-startup-config/step_0 2026-01-22 12:59:22.776325 | fa163e0d-6f45-64a1-ca76-000000003494 | TIMING | tripleo_container_manage : Delete orphan containers from /var/lib/tripleo-config/container-startup-config/step_0 | standalone | 0:20:16.741325 | 0.06s 2026-01-22 12:59:22.801878 | f7da1af0-48c9-4b81-bb94-c154a1b7db76 | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/delete_orphan.yml | standalone 2026-01-22 12:59:22.838333 | fa163e0d-6f45-64a1-ca76-0000000035ac | TASK | Gather podman infos 2026-01-22 12:59:24.006521 | fa163e0d-6f45-64a1-ca76-0000000035ac | OK | Gather podman infos | standalone 2026-01-22 12:59:24.007595 | fa163e0d-6f45-64a1-ca76-0000000035ac | TIMING | tripleo_container_manage : Gather podman infos | standalone | 0:20:17.972604 | 1.17s 2026-01-22 12:59:24.091710 | fa163e0d-6f45-64a1-ca76-0000000035ad | TASK | Delete orphan containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_0 2026-01-22 12:59:24.149628 | fa163e0d-6f45-64a1-ca76-0000000035ad | TIMING | Delete orphan containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_0 | standalone | 0:20:18.114634 | 0.05s 2026-01-22 12:59:24.264170 | fa163e0d-6f45-64a1-ca76-000000003537 | TIMING | tripleo_container_rm : include_tasks | standalone | 0:20:18.229167 | 0.06s 2026-01-22 12:59:24.322774 | fa163e0d-6f45-64a1-ca76-000000003495 | TASK | Create containers from /var/lib/tripleo-config/container-startup-config/step_0 2026-01-22 12:59:24.375899 | fa163e0d-6f45-64a1-ca76-000000003495 | TIMING | tripleo_container_manage : Create containers from /var/lib/tripleo-config/container-startup-config/step_0 | standalone | 0:20:18.340904 | 0.05s 2026-01-22 12:59:24.393173 | 98252fa3-7bc4-4937-803f-337c119041e5 | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/create.yml | standalone 2026-01-22 12:59:24.423978 | fa163e0d-6f45-64a1-ca76-000000003612 | TASK | Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_0 2026-01-22 12:59:25.278569 | fa163e0d-6f45-64a1-ca76-000000003612 | CHANGED | Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_0 | standalone 2026-01-22 12:59:25.279513 | fa163e0d-6f45-64a1-ca76-000000003612 | TIMING | tripleo_container_manage : Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_0 | standalone | 0:20:19.244529 | 0.85s 2026-01-22 12:59:25.299446 | fa163e0d-6f45-64a1-ca76-000000003613 | TASK | Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_0 2026-01-22 12:59:29.156612 | fa163e0d-6f45-64a1-ca76-000000003613 | CHANGED | Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_0 | standalone 2026-01-22 12:59:29.159250 | fa163e0d-6f45-64a1-ca76-000000003613 | TIMING | tripleo_container_manage : Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_0 | standalone | 0:20:23.124256 | 3.86s 2026-01-22 12:59:29.213587 | fa163e0d-6f45-64a1-ca76-0000000034b6 | TASK | 
Gather variables for each operating system 2026-01-22 12:59:29.297288 | fa163e0d-6f45-64a1-ca76-0000000034b6 | TIMING | tripleo_container_manage : Gather variables for each operating system | standalone | 0:20:23.262289 | 0.08s 2026-01-22 12:59:29.326735 | fa163e0d-6f45-64a1-ca76-0000000034b7 | TASK | Create container logs path 2026-01-22 12:59:29.556396 | fa163e0d-6f45-64a1-ca76-0000000034b7 | OK | Create container logs path | standalone 2026-01-22 12:59:29.557757 | fa163e0d-6f45-64a1-ca76-0000000034b7 | TIMING | tripleo_container_manage : Create container logs path | standalone | 0:20:23.522765 | 0.23s 2026-01-22 12:59:29.588069 | fa163e0d-6f45-64a1-ca76-0000000034b9 | TASK | Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_0 2026-01-22 12:59:29.908158 | fa163e0d-6f45-64a1-ca76-0000000034b9 | OK | Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_0 | standalone 2026-01-22 12:59:29.909074 | fa163e0d-6f45-64a1-ca76-0000000034b9 | TIMING | tripleo_container_manage : Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_0 | standalone | 0:20:23.874090 | 0.32s 2026-01-22 12:59:29.928049 | fa163e0d-6f45-64a1-ca76-0000000034ba | TASK | Finalise hashes for all containers 2026-01-22 12:59:30.029643 | fa163e0d-6f45-64a1-ca76-0000000034ba | OK | Finalise hashes for all containers | standalone 2026-01-22 12:59:30.030996 | fa163e0d-6f45-64a1-ca76-0000000034ba | TIMING | tripleo_container_manage : Finalise hashes for all containers | standalone | 0:20:23.996003 | 0.10s 2026-01-22 12:59:30.061515 | fa163e0d-6f45-64a1-ca76-0000000034bc | TASK | Manage systemd shutdown files 2026-01-22 12:59:30.116976 | fa163e0d-6f45-64a1-ca76-0000000034bc | SKIPPED | Manage systemd shutdown files | standalone 2026-01-22 12:59:30.118238 | fa163e0d-6f45-64a1-ca76-0000000034bc | TIMING | tripleo_container_manage : Manage systemd shutdown files | standalone | 0:20:24.083244 | 0.05s 2026-01-22 12:59:30.148014 | fa163e0d-6f45-64a1-ca76-0000000034be | TASK | Update container configs with new config hashes 2026-01-22 12:59:30.221069 | fa163e0d-6f45-64a1-ca76-0000000034be | SKIPPED | Update container configs with new config hashes | standalone 2026-01-22 12:59:30.222342 | fa163e0d-6f45-64a1-ca76-0000000034be | TIMING | tripleo_container_manage : Update container configs with new config hashes | standalone | 0:20:24.187347 | 0.07s 2026-01-22 12:59:30.255457 | fa163e0d-6f45-64a1-ca76-0000000034bf | TASK | Delete orphan containers from /var/lib/tripleo-config/container-startup-config/step_0 2026-01-22 12:59:30.330619 | fa163e0d-6f45-64a1-ca76-0000000034bf | TIMING | tripleo_container_manage : Delete orphan containers from /var/lib/tripleo-config/container-startup-config/step_0 | standalone | 0:20:24.295615 | 0.07s 2026-01-22 12:59:30.356546 | b77df245-9590-4d8c-88e3-5af3ab9d60b6 | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/delete_orphan.yml | standalone 2026-01-22 12:59:30.391976 | fa163e0d-6f45-64a1-ca76-00000000365e | TASK | Gather podman infos 2026-01-22 12:59:31.587895 | fa163e0d-6f45-64a1-ca76-00000000365e | OK | Gather podman infos | standalone 2026-01-22 12:59:31.588540 | fa163e0d-6f45-64a1-ca76-00000000365e | TIMING | tripleo_container_manage : Gather podman infos | standalone | 0:20:25.553559 | 1.19s 2026-01-22 12:59:31.675377 | fa163e0d-6f45-64a1-ca76-00000000365f | TASK | Delete orphan containers managed by Podman for 
/var/lib/tripleo-config/container-startup-config/step_0 2026-01-22 12:59:31.767374 | fa163e0d-6f45-64a1-ca76-00000000365f | TIMING | Delete orphan containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_0 | standalone | 0:20:25.732371 | 0.08s 2026-01-22 12:59:31.915489 | fa163e0d-6f45-64a1-ca76-000000003537 | TIMING | tripleo_container_rm : include_tasks | standalone | 0:20:25.880486 | 0.09s 2026-01-22 12:59:31.976259 | fa163e0d-6f45-64a1-ca76-0000000034c0 | TASK | Create containers from /var/lib/tripleo-config/container-startup-config/step_0 2026-01-22 12:59:32.021406 | fa163e0d-6f45-64a1-ca76-0000000034c0 | TIMING | tripleo_container_manage : Create containers from /var/lib/tripleo-config/container-startup-config/step_0 | standalone | 0:20:25.986404 | 0.04s 2026-01-22 12:59:32.046604 | f8b807bc-54d1-42cd-a166-e8f59ed76c8f | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/create.yml | standalone 2026-01-22 12:59:32.081991 | fa163e0d-6f45-64a1-ca76-0000000036c4 | TASK | Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_0 2026-01-22 12:59:33.099920 | fa163e0d-6f45-64a1-ca76-0000000036c4 | CHANGED | Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_0 | standalone 2026-01-22 12:59:33.100913 | fa163e0d-6f45-64a1-ca76-0000000036c4 | TIMING | tripleo_container_manage : Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_0 | standalone | 0:20:27.065927 | 1.02s 2026-01-22 12:59:33.124689 | fa163e0d-6f45-64a1-ca76-0000000036c5 | TASK | Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_0 2026-01-22 12:59:36.030970 | fa163e0d-6f45-64a1-ca76-0000000036c5 | CHANGED | Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_0 | standalone 2026-01-22 12:59:36.032869 | fa163e0d-6f45-64a1-ca76-0000000036c5 | TIMING | tripleo_container_manage : Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_0 | standalone | 0:20:29.997876 | 2.91s 2026-01-22 12:59:36.088457 | fa163e0d-6f45-64a1-ca76-00000000342a | TASK | Set NB connection 2026-01-22 12:59:36.589740 | fa163e0d-6f45-64a1-ca76-00000000342a | CHANGED | Set NB connection | standalone 2026-01-22 12:59:36.591089 | fa163e0d-6f45-64a1-ca76-00000000342a | TIMING | Set NB connection | standalone | 0:20:30.556098 | 0.50s 2026-01-22 12:59:36.619086 | fa163e0d-6f45-64a1-ca76-00000000342b | TASK | Set SB connection 2026-01-22 12:59:37.069050 | fa163e0d-6f45-64a1-ca76-00000000342b | CHANGED | Set SB connection | standalone 2026-01-22 12:59:37.070548 | fa163e0d-6f45-64a1-ca76-00000000342b | TIMING | Set SB connection | standalone | 0:20:31.035555 | 0.45s 2026-01-22 12:59:37.098755 | fa163e0d-6f45-64a1-ca76-0000000000da | TASK | Check if /var/lib/tripleo-config/container-startup-config/step_3 already exists 2026-01-22 12:59:37.307171 | fa163e0d-6f45-64a1-ca76-0000000000da | OK | Check if /var/lib/tripleo-config/container-startup-config/step_3 already exists | standalone 2026-01-22 12:59:37.308817 | fa163e0d-6f45-64a1-ca76-0000000000da | TIMING | Check if /var/lib/tripleo-config/container-startup-config/step_3 already exists | standalone | 0:20:31.273823 | 0.21s 2026-01-22 12:59:37.399976 | fa163e0d-6f45-64a1-ca76-0000000000db | TIMING | include_tasks | standalone | 
0:20:31.364969 | 0.06s 2026-01-22 12:59:37.439674 | dc9c7cf5-c262-40a6-9a3d-3f06a0e5223d | INCLUDED | /root/standalone-ansible-mz1ymllk/common_deploy_steps_tasks.yaml | standalone 2026-01-22 12:59:37.468091 | fa163e0d-6f45-64a1-ca76-00000000370a | TASK | Write the config_step hieradata 2026-01-22 12:59:37.943053 | fa163e0d-6f45-64a1-ca76-00000000370a | OK | Write the config_step hieradata | standalone 2026-01-22 12:59:37.944325 | fa163e0d-6f45-64a1-ca76-00000000370a | TIMING | Write the config_step hieradata | standalone | 0:20:31.909332 | 0.48s 2026-01-22 12:59:37.975430 | fa163e0d-6f45-64a1-ca76-00000000370b | TASK | Run puppet host configuration for step 3 2026-01-22 12:59:38.277296 | fa163e0d-6f45-64a1-ca76-00000000370b | CHANGED | Run puppet host configuration for step 3 | standalone 2026-01-22 12:59:38.278025 | fa163e0d-6f45-64a1-ca76-00000000370b | TIMING | Run puppet host configuration for step 3 | standalone | 0:20:32.243042 | 0.30s 2026-01-22 12:59:38.297283 | fa163e0d-6f45-64a1-ca76-00000000370c | TASK | Wait for puppet host configuration to finish 2026-01-22 12:59:38.515020 | fa163e0d-6f45-64a1-ca76-00000000370c | WAITING | Wait for puppet host configuration to finish | standalone | 360 retries left 2026-01-22 12:59:48.750810 | fa163e0d-6f45-64a1-ca76-00000000370c | WAITING | Wait for puppet host configuration to finish | standalone | 359 retries left 2026-01-22 12:59:58.943000 | fa163e0d-6f45-64a1-ca76-00000000370c | CHANGED | Wait for puppet host configuration to finish | standalone 2026-01-22 12:59:58.944484 | fa163e0d-6f45-64a1-ca76-00000000370c | TIMING | Wait for puppet host configuration to finish | standalone | 0:20:52.909483 | 20.65s 2026-01-22 12:59:58.969310 | fa163e0d-6f45-64a1-ca76-00000000370d | TASK | Debug output for task: Run puppet host configuration for step 3 2026-01-22 12:59:59.050462 | fa163e0d-6f45-64a1-ca76-00000000370d | CHANGED | Debug output for task: Run puppet host configuration for step 3 | standalone | result={ "changed": true, "failed_when_result": false, "puppet_host_outputs.stdout_lines | default([]) | union(puppet_host_outputs.stderr_lines | default([]))": [ "<13>Jan 22 12:59:38 puppet-user: Warning: /etc/puppet/hiera.yaml: Use of 'hiera.yaml' version 3 is deprecated. It should be converted to version 5", "<13>Jan 22 12:59:44 puppet-user: (file: /etc/puppet/hiera.yaml)", "<13>Jan 22 12:59:44 puppet-user: Warning: Undefined variable '::deploy_config_name'; ", "<13>Jan 22 12:59:44 puppet-user: (file & line not available)", "<13>Jan 22 12:59:44 puppet-user: Warning: The function 'hiera' is deprecated in favor of using 'lookup'. See https://puppet.com/docs/puppet/7.10/deprecated_language.html", "<13>Jan 22 12:59:44 puppet-user: Warning: Unknown variable: '::deployment_type'. (file: /etc/puppet/modules/tripleo/manifests/profile/base/database/mysql/client.pp, line: 89, column: 8)", "<13>Jan 22 12:59:45 puppet-user: Warning: Unknown variable: '::deployment_type'. (file: /etc/puppet/modules/tripleo/manifests/packages.pp, line: 39, column: 69)", "<13>Jan 22 12:59:45 puppet-user: Notice: Compiled catalog for standalone.ooo.test in environment production in 0.41 seconds", "<13>Jan 22 12:59:49 puppet-user: Notice: /Stage[main]/Pacemaker::Resource_defaults/Pcmk_resource_default[resource-stickiness]/ensure: created", "<13>Jan 22 12:59:51 puppet-user: Notice: /Stage[main]/Pacemaker::Resource_op_defaults/Pcmk_resource_op_default[bundle]/ensure: created", "<13>Jan 22 12:59:52 puppet-user: Deprecation Warning: This command is deprecated and will be removed. 
Please use 'pcs property config' instead.", "<13>Jan 22 12:59:53 puppet-user: Notice: Applied catalog in 7.85 seconds", "<13>Jan 22 12:59:53 puppet-user: Application:", "<13>Jan 22 12:59:53 puppet-user: Initial environment: production", "<13>Jan 22 12:59:53 puppet-user: Converged environment: production", "<13>Jan 22 12:59:53 puppet-user: Run mode: user", "<13>Jan 22 12:59:53 puppet-user: Changes:", "<13>Jan 22 12:59:53 puppet-user: Total: 2", "<13>Jan 22 12:59:53 puppet-user: Events:", "<13>Jan 22 12:59:53 puppet-user: Success: 2", "<13>Jan 22 12:59:53 puppet-user: Resources:", "<13>Jan 22 12:59:53 puppet-user: Changed: 2", "<13>Jan 22 12:59:53 puppet-user: Out of sync: 2", "<13>Jan 22 12:59:53 puppet-user: Total: 28", "<13>Jan 22 12:59:53 puppet-user: Time:", "<13>Jan 22 12:59:53 puppet-user: Filebucket: 0.00", "<13>Jan 22 12:59:53 puppet-user: File line: 0.00", "<13>Jan 22 12:59:53 puppet-user: Schedule: 0.00", "<13>Jan 22 12:59:53 puppet-user: Package: 0.00", "<13>Jan 22 12:59:53 puppet-user: File: 0.00", "<13>Jan 22 12:59:53 puppet-user: Augeas: 0.01", "<13>Jan 22 12:59:53 puppet-user: User: 0.06", "<13>Jan 22 12:59:53 puppet-user: Service: 0.08", "<13>Jan 22 12:59:53 puppet-user: Config retrieval: 0.46", "<13>Jan 22 12:59:53 puppet-user: Pcmk property: 1.46", "<13>Jan 22 12:59:53 puppet-user: Exec: 1.99", "<13>Jan 22 12:59:53 puppet-user: Last run: 1769086793", "<13>Jan 22 12:59:53 puppet-user: Pcmk resource op default: 2.02", "<13>Jan 22 12:59:53 puppet-user: Pcmk resource default: 2.08", "<13>Jan 22 12:59:53 puppet-user: Transaction evaluation: 7.83", "<13>Jan 22 12:59:53 puppet-user: Catalog application: 7.85", "<13>Jan 22 12:59:53 puppet-user: Total: 7.85", "<13>Jan 22 12:59:53 puppet-user: Version:", "<13>Jan 22 12:59:53 puppet-user: Config: 1769086784", "<13>Jan 22 12:59:53 puppet-user: Puppet: 7.10.0" ] } 2026-01-22 12:59:59.051894 | fa163e0d-6f45-64a1-ca76-00000000370d | TIMING | Debug output for task: Run puppet host configuration for step 3 | standalone | 0:20:53.016902 | 0.08s 2026-01-22 12:59:59.080551 | fa163e0d-6f45-64a1-ca76-00000000370e | TASK | Pre-cache facts for puppet containers 2026-01-22 12:59:59.110049 | fa163e0d-6f45-64a1-ca76-00000000370e | TIMING | Pre-cache facts for puppet containers | standalone | 0:20:53.075050 | 0.03s 2026-01-22 12:59:59.179603 | fa163e0d-6f45-64a1-ca76-00000000374d | TASK | Gather variables for each operating system 2026-01-22 12:59:59.293289 | fa163e0d-6f45-64a1-ca76-00000000374d | TIMING | tripleo_puppet_cache : Gather variables for each operating system | standalone | 0:20:53.258278 | 0.11s 2026-01-22 12:59:59.322514 | fa163e0d-6f45-64a1-ca76-00000000374e | TASK | Create puppet caching structures 2026-01-22 12:59:59.578200 | fa163e0d-6f45-64a1-ca76-00000000374e | CHANGED | Create puppet caching structures | standalone 2026-01-22 12:59:59.579788 | fa163e0d-6f45-64a1-ca76-00000000374e | TIMING | tripleo_puppet_cache : Create puppet caching structures | standalone | 0:20:53.544795 | 0.26s 2026-01-22 12:59:59.609188 | fa163e0d-6f45-64a1-ca76-00000000374f | TASK | Check for facter.conf 2026-01-22 12:59:59.854850 | fa163e0d-6f45-64a1-ca76-00000000374f | OK | Check for facter.conf | standalone 2026-01-22 12:59:59.856200 | fa163e0d-6f45-64a1-ca76-00000000374f | TIMING | tripleo_puppet_cache : Check for facter.conf | standalone | 0:20:53.821207 | 0.25s 2026-01-22 12:59:59.885295 | fa163e0d-6f45-64a1-ca76-000000003750 | TASK | Remove facter.conf if directory 2026-01-22 12:59:59.925333 | fa163e0d-6f45-64a1-ca76-000000003750 | SKIPPED | Remove 
facter.conf if directory | standalone 2026-01-22 12:59:59.926588 | fa163e0d-6f45-64a1-ca76-000000003750 | TIMING | tripleo_puppet_cache : Remove facter.conf if directory | standalone | 0:20:53.891592 | 0.04s 2026-01-22 12:59:59.956327 | fa163e0d-6f45-64a1-ca76-000000003751 | TASK | Write facter cache config 2026-01-22 13:00:00.475201 | fa163e0d-6f45-64a1-ca76-000000003751 | CHANGED | Write facter cache config | standalone 2026-01-22 13:00:00.476953 | fa163e0d-6f45-64a1-ca76-000000003751 | TIMING | tripleo_puppet_cache : Write facter cache config | standalone | 0:20:54.441960 | 0.52s 2026-01-22 13:00:00.506000 | fa163e0d-6f45-64a1-ca76-000000003752 | TASK | Cleanup facter cache if exists 2026-01-22 13:00:00.750431 | fa163e0d-6f45-64a1-ca76-000000003752 | CHANGED | Cleanup facter cache if exists | standalone 2026-01-22 13:00:00.751811 | fa163e0d-6f45-64a1-ca76-000000003752 | TIMING | tripleo_puppet_cache : Cleanup facter cache if exists | standalone | 0:20:54.716819 | 0.24s 2026-01-22 13:00:00.781614 | fa163e0d-6f45-64a1-ca76-000000003753 | TASK | Pre-cache facts 2026-01-22 13:00:01.715001 | fa163e0d-6f45-64a1-ca76-000000003753 | CHANGED | Pre-cache facts | standalone 2026-01-22 13:00:01.716422 | fa163e0d-6f45-64a1-ca76-000000003753 | TIMING | tripleo_puppet_cache : Pre-cache facts | standalone | 0:20:55.681428 | 0.93s 2026-01-22 13:00:01.747005 | fa163e0d-6f45-64a1-ca76-000000003754 | TASK | Failed deployment if facter fails 2026-01-22 13:00:01.780061 | fa163e0d-6f45-64a1-ca76-000000003754 | SKIPPED | Failed deployment if facter fails | standalone 2026-01-22 13:00:01.781195 | fa163e0d-6f45-64a1-ca76-000000003754 | TIMING | tripleo_puppet_cache : Failed deployment if facter fails | standalone | 0:20:55.746202 | 0.03s 2026-01-22 13:00:01.825502 | fa163e0d-6f45-64a1-ca76-000000003755 | TASK | Sync cached facts 2026-01-22 13:00:02.158641 | fa163e0d-6f45-64a1-ca76-000000003755 | CHANGED | Sync cached facts | standalone -> 192.168.122.100 [WARNING]: ('standalone -> 192.168.122.100', 'fa163e0d-6f45-64a1-ca76-000000003755') missing from stats 2026-01-22 13:00:02.214402 | fa163e0d-6f45-64a1-ca76-00000000370f | TASK | Include container-puppet tasks (generate config) during step 1 2026-01-22 13:00:02.271743 | fa163e0d-6f45-64a1-ca76-00000000370f | SKIPPED | Include container-puppet tasks (generate config) during step 1 | standalone 2026-01-22 13:00:02.273239 | fa163e0d-6f45-64a1-ca76-00000000370f | TIMING | Include container-puppet tasks (generate config) during step 1 | standalone | 0:20:56.238245 | 0.06s 2026-01-22 13:00:02.305569 | fa163e0d-6f45-64a1-ca76-000000003711 | TASK | Manage containers for step 3 with tripleo-ansible 2026-01-22 13:00:02.337323 | fa163e0d-6f45-64a1-ca76-000000003711 | TIMING | Manage containers for step 3 with tripleo-ansible | standalone | 0:20:56.302328 | 0.03s 2026-01-22 13:00:02.401669 | fa163e0d-6f45-64a1-ca76-0000000037ac | TASK | Gather variables for each operating system 2026-01-22 13:00:02.527524 | fa163e0d-6f45-64a1-ca76-0000000037ac | TIMING | tripleo_container_manage : Gather variables for each operating system | standalone | 0:20:56.492519 | 0.12s 2026-01-22 13:00:02.557672 | fa163e0d-6f45-64a1-ca76-0000000037ad | TASK | Create container logs path 2026-01-22 13:00:02.824638 | fa163e0d-6f45-64a1-ca76-0000000037ad | OK | Create container logs path | standalone 2026-01-22 13:00:02.825859 | fa163e0d-6f45-64a1-ca76-0000000037ad | TIMING | tripleo_container_manage : Create container logs path | standalone | 0:20:56.790868 | 0.27s 2026-01-22 13:00:02.850959 | 
fa163e0d-6f45-64a1-ca76-0000000037af | TASK | Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_3 2026-01-22 13:00:03.103000 | fa163e0d-6f45-64a1-ca76-0000000037af | OK | Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_3 | standalone 2026-01-22 13:00:03.104296 | fa163e0d-6f45-64a1-ca76-0000000037af | TIMING | tripleo_container_manage : Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_3 | standalone | 0:20:57.069305 | 0.25s 2026-01-22 13:00:03.137318 | fa163e0d-6f45-64a1-ca76-0000000037b0 | TASK | Finalise hashes for all containers 2026-01-22 13:00:03.222197 | fa163e0d-6f45-64a1-ca76-0000000037b0 | OK | Finalise hashes for all containers | standalone 2026-01-22 13:00:03.223838 | fa163e0d-6f45-64a1-ca76-0000000037b0 | TIMING | tripleo_container_manage : Finalise hashes for all containers | standalone | 0:20:57.188841 | 0.08s 2026-01-22 13:00:03.255698 | fa163e0d-6f45-64a1-ca76-0000000037b2 | TASK | Manage systemd shutdown files 2026-01-22 13:00:03.294445 | fa163e0d-6f45-64a1-ca76-0000000037b2 | TIMING | tripleo_container_manage : Manage systemd shutdown files | standalone | 0:20:57.259421 | 0.04s 2026-01-22 13:00:03.331376 | 99f7af43-07a7-4f47-9c38-079fb376c15c | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/shutdown.yml | standalone 2026-01-22 13:00:03.362115 | fa163e0d-6f45-64a1-ca76-0000000037e5 | TASK | Check if /etc/sysconfig/podman_drop_in exists 2026-01-22 13:00:03.580238 | fa163e0d-6f45-64a1-ca76-0000000037e5 | OK | Check if /etc/sysconfig/podman_drop_in exists | standalone 2026-01-22 13:00:03.582916 | fa163e0d-6f45-64a1-ca76-0000000037e5 | TIMING | tripleo_container_manage : Check if /etc/sysconfig/podman_drop_in exists | standalone | 0:20:57.547919 | 0.22s 2026-01-22 13:00:03.613962 | fa163e0d-6f45-64a1-ca76-0000000037e6 | TASK | Set podman_drop_in fact 2026-01-22 13:00:03.660736 | fa163e0d-6f45-64a1-ca76-0000000037e6 | OK | Set podman_drop_in fact | standalone 2026-01-22 13:00:03.662137 | fa163e0d-6f45-64a1-ca76-0000000037e6 | TIMING | tripleo_container_manage : Set podman_drop_in fact | standalone | 0:20:57.627142 | 0.05s 2026-01-22 13:00:03.692206 | fa163e0d-6f45-64a1-ca76-0000000037e8 | TASK | Deploy tripleo-container-shutdown and tripleo-start-podman-container 2026-01-22 13:00:04.207717 | fa163e0d-6f45-64a1-ca76-0000000037e8 | OK | Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | item=tripleo-container-shutdown 2026-01-22 13:00:04.209557 | fa163e0d-6f45-64a1-ca76-0000000037e8 | TIMING | tripleo_container_manage : Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | 0:20:58.174555 | 0.52s 2026-01-22 13:00:04.656830 | fa163e0d-6f45-64a1-ca76-0000000037e8 | OK | Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | item=tripleo-start-podman-container 2026-01-22 13:00:04.658320 | fa163e0d-6f45-64a1-ca76-0000000037e8 | TIMING | tripleo_container_manage : Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | 0:20:58.623333 | 0.96s 2026-01-22 13:00:04.669210 | fa163e0d-6f45-64a1-ca76-0000000037e8 | TIMING | tripleo_container_manage : Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | 0:20:58.634214 | 0.98s 2026-01-22 13:00:04.699537 | fa163e0d-6f45-64a1-ca76-0000000037e9 | TASK | Create /usr/lib/systemd/system/tripleo-container-shutdown.service 2026-01-22 13:00:05.257839 
| fa163e0d-6f45-64a1-ca76-0000000037e9 | OK | Create /usr/lib/systemd/system/tripleo-container-shutdown.service | standalone 2026-01-22 13:00:05.259314 | fa163e0d-6f45-64a1-ca76-0000000037e9 | TIMING | tripleo_container_manage : Create /usr/lib/systemd/system/tripleo-container-shutdown.service | standalone | 0:20:59.224319 | 0.56s 2026-01-22 13:00:05.289619 | fa163e0d-6f45-64a1-ca76-0000000037ea | TASK | Create /usr/lib/systemd/system-preset/91-tripleo-container-shutdown.preset 2026-01-22 13:00:05.757368 | fa163e0d-6f45-64a1-ca76-0000000037ea | OK | Create /usr/lib/systemd/system-preset/91-tripleo-container-shutdown.preset | standalone 2026-01-22 13:00:05.758918 | fa163e0d-6f45-64a1-ca76-0000000037ea | TIMING | tripleo_container_manage : Create /usr/lib/systemd/system-preset/91-tripleo-container-shutdown.preset | standalone | 0:20:59.723890 | 0.47s 2026-01-22 13:00:05.792358 | fa163e0d-6f45-64a1-ca76-0000000037eb | TASK | Enable and start tripleo-container-shutdown 2026-01-22 13:00:06.526992 | fa163e0d-6f45-64a1-ca76-0000000037eb | OK | Enable and start tripleo-container-shutdown | standalone 2026-01-22 13:00:06.530402 | fa163e0d-6f45-64a1-ca76-0000000037eb | TIMING | tripleo_container_manage : Enable and start tripleo-container-shutdown | standalone | 0:21:00.495406 | 0.74s 2026-01-22 13:00:06.561851 | fa163e0d-6f45-64a1-ca76-0000000037ec | TASK | Create /usr/lib/systemd/system/netns-placeholder.service 2026-01-22 13:00:07.071067 | fa163e0d-6f45-64a1-ca76-0000000037ec | OK | Create /usr/lib/systemd/system/netns-placeholder.service | standalone 2026-01-22 13:00:07.072752 | fa163e0d-6f45-64a1-ca76-0000000037ec | TIMING | tripleo_container_manage : Create /usr/lib/systemd/system/netns-placeholder.service | standalone | 0:21:01.037751 | 0.51s 2026-01-22 13:00:07.108340 | fa163e0d-6f45-64a1-ca76-0000000037ed | TASK | Create /usr/lib/systemd/system-preset/91-netns-placeholder.preset 2026-01-22 13:00:07.575410 | fa163e0d-6f45-64a1-ca76-0000000037ed | OK | Create /usr/lib/systemd/system-preset/91-netns-placeholder.preset | standalone 2026-01-22 13:00:07.576213 | fa163e0d-6f45-64a1-ca76-0000000037ed | TIMING | tripleo_container_manage : Create /usr/lib/systemd/system-preset/91-netns-placeholder.preset | standalone | 0:21:01.541229 | 0.47s 2026-01-22 13:00:07.595055 | fa163e0d-6f45-64a1-ca76-0000000037ee | TASK | Enable and start netns-placeholder 2026-01-22 13:00:08.368051 | fa163e0d-6f45-64a1-ca76-0000000037ee | CHANGED | Enable and start netns-placeholder | standalone 2026-01-22 13:00:08.370298 | fa163e0d-6f45-64a1-ca76-0000000037ee | TIMING | tripleo_container_manage : Enable and start netns-placeholder | standalone | 0:21:02.335303 | 0.77s 2026-01-22 13:00:08.400049 | fa163e0d-6f45-64a1-ca76-0000000037b4 | TASK | Update container configs with new config hashes 2026-01-22 13:00:08.460782 | fa163e0d-6f45-64a1-ca76-0000000037b4 | TIMING | tripleo_container_manage : Update container configs with new config hashes | standalone | 0:21:02.425779 | 0.06s 2026-01-22 13:00:08.483104 | 8e2f6618-8782-4ea6-b9f9-23ff72c4b7a9 | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/puppet_config.yml | standalone 2026-01-22 13:00:08.518606 | fa163e0d-6f45-64a1-ca76-000000003816 | TASK | Update config hashes for container startup configs 2026-01-22 13:00:08.816157 | fa163e0d-6f45-64a1-ca76-000000003816 | OK | Update config hashes for container startup configs | standalone 2026-01-22 13:00:08.817865 | fa163e0d-6f45-64a1-ca76-000000003816 | TIMING | tripleo_container_manage : Update config hashes for 
container startup configs | standalone | 0:21:02.782862 | 0.30s 2026-01-22 13:00:08.853751 | fa163e0d-6f45-64a1-ca76-0000000037b5 | TASK | Delete orphan containers from /var/lib/tripleo-config/container-startup-config/step_3 2026-01-22 13:00:08.915605 | fa163e0d-6f45-64a1-ca76-0000000037b5 | TIMING | tripleo_container_manage : Delete orphan containers from /var/lib/tripleo-config/container-startup-config/step_3 | standalone | 0:21:02.880605 | 0.06s 2026-01-22 13:00:08.939533 | 1f7c1477-bbc3-4d12-b9f6-b88a8caf6a7a | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/delete_orphan.yml | standalone 2026-01-22 13:00:08.971982 | fa163e0d-6f45-64a1-ca76-000000003835 | TASK | Gather podman infos 2026-01-22 13:00:10.102005 | fa163e0d-6f45-64a1-ca76-000000003835 | OK | Gather podman infos | standalone 2026-01-22 13:00:10.103349 | fa163e0d-6f45-64a1-ca76-000000003835 | TIMING | tripleo_container_manage : Gather podman infos | standalone | 0:21:04.068354 | 1.13s 2026-01-22 13:00:10.279417 | fa163e0d-6f45-64a1-ca76-000000003836 | TASK | Delete orphan containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_3 2026-01-22 13:00:10.359233 | fa163e0d-6f45-64a1-ca76-000000003836 | TIMING | Delete orphan containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_3 | standalone | 0:21:04.324230 | 0.07s 2026-01-22 13:00:10.512879 | fa163e0d-6f45-64a1-ca76-000000003537 | TIMING | tripleo_container_rm : include_tasks | standalone | 0:21:04.477882 | 0.09s 2026-01-22 13:00:10.570802 | fa163e0d-6f45-64a1-ca76-0000000037b6 | TASK | Create containers from /var/lib/tripleo-config/container-startup-config/step_3 2026-01-22 13:00:10.626754 | fa163e0d-6f45-64a1-ca76-0000000037b6 | TIMING | tripleo_container_manage : Create containers from /var/lib/tripleo-config/container-startup-config/step_3 | standalone | 0:21:04.591753 | 0.05s 2026-01-22 13:00:10.650835 | dd2e9fda-a4b0-4a8d-a55f-741ac0f04a7f | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/create.yml | standalone 2026-01-22 13:00:10.686675 | fa163e0d-6f45-64a1-ca76-0000000038a0 | TASK | Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_3 2026-01-22 13:01:02.386997 | fa163e0d-6f45-64a1-ca76-0000000038a0 | CHANGED | Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_3 | standalone 2026-01-22 13:01:02.388547 | fa163e0d-6f45-64a1-ca76-0000000038a0 | TIMING | tripleo_container_manage : Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_3 | standalone | 0:21:56.353554 | 51.70s 2026-01-22 13:01:02.419767 | fa163e0d-6f45-64a1-ca76-0000000038a1 | TASK | Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_3 2026-01-22 13:01:29.787473 | fa163e0d-6f45-64a1-ca76-0000000038a1 | CHANGED | Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_3 | standalone 2026-01-22 13:01:29.788832 | fa163e0d-6f45-64a1-ca76-0000000038a1 | TIMING | tripleo_container_manage : Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_3 | standalone | 0:22:23.753845 | 27.37s 2026-01-22 13:01:29.868680 | fa163e0d-6f45-64a1-ca76-000000003713 | TASK | Clean container_puppet_tasks for standalone step 3 2026-01-22 13:01:30.101376 | fa163e0d-6f45-64a1-ca76-000000003713 
| OK | Clean container_puppet_tasks for standalone step 3 | standalone 2026-01-22 13:01:30.102222 | fa163e0d-6f45-64a1-ca76-000000003713 | TIMING | Clean container_puppet_tasks for standalone step 3 | standalone | 0:22:24.067238 | 0.23s 2026-01-22 13:01:30.136558 | fa163e0d-6f45-64a1-ca76-000000003714 | TASK | Calculate container_puppet_tasks for standalone step 3 2026-01-22 13:01:30.176295 | fa163e0d-6f45-64a1-ca76-000000003714 | TIMING | Calculate container_puppet_tasks for standalone step 3 | standalone | 0:22:24.141291 | 0.04s 2026-01-22 13:01:30.208683 | fa163e0d-6f45-64a1-ca76-000000003715 | TASK | Include container-puppet tasks for step 3 2026-01-22 13:01:30.238245 | fa163e0d-6f45-64a1-ca76-000000003715 | TIMING | Include container-puppet tasks for step 3 | standalone | 0:22:24.203252 | 0.03s 2026-01-22 13:01:30.253688 | 71c7f64a-a98f-4fcf-aedf-47e098a937ca | INCLUDED | /root/standalone-ansible-mz1ymllk/host-container-puppet-tasks.yaml | standalone 2026-01-22 13:01:30.290583 | fa163e0d-6f45-64a1-ca76-0000000038ec | TASK | Write container-puppet-tasks json file for standalone step 3 2026-01-22 13:01:30.742228 | fa163e0d-6f45-64a1-ca76-0000000038ec | CHANGED | Write container-puppet-tasks json file for standalone step 3 | standalone 2026-01-22 13:01:30.743333 | fa163e0d-6f45-64a1-ca76-0000000038ec | TIMING | Write container-puppet-tasks json file for standalone step 3 | standalone | 0:22:24.708347 | 0.45s 2026-01-22 13:01:30.762834 | fa163e0d-6f45-64a1-ca76-0000000038ee | TASK | Generate container puppet configs for step 3 2026-01-22 13:01:31.093874 | fa163e0d-6f45-64a1-ca76-0000000038ee | OK | Generate container puppet configs for step 3 | standalone 2026-01-22 13:01:31.094998 | fa163e0d-6f45-64a1-ca76-0000000038ee | TIMING | Generate container puppet configs for step 3 | standalone | 0:22:25.060009 | 0.33s 2026-01-22 13:01:31.118760 | fa163e0d-6f45-64a1-ca76-0000000038ef | TASK | Manage Puppet containers (bootstrap tasks) for step 3 with tripleo-ansible 2026-01-22 13:01:31.146668 | fa163e0d-6f45-64a1-ca76-0000000038ef | TIMING | Manage Puppet containers (bootstrap tasks) for step 3 with tripleo-ansible | standalone | 0:22:25.111663 | 0.03s 2026-01-22 13:01:31.201929 | fa163e0d-6f45-64a1-ca76-00000000391e | TASK | Gather variables for each operating system 2026-01-22 13:01:31.282116 | fa163e0d-6f45-64a1-ca76-00000000391e | TIMING | tripleo_container_manage : Gather variables for each operating system | standalone | 0:22:25.247126 | 0.08s 2026-01-22 13:01:31.304375 | fa163e0d-6f45-64a1-ca76-00000000391f | TASK | Create container logs path 2026-01-22 13:01:31.533849 | fa163e0d-6f45-64a1-ca76-00000000391f | OK | Create container logs path | standalone 2026-01-22 13:01:31.535261 | fa163e0d-6f45-64a1-ca76-00000000391f | TIMING | tripleo_container_manage : Create container logs path | standalone | 0:22:25.500269 | 0.23s 2026-01-22 13:01:31.566925 | fa163e0d-6f45-64a1-ca76-000000003921 | TASK | Find all matching configs configs for in /var/lib/tripleo-config/container-puppet-config/step_3 2026-01-22 13:01:31.810886 | fa163e0d-6f45-64a1-ca76-000000003921 | OK | Find all matching configs configs for in /var/lib/tripleo-config/container-puppet-config/step_3 | standalone 2026-01-22 13:01:31.812059 | fa163e0d-6f45-64a1-ca76-000000003921 | TIMING | tripleo_container_manage : Find all matching configs configs for in /var/lib/tripleo-config/container-puppet-config/step_3 | standalone | 0:22:25.777065 | 0.24s 2026-01-22 13:01:31.840613 | fa163e0d-6f45-64a1-ca76-000000003922 | TASK | Finalise hashes 
for all containers 2026-01-22 13:01:31.883512 | fa163e0d-6f45-64a1-ca76-000000003922 | OK | Finalise hashes for all containers | standalone 2026-01-22 13:01:31.884591 | fa163e0d-6f45-64a1-ca76-000000003922 | TIMING | tripleo_container_manage : Finalise hashes for all containers | standalone | 0:22:25.849602 | 0.04s 2026-01-22 13:01:31.913289 | fa163e0d-6f45-64a1-ca76-000000003924 | TASK | Manage systemd shutdown files 2026-01-22 13:01:31.943564 | fa163e0d-6f45-64a1-ca76-000000003924 | SKIPPED | Manage systemd shutdown files | standalone 2026-01-22 13:01:31.944838 | fa163e0d-6f45-64a1-ca76-000000003924 | TIMING | tripleo_container_manage : Manage systemd shutdown files | standalone | 0:22:25.909847 | 0.03s 2026-01-22 13:01:31.973856 | fa163e0d-6f45-64a1-ca76-000000003926 | TASK | Update container configs with new config hashes 2026-01-22 13:01:32.024825 | fa163e0d-6f45-64a1-ca76-000000003926 | SKIPPED | Update container configs with new config hashes | standalone 2026-01-22 13:01:32.025977 | fa163e0d-6f45-64a1-ca76-000000003926 | TIMING | tripleo_container_manage : Update container configs with new config hashes | standalone | 0:22:25.990981 | 0.05s 2026-01-22 13:01:32.057283 | fa163e0d-6f45-64a1-ca76-000000003927 | TASK | Delete orphan containers from /var/lib/tripleo-config/container-puppet-config/step_3 2026-01-22 13:01:32.107997 | fa163e0d-6f45-64a1-ca76-000000003927 | SKIPPED | Delete orphan containers from /var/lib/tripleo-config/container-puppet-config/step_3 | standalone 2026-01-22 13:01:32.109453 | fa163e0d-6f45-64a1-ca76-000000003927 | TIMING | tripleo_container_manage : Delete orphan containers from /var/lib/tripleo-config/container-puppet-config/step_3 | standalone | 0:22:26.074455 | 0.05s 2026-01-22 13:01:32.141276 | fa163e0d-6f45-64a1-ca76-000000003928 | TASK | Create containers from /var/lib/tripleo-config/container-puppet-config/step_3 2026-01-22 13:01:32.195207 | fa163e0d-6f45-64a1-ca76-000000003928 | SKIPPED | Create containers from /var/lib/tripleo-config/container-puppet-config/step_3 | standalone 2026-01-22 13:01:32.196420 | fa163e0d-6f45-64a1-ca76-000000003928 | TIMING | tripleo_container_manage : Create containers from /var/lib/tripleo-config/container-puppet-config/step_3 | standalone | 0:22:26.161427 | 0.05s PLAY [External deployment step 4] ********************************************** 2026-01-22 13:01:32.357286 | fa163e0d-6f45-64a1-ca76-0000000000de | TASK | External deployment step 4 2026-01-22 13:01:32.400443 | fa163e0d-6f45-64a1-ca76-0000000000de | OK | External deployment step 4 | undercloud -> localhost | result={ "changed": false, "msg": "Use --start-at-task 'External deployment step 4' to resume from this task" } [WARNING]: ('undercloud -> localhost', 'fa163e0d-6f45-64a1-ca76-0000000000de') missing from stats 2026-01-22 13:01:32.459349 | fa163e0d-6f45-64a1-ca76-0000000000df | TIMING | include_tasks | undercloud | 0:22:26.424354 | 0.04s 2026-01-22 13:01:32.491405 | 066878d4-04a1-4e35-aad7-a00c5004cfa6 | INCLUDED | /root/standalone-ansible-mz1ymllk/external_deploy_steps_tasks_step4.yaml | undercloud 2026-01-22 13:01:32.509763 | fa163e0d-6f45-64a1-ca76-000000003977 | TASK | Clean up legacy Cinder keystone catalog entries 2026-01-22 13:01:34.083723 | fa163e0d-6f45-64a1-ca76-000000003977 | OK | Clean up legacy Cinder keystone catalog entries | undercloud | item={'service_name': 'cinderv2', 'service_type': 'volumev2'} 2026-01-22 13:01:34.085299 | fa163e0d-6f45-64a1-ca76-000000003977 | TIMING | Clean up legacy Cinder keystone catalog entries | undercloud | 
0:22:28.050294 | 1.57s 2026-01-22 13:01:35.275350 | fa163e0d-6f45-64a1-ca76-000000003977 | OK | Clean up legacy Cinder keystone catalog entries | undercloud | item={'service_name': 'cinderv3', 'service_type': 'volume'} 2026-01-22 13:01:35.277593 | fa163e0d-6f45-64a1-ca76-000000003977 | TIMING | Clean up legacy Cinder keystone catalog entries | undercloud | 0:22:29.242597 | 2.77s 2026-01-22 13:01:35.288077 | fa163e0d-6f45-64a1-ca76-000000003977 | TIMING | Clean up legacy Cinder keystone catalog entries | undercloud | 0:22:29.253078 | 2.78s 2026-01-22 13:01:35.306619 | fa163e0d-6f45-64a1-ca76-00000000397a | TASK | Manage Keystone resources for OpenStack services 2026-01-22 13:01:35.357241 | fa163e0d-6f45-64a1-ca76-00000000397a | TIMING | Manage Keystone resources for OpenStack services | undercloud | 0:22:29.322243 | 0.05s 2026-01-22 13:01:35.420857 | fa163e0d-6f45-64a1-ca76-0000000039a6 | TASK | Create Keystone Admin resources 2026-01-22 13:01:35.470997 | fa163e0d-6f45-64a1-ca76-0000000039a6 | TIMING | tripleo_keystone_resources : Create Keystone Admin resources | undercloud | 0:22:29.435996 | 0.05s 2026-01-22 13:01:35.508720 | b9da80c2-daa0-4469-8eb9-f09439467ed4 | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/admin.yml | undercloud 2026-01-22 13:01:35.536390 | fa163e0d-6f45-64a1-ca76-0000000039e7 | TASK | Create admin and service projects 2026-01-22 13:01:35.588768 | fa163e0d-6f45-64a1-ca76-0000000039e7 | TIMING | tripleo_keystone_resources : Create admin and service projects | undercloud | 0:22:29.553766 | 0.05s 2026-01-22 13:01:35.605114 | df207f62-0360-4c8a-8f05-66e137d91bf4 | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/projects.yml | undercloud 2026-01-22 13:01:35.640258 | fa163e0d-6f45-64a1-ca76-000000003a38 | TASK | Async creation of Keystone project 2026-01-22 13:01:36.074144 | fa163e0d-6f45-64a1-ca76-000000003a38 | CHANGED | Async creation of Keystone project | undercloud | item=admin 2026-01-22 13:01:36.076907 | fa163e0d-6f45-64a1-ca76-000000003a38 | TIMING | tripleo_keystone_resources : Async creation of Keystone project | undercloud | 0:22:30.041899 | 0.44s 2026-01-22 13:01:36.344253 | fa163e0d-6f45-64a1-ca76-000000003a38 | CHANGED | Async creation of Keystone project | undercloud | item=service 2026-01-22 13:01:36.346003 | fa163e0d-6f45-64a1-ca76-000000003a38 | TIMING | tripleo_keystone_resources : Async creation of Keystone project | undercloud | 0:22:30.311013 | 0.70s 2026-01-22 13:01:36.356321 | fa163e0d-6f45-64a1-ca76-000000003a38 | TIMING | tripleo_keystone_resources : Async creation of Keystone project | undercloud | 0:22:30.321332 | 0.71s 2026-01-22 13:01:36.376594 | fa163e0d-6f45-64a1-ca76-000000003a3a | TASK | Check Keystone project status 2026-01-22 13:01:36.656972 | fa163e0d-6f45-64a1-ca76-000000003a3a | WAITING | Check Keystone project status | undercloud | 30 retries left 2026-01-22 13:01:41.879636 | fa163e0d-6f45-64a1-ca76-000000003a3a | OK | Check Keystone project status | undercloud | item=admin 2026-01-22 13:01:41.881889 | fa163e0d-6f45-64a1-ca76-000000003a3a | TIMING | tripleo_keystone_resources : Check Keystone project status | undercloud | 0:22:35.846891 | 5.50s 2026-01-22 13:01:42.136722 | fa163e0d-6f45-64a1-ca76-000000003a3a | CHANGED | Check Keystone project status | undercloud | item=service 2026-01-22 13:01:42.137903 | fa163e0d-6f45-64a1-ca76-000000003a3a | TIMING | tripleo_keystone_resources : Check Keystone project status | undercloud | 0:22:36.102914 | 5.76s 2026-01-22 13:01:42.150529 | 
fa163e0d-6f45-64a1-ca76-000000003a3a | TIMING | tripleo_keystone_resources : Check Keystone project status | undercloud | 0:22:36.115541 | 5.77s 2026-01-22 13:01:42.170709 | fa163e0d-6f45-64a1-ca76-0000000039e8 | TASK | Create admin role 2026-01-22 13:01:43.464927 | fa163e0d-6f45-64a1-ca76-0000000039e8 | OK | Create admin role | undercloud 2026-01-22 13:01:43.466265 | fa163e0d-6f45-64a1-ca76-0000000039e8 | TIMING | tripleo_keystone_resources : Create admin role | undercloud | 0:22:37.431274 | 1.29s 2026-01-22 13:01:43.485351 | fa163e0d-6f45-64a1-ca76-0000000039e9 | TASK | Create _member_ role 2026-01-22 13:01:43.547154 | fa163e0d-6f45-64a1-ca76-0000000039e9 | SKIPPED | Create _member_ role | undercloud 2026-01-22 13:01:43.548456 | fa163e0d-6f45-64a1-ca76-0000000039e9 | TIMING | tripleo_keystone_resources : Create _member_ role | undercloud | 0:22:37.513461 | 0.06s 2026-01-22 13:01:43.567907 | fa163e0d-6f45-64a1-ca76-0000000039ea | TASK | Check admin password 2026-01-22 13:01:44.896819 | fa163e0d-6f45-64a1-ca76-0000000039ea | OK | Check admin password | undercloud 2026-01-22 13:01:44.898261 | fa163e0d-6f45-64a1-ca76-0000000039ea | TIMING | tripleo_keystone_resources : Check admin password | undercloud | 0:22:38.863267 | 1.33s 2026-01-22 13:01:44.918152 | fa163e0d-6f45-64a1-ca76-0000000039eb | TASK | Create admin user 2026-01-22 13:01:46.648375 | fa163e0d-6f45-64a1-ca76-0000000039eb | CHANGED | Create admin user | undercloud 2026-01-22 13:01:46.649720 | fa163e0d-6f45-64a1-ca76-0000000039eb | TIMING | tripleo_keystone_resources : Create admin user | undercloud | 0:22:40.614727 | 1.73s 2026-01-22 13:01:46.669480 | fa163e0d-6f45-64a1-ca76-0000000039ec | TASK | Assign admin role to admin project for admin user 2026-01-22 13:01:48.240331 | fa163e0d-6f45-64a1-ca76-0000000039ec | OK | Assign admin role to admin project for admin user | undercloud 2026-01-22 13:01:48.242094 | fa163e0d-6f45-64a1-ca76-0000000039ec | TIMING | tripleo_keystone_resources : Assign admin role to admin project for admin user | undercloud | 0:22:42.207108 | 1.57s 2026-01-22 13:01:48.258751 | fa163e0d-6f45-64a1-ca76-0000000039ed | TASK | Assign _member_ role to admin project for admin user 2026-01-22 13:01:48.323944 | fa163e0d-6f45-64a1-ca76-0000000039ed | SKIPPED | Assign _member_ role to admin project for admin user | undercloud 2026-01-22 13:01:48.325737 | fa163e0d-6f45-64a1-ca76-0000000039ed | TIMING | tripleo_keystone_resources : Assign _member_ role to admin project for admin user | undercloud | 0:22:42.290749 | 0.07s 2026-01-22 13:01:48.342512 | fa163e0d-6f45-64a1-ca76-0000000039ee | TASK | Create identity service 2026-01-22 13:01:49.621432 | fa163e0d-6f45-64a1-ca76-0000000039ee | OK | Create identity service | undercloud 2026-01-22 13:01:49.623524 | fa163e0d-6f45-64a1-ca76-0000000039ee | TIMING | tripleo_keystone_resources : Create identity service | undercloud | 0:22:43.588484 | 1.28s 2026-01-22 13:01:49.639964 | fa163e0d-6f45-64a1-ca76-0000000039ef | TASK | Create identity public endpoint 2026-01-22 13:01:50.917190 | fa163e0d-6f45-64a1-ca76-0000000039ef | OK | Create identity public endpoint | undercloud 2026-01-22 13:01:50.918813 | fa163e0d-6f45-64a1-ca76-0000000039ef | TIMING | tripleo_keystone_resources : Create identity public endpoint | undercloud | 0:22:44.883828 | 1.28s 2026-01-22 13:01:50.934836 | fa163e0d-6f45-64a1-ca76-0000000039f0 | TASK | Create identity internal endpoint 2026-01-22 13:01:52.110566 | fa163e0d-6f45-64a1-ca76-0000000039f0 | OK | Create identity internal endpoint | undercloud 2026-01-22 
13:01:52.111995 | fa163e0d-6f45-64a1-ca76-0000000039f0 | TIMING | tripleo_keystone_resources : Create identity internal endpoint | undercloud | 0:22:46.077002 | 1.18s 2026-01-22 13:01:52.131074 | fa163e0d-6f45-64a1-ca76-0000000039f1 | TASK | Create identity admin endpoint 2026-01-22 13:01:53.327072 | fa163e0d-6f45-64a1-ca76-0000000039f1 | OK | Create identity admin endpoint | undercloud 2026-01-22 13:01:53.327957 | fa163e0d-6f45-64a1-ca76-0000000039f1 | TIMING | tripleo_keystone_resources : Create identity admin endpoint | undercloud | 0:22:47.292967 | 1.20s 2026-01-22 13:01:53.342236 | fa163e0d-6f45-64a1-ca76-0000000039a7 | TASK | Create Keystone Projects 2026-01-22 13:01:53.425919 | fa163e0d-6f45-64a1-ca76-0000000039a7 | TIMING | tripleo_keystone_resources : Create Keystone Projects | undercloud | 0:22:47.390911 | 0.08s 2026-01-22 13:01:53.436022 | fa163e0d-6f45-64a1-ca76-0000000039a7 | TIMING | tripleo_keystone_resources : Create Keystone Projects | undercloud | 0:22:47.401035 | 0.09s 2026-01-22 13:01:53.442934 | 3b5c3ea1-3db2-471d-a73d-a9f91357081c | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/projects.yml | undercloud 2026-01-22 13:01:53.471220 | fa163e0d-6f45-64a1-ca76-000000003ac4 | TASK | Async creation of Keystone project 2026-01-22 13:01:53.763441 | fa163e0d-6f45-64a1-ca76-000000003ac4 | CHANGED | Async creation of Keystone project | undercloud | item=service 2026-01-22 13:01:53.765921 | fa163e0d-6f45-64a1-ca76-000000003ac4 | TIMING | tripleo_keystone_resources : Async creation of Keystone project | undercloud | 0:22:47.730926 | 0.29s 2026-01-22 13:01:53.776695 | fa163e0d-6f45-64a1-ca76-000000003ac4 | TIMING | tripleo_keystone_resources : Async creation of Keystone project | undercloud | 0:22:47.741665 | 0.30s 2026-01-22 13:01:53.795875 | fa163e0d-6f45-64a1-ca76-000000003ac6 | TASK | Check Keystone project status 2026-01-22 13:01:54.055900 | fa163e0d-6f45-64a1-ca76-000000003ac6 | WAITING | Check Keystone project status | undercloud | 30 retries left 2026-01-22 13:01:59.256232 | fa163e0d-6f45-64a1-ca76-000000003ac6 | OK | Check Keystone project status | undercloud | item=service 2026-01-22 13:01:59.257783 | fa163e0d-6f45-64a1-ca76-000000003ac6 | TIMING | tripleo_keystone_resources : Check Keystone project status | undercloud | 0:22:53.222795 | 5.46s 2026-01-22 13:01:59.268078 | fa163e0d-6f45-64a1-ca76-000000003ac6 | TIMING | tripleo_keystone_resources : Check Keystone project status | undercloud | 0:22:53.233087 | 5.47s 2026-01-22 13:01:59.284098 | fa163e0d-6f45-64a1-ca76-0000000039a9 | TASK | Create Keystone Domains 2026-01-22 13:01:59.373231 | fa163e0d-6f45-64a1-ca76-0000000039a9 | TIMING | tripleo_keystone_resources : Create Keystone Domains | undercloud | 0:22:53.338219 | 0.09s 2026-01-22 13:01:59.385388 | fa163e0d-6f45-64a1-ca76-0000000039a9 | TIMING | tripleo_keystone_resources : Create Keystone Domains | undercloud | 0:22:53.350391 | 0.10s 2026-01-22 13:01:59.400466 | 2539374d-681e-4aa1-8e8b-160296ed3b53 | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/domains.yml | undercloud 2026-01-22 13:01:59.446367 | fa163e0d-6f45-64a1-ca76-000000003b79 | TASK | Async creation of Keystone domains 2026-01-22 13:01:59.888030 | fa163e0d-6f45-64a1-ca76-000000003b79 | CHANGED | Async creation of Keystone domains | undercloud | item=heat_stack 2026-01-22 13:01:59.890075 | fa163e0d-6f45-64a1-ca76-000000003b79 | TIMING | tripleo_keystone_resources : Async creation of Keystone domains | undercloud | 0:22:53.855078 | 0.44s 2026-01-22 13:01:59.900161 
| fa163e0d-6f45-64a1-ca76-000000003b79 | TIMING | tripleo_keystone_resources : Async creation of Keystone domains | undercloud | 0:22:53.865164 | 0.45s 2026-01-22 13:01:59.919101 | fa163e0d-6f45-64a1-ca76-000000003b7b | TASK | Check Keystone domain status 2026-01-22 13:02:00.179218 | fa163e0d-6f45-64a1-ca76-000000003b7b | WAITING | Check Keystone domain status | undercloud | 30 retries left 2026-01-22 13:02:05.391393 | fa163e0d-6f45-64a1-ca76-000000003b7b | CHANGED | Check Keystone domain status | undercloud | item=heat_stack 2026-01-22 13:02:05.393189 | fa163e0d-6f45-64a1-ca76-000000003b7b | TIMING | tripleo_keystone_resources : Check Keystone domain status | undercloud | 0:22:59.358202 | 5.47s 2026-01-22 13:02:05.403483 | fa163e0d-6f45-64a1-ca76-000000003b7b | TIMING | tripleo_keystone_resources : Check Keystone domain status | undercloud | 0:22:59.368488 | 5.48s 2026-01-22 13:02:05.423077 | fa163e0d-6f45-64a1-ca76-0000000039ab | TASK | Gather all OpenStack domains for Ansible >= 2.9.0 2026-01-22 13:02:05.475398 | fa163e0d-6f45-64a1-ca76-0000000039ab | TIMING | tripleo_keystone_resources : Gather all OpenStack domains for Ansible >= 2.9.0 | undercloud | 0:22:59.440397 | 0.05s 2026-01-22 13:02:05.489036 | 7fc2d5c0-90cc-4a59-a466-3c145b62c147 | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/gather_domains.yml | undercloud 2026-01-22 13:02:05.551487 | fa163e0d-6f45-64a1-ca76-000000003c43 | TASK | Collect OpenStack Keystone domains infos 2026-01-22 13:02:06.808255 | fa163e0d-6f45-64a1-ca76-000000003c43 | OK | Collect OpenStack Keystone domains infos | undercloud 2026-01-22 13:02:06.809630 | fa163e0d-6f45-64a1-ca76-000000003c43 | TIMING | tripleo_keystone_resources : Collect OpenStack Keystone domains infos | undercloud | 0:23:00.774637 | 1.26s 2026-01-22 13:02:06.828196 | fa163e0d-6f45-64a1-ca76-000000003c44 | TASK | Set openstack_domains fact 2026-01-22 13:02:06.892118 | fa163e0d-6f45-64a1-ca76-000000003c44 | OK | Set openstack_domains fact | undercloud 2026-01-22 13:02:06.893022 | fa163e0d-6f45-64a1-ca76-000000003c44 | TIMING | tripleo_keystone_resources : Set openstack_domains fact | undercloud | 0:23:00.858040 | 0.06s 2026-01-22 13:02:06.910256 | fa163e0d-6f45-64a1-ca76-0000000039ac | TASK | Create Keystone Services 2026-01-22 13:02:07.015321 | fa163e0d-6f45-64a1-ca76-0000000039ac | TIMING | tripleo_keystone_resources : Create Keystone Services | undercloud | 0:23:00.980329 | 0.10s 2026-01-22 13:02:07.065598 | fa163e0d-6f45-64a1-ca76-0000000039ac | TIMING | tripleo_keystone_resources : Create Keystone Services | undercloud | 0:23:01.030601 | 0.15s 2026-01-22 13:02:07.078027 | fa163e0d-6f45-64a1-ca76-0000000039ac | TIMING | tripleo_keystone_resources : Create Keystone Services | undercloud | 0:23:01.043028 | 0.17s 2026-01-22 13:02:07.102067 | 7b9c5e1d-86ac-4dc9-8ae4-7bd1f7c4ec92 | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/services.yml | undercloud 2026-01-22 13:02:07.107998 | f36eef9d-7c25-4ead-bed2-64d6c99e533e | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/services.yml | undercloud 2026-01-22 13:02:07.151143 | fa163e0d-6f45-64a1-ca76-000000003d20 | TASK | Async creation of Keystone service 2026-01-22 13:02:07.518585 | fa163e0d-6f45-64a1-ca76-000000003d20 | CHANGED | Async creation of Keystone service | undercloud | item=aodh 2026-01-22 13:02:07.520581 | fa163e0d-6f45-64a1-ca76-000000003d20 | TIMING | tripleo_keystone_resources : Async creation of Keystone service | undercloud | 0:23:01.485583 | 0.37s 2026-01-22 
13:02:07.787753 | fa163e0d-6f45-64a1-ca76-000000003d20 | CHANGED | Async creation of Keystone service | undercloud | item=cinderv3 2026-01-22 13:02:07.789398 | fa163e0d-6f45-64a1-ca76-000000003d20 | TIMING | tripleo_keystone_resources : Async creation of Keystone service | undercloud | 0:23:01.754408 | 0.64s 2026-01-22 13:02:08.082276 | fa163e0d-6f45-64a1-ca76-000000003d20 | CHANGED | Async creation of Keystone service | undercloud | item=glance 2026-01-22 13:02:08.083549 | fa163e0d-6f45-64a1-ca76-000000003d20 | TIMING | tripleo_keystone_resources : Async creation of Keystone service | undercloud | 0:23:02.048560 | 0.93s 2026-01-22 13:02:08.379744 | fa163e0d-6f45-64a1-ca76-000000003d20 | CHANGED | Async creation of Keystone service | undercloud | item=gnocchi 2026-01-22 13:02:08.380729 | fa163e0d-6f45-64a1-ca76-000000003d20 | TIMING | tripleo_keystone_resources : Async creation of Keystone service | undercloud | 0:23:02.345739 | 1.23s 2026-01-22 13:02:08.718663 | fa163e0d-6f45-64a1-ca76-000000003d20 | CHANGED | Async creation of Keystone service | undercloud | item=heat 2026-01-22 13:02:08.719279 | fa163e0d-6f45-64a1-ca76-000000003d20 | TIMING | tripleo_keystone_resources : Async creation of Keystone service | undercloud | 0:23:02.684298 | 1.57s 2026-01-22 13:02:09.021849 | fa163e0d-6f45-64a1-ca76-000000003d20 | CHANGED | Async creation of Keystone service | undercloud | item=heat-cfn 2026-01-22 13:02:09.022412 | fa163e0d-6f45-64a1-ca76-000000003d20 | TIMING | tripleo_keystone_resources : Async creation of Keystone service | undercloud | 0:23:02.987430 | 1.87s 2026-01-22 13:02:09.353047 | fa163e0d-6f45-64a1-ca76-000000003d20 | CHANGED | Async creation of Keystone service | undercloud | item=manila 2026-01-22 13:02:09.354006 | fa163e0d-6f45-64a1-ca76-000000003d20 | TIMING | tripleo_keystone_resources : Async creation of Keystone service | undercloud | 0:23:03.319017 | 2.20s 2026-01-22 13:02:09.622029 | fa163e0d-6f45-64a1-ca76-000000003d20 | CHANGED | Async creation of Keystone service | undercloud | item=manilav2 2026-01-22 13:02:09.623027 | fa163e0d-6f45-64a1-ca76-000000003d20 | TIMING | tripleo_keystone_resources : Async creation of Keystone service | undercloud | 0:23:03.588037 | 2.47s 2026-01-22 13:02:09.931830 | fa163e0d-6f45-64a1-ca76-000000003d20 | CHANGED | Async creation of Keystone service | undercloud | item=neutron 2026-01-22 13:02:09.932740 | fa163e0d-6f45-64a1-ca76-000000003d20 | TIMING | tripleo_keystone_resources : Async creation of Keystone service | undercloud | 0:23:03.897751 | 2.78s 2026-01-22 13:02:10.224399 | fa163e0d-6f45-64a1-ca76-000000003d20 | CHANGED | Async creation of Keystone service | undercloud | item=nova 2026-01-22 13:02:10.225429 | fa163e0d-6f45-64a1-ca76-000000003d20 | TIMING | tripleo_keystone_resources : Async creation of Keystone service | undercloud | 0:23:04.190438 | 3.07s 2026-01-22 13:02:10.233493 | fa163e0d-6f45-64a1-ca76-000000003d20 | TIMING | tripleo_keystone_resources : Async creation of Keystone service | undercloud | 0:23:04.198494 | 3.08s 2026-01-22 13:02:10.256333 | fa163e0d-6f45-64a1-ca76-000000003d22 | TASK | Check Keystone service status 2026-01-22 13:02:10.494591 | fa163e0d-6f45-64a1-ca76-000000003d22 | CHANGED | Check Keystone service status | undercloud | item=aodh 2026-01-22 13:02:10.496899 | fa163e0d-6f45-64a1-ca76-000000003d22 | TIMING | tripleo_keystone_resources : Check Keystone service status | undercloud | 0:23:04.461912 | 0.24s 2026-01-22 13:02:10.767367 | fa163e0d-6f45-64a1-ca76-000000003d22 | CHANGED | Check Keystone 
service status | undercloud | item=cinderv3 2026-01-22 13:02:10.768521 | fa163e0d-6f45-64a1-ca76-000000003d22 | TIMING | tripleo_keystone_resources : Check Keystone service status | undercloud | 0:23:04.733530 | 0.51s 2026-01-22 13:02:10.987249 | fa163e0d-6f45-64a1-ca76-000000003d22 | CHANGED | Check Keystone service status | undercloud | item=glance 2026-01-22 13:02:10.989131 | fa163e0d-6f45-64a1-ca76-000000003d22 | TIMING | tripleo_keystone_resources : Check Keystone service status | undercloud | 0:23:04.954140 | 0.73s 2026-01-22 13:02:11.221283 | fa163e0d-6f45-64a1-ca76-000000003d22 | CHANGED | Check Keystone service status | undercloud | item=gnocchi 2026-01-22 13:02:11.222383 | fa163e0d-6f45-64a1-ca76-000000003d22 | TIMING | tripleo_keystone_resources : Check Keystone service status | undercloud | 0:23:05.187393 | 0.96s 2026-01-22 13:02:11.472810 | fa163e0d-6f45-64a1-ca76-000000003d22 | CHANGED | Check Keystone service status | undercloud | item=heat 2026-01-22 13:02:11.474499 | fa163e0d-6f45-64a1-ca76-000000003d22 | TIMING | tripleo_keystone_resources : Check Keystone service status | undercloud | 0:23:05.439502 | 1.22s 2026-01-22 13:02:11.729941 | fa163e0d-6f45-64a1-ca76-000000003d22 | CHANGED | Check Keystone service status | undercloud | item=heat-cfn 2026-01-22 13:02:11.731681 | fa163e0d-6f45-64a1-ca76-000000003d22 | TIMING | tripleo_keystone_resources : Check Keystone service status | undercloud | 0:23:05.696663 | 1.47s 2026-01-22 13:02:11.991557 | fa163e0d-6f45-64a1-ca76-000000003d22 | CHANGED | Check Keystone service status | undercloud | item=manila 2026-01-22 13:02:11.992691 | fa163e0d-6f45-64a1-ca76-000000003d22 | TIMING | tripleo_keystone_resources : Check Keystone service status | undercloud | 0:23:05.957699 | 1.73s 2026-01-22 13:02:12.203050 | fa163e0d-6f45-64a1-ca76-000000003d22 | CHANGED | Check Keystone service status | undercloud | item=manilav2 2026-01-22 13:02:12.204204 | fa163e0d-6f45-64a1-ca76-000000003d22 | TIMING | tripleo_keystone_resources : Check Keystone service status | undercloud | 0:23:06.169213 | 1.95s 2026-01-22 13:02:12.419329 | fa163e0d-6f45-64a1-ca76-000000003d22 | CHANGED | Check Keystone service status | undercloud | item=neutron 2026-01-22 13:02:12.420948 | fa163e0d-6f45-64a1-ca76-000000003d22 | TIMING | tripleo_keystone_resources : Check Keystone service status | undercloud | 0:23:06.385957 | 2.16s 2026-01-22 13:02:12.673495 | fa163e0d-6f45-64a1-ca76-000000003d22 | CHANGED | Check Keystone service status | undercloud | item=nova 2026-01-22 13:02:12.674711 | fa163e0d-6f45-64a1-ca76-000000003d22 | TIMING | tripleo_keystone_resources : Check Keystone service status | undercloud | 0:23:06.639721 | 2.42s 2026-01-22 13:02:12.686346 | fa163e0d-6f45-64a1-ca76-000000003d22 | TIMING | tripleo_keystone_resources : Check Keystone service status | undercloud | 0:23:06.651357 | 2.43s 2026-01-22 13:02:12.711280 | fa163e0d-6f45-64a1-ca76-000000003d27 | TASK | Async creation of Keystone service 2026-01-22 13:02:13.030701 | fa163e0d-6f45-64a1-ca76-000000003d27 | CHANGED | Async creation of Keystone service | undercloud | item=octavia 2026-01-22 13:02:13.032539 | fa163e0d-6f45-64a1-ca76-000000003d27 | TIMING | tripleo_keystone_resources : Async creation of Keystone service | undercloud | 0:23:06.997548 | 0.32s 2026-01-22 13:02:13.330999 | fa163e0d-6f45-64a1-ca76-000000003d27 | CHANGED | Async creation of Keystone service | undercloud | item=placement 2026-01-22 13:02:13.331980 | fa163e0d-6f45-64a1-ca76-000000003d27 | TIMING | tripleo_keystone_resources : Async 
creation of Keystone service | undercloud | 0:23:07.296991 | 0.62s 2026-01-22 13:02:13.615602 | fa163e0d-6f45-64a1-ca76-000000003d27 | CHANGED | Async creation of Keystone service | undercloud | item=swift 2026-01-22 13:02:13.616315 | fa163e0d-6f45-64a1-ca76-000000003d27 | TIMING | tripleo_keystone_resources : Async creation of Keystone service | undercloud | 0:23:07.581332 | 0.90s 2026-01-22 13:02:13.624330 | fa163e0d-6f45-64a1-ca76-000000003d27 | TIMING | tripleo_keystone_resources : Async creation of Keystone service | undercloud | 0:23:07.589342 | 0.91s 2026-01-22 13:02:13.643233 | fa163e0d-6f45-64a1-ca76-000000003d29 | TASK | Check Keystone service status 2026-01-22 13:02:13.889010 | fa163e0d-6f45-64a1-ca76-000000003d29 | WAITING | Check Keystone service status | undercloud | 30 retries left 2026-01-22 13:02:19.110512 | fa163e0d-6f45-64a1-ca76-000000003d29 | CHANGED | Check Keystone service status | undercloud | item=octavia 2026-01-22 13:02:19.113308 | fa163e0d-6f45-64a1-ca76-000000003d29 | TIMING | tripleo_keystone_resources : Check Keystone service status | undercloud | 0:23:13.078302 | 5.47s 2026-01-22 13:02:19.396894 | fa163e0d-6f45-64a1-ca76-000000003d29 | CHANGED | Check Keystone service status | undercloud | item=placement 2026-01-22 13:02:19.397563 | fa163e0d-6f45-64a1-ca76-000000003d29 | TIMING | tripleo_keystone_resources : Check Keystone service status | undercloud | 0:23:13.362581 | 5.75s 2026-01-22 13:02:19.638998 | fa163e0d-6f45-64a1-ca76-000000003d29 | CHANGED | Check Keystone service status | undercloud | item=swift 2026-01-22 13:02:19.639726 | fa163e0d-6f45-64a1-ca76-000000003d29 | TIMING | tripleo_keystone_resources : Check Keystone service status | undercloud | 0:23:13.604744 | 6.00s 2026-01-22 13:02:19.646586 | fa163e0d-6f45-64a1-ca76-000000003d29 | TIMING | tripleo_keystone_resources : Check Keystone service status | undercloud | 0:23:13.611588 | 6.00s 2026-01-22 13:02:19.666894 | fa163e0d-6f45-64a1-ca76-0000000039ae | TASK | Create Keystone Endpoints 2026-01-22 13:02:19.743497 | fa163e0d-6f45-64a1-ca76-0000000039ae | TIMING | tripleo_keystone_resources : Create Keystone Endpoints | undercloud | 0:23:13.708486 | 0.07s 2026-01-22 13:02:19.774622 | fa163e0d-6f45-64a1-ca76-0000000039ae | TIMING | tripleo_keystone_resources : Create Keystone Endpoints | undercloud | 0:23:13.739628 | 0.11s 2026-01-22 13:02:19.800468 | fa163e0d-6f45-64a1-ca76-0000000039ae | TIMING | tripleo_keystone_resources : Create Keystone Endpoints | undercloud | 0:23:13.765471 | 0.13s 2026-01-22 13:02:19.811563 | fa163e0d-6f45-64a1-ca76-0000000039ae | TIMING | tripleo_keystone_resources : Create Keystone Endpoints | undercloud | 0:23:13.776573 | 0.14s 2026-01-22 13:02:19.818200 | 7e08a58a-b46e-4354-a2b4-f5720222f4bc | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/loop-endpoints.yml | undercloud 2026-01-22 13:02:19.821281 | 0891350e-9b34-42aa-83a0-24f4b9f85b6d | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/loop-endpoints.yml | undercloud 2026-01-22 13:02:19.823839 | f9898cd7-d71e-4c16-9a7e-52b59e545066 | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/loop-endpoints.yml | undercloud 2026-01-22 13:02:19.865311 | fa163e0d-6f45-64a1-ca76-000000003e59 | TASK | Create Keystone public endpoints 2026-01-22 13:02:19.964017 | fa163e0d-6f45-64a1-ca76-000000003e59 | TIMING | tripleo_keystone_resources : Create Keystone public endpoints | undercloud | 0:23:13.929004 | 0.10s 2026-01-22 13:02:19.981547 | fa163e0d-6f45-64a1-ca76-000000003e59 | 
TIMING | tripleo_keystone_resources : Create Keystone public endpoints | undercloud | 0:23:13.946559 | 0.12s 2026-01-22 13:02:19.996405 | fa163e0d-6f45-64a1-ca76-000000003e59 | TIMING | tripleo_keystone_resources : Create Keystone public endpoints | undercloud | 0:23:13.961408 | 0.13s 2026-01-22 13:02:20.015111 | 96b619bf-30c1-4e58-8e80-5bab89a064e3 | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/endpoints.yml | undercloud 2026-01-22 13:02:20.024575 | b43aad3e-2e56-4c16-8660-37f87d480751 | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/endpoints.yml | undercloud 2026-01-22 13:02:20.051069 | fa163e0d-6f45-64a1-ca76-000000003f81 | TASK | Async creation of Keystone public endpoint 2026-01-22 13:02:20.387465 | fa163e0d-6f45-64a1-ca76-000000003f81 | CHANGED | Async creation of Keystone public endpoint | undercloud | item=aodh 2026-01-22 13:02:20.389857 | fa163e0d-6f45-64a1-ca76-000000003f81 | TIMING | tripleo_keystone_resources : Async creation of Keystone public endpoint | undercloud | 0:23:14.354858 | 0.34s 2026-01-22 13:02:20.643265 | fa163e0d-6f45-64a1-ca76-000000003f81 | CHANGED | Async creation of Keystone public endpoint | undercloud | item=cinderv3 2026-01-22 13:02:20.644488 | fa163e0d-6f45-64a1-ca76-000000003f81 | TIMING | tripleo_keystone_resources : Async creation of Keystone public endpoint | undercloud | 0:23:14.609492 | 0.59s 2026-01-22 13:02:20.929766 | fa163e0d-6f45-64a1-ca76-000000003f81 | CHANGED | Async creation of Keystone public endpoint | undercloud | item=glance 2026-01-22 13:02:20.930418 | fa163e0d-6f45-64a1-ca76-000000003f81 | TIMING | tripleo_keystone_resources : Async creation of Keystone public endpoint | undercloud | 0:23:14.895436 | 0.88s 2026-01-22 13:02:21.223728 | fa163e0d-6f45-64a1-ca76-000000003f81 | CHANGED | Async creation of Keystone public endpoint | undercloud | item=gnocchi 2026-01-22 13:02:21.225478 | fa163e0d-6f45-64a1-ca76-000000003f81 | TIMING | tripleo_keystone_resources : Async creation of Keystone public endpoint | undercloud | 0:23:15.190487 | 1.17s 2026-01-22 13:02:21.483082 | fa163e0d-6f45-64a1-ca76-000000003f81 | CHANGED | Async creation of Keystone public endpoint | undercloud | item=heat 2026-01-22 13:02:21.483717 | fa163e0d-6f45-64a1-ca76-000000003f81 | TIMING | tripleo_keystone_resources : Async creation of Keystone public endpoint | undercloud | 0:23:15.448735 | 1.43s 2026-01-22 13:02:21.760529 | fa163e0d-6f45-64a1-ca76-000000003f81 | CHANGED | Async creation of Keystone public endpoint | undercloud | item=heat-cfn 2026-01-22 13:02:21.761803 | fa163e0d-6f45-64a1-ca76-000000003f81 | TIMING | tripleo_keystone_resources : Async creation of Keystone public endpoint | undercloud | 0:23:15.726821 | 1.71s 2026-01-22 13:02:22.062727 | fa163e0d-6f45-64a1-ca76-000000003f81 | CHANGED | Async creation of Keystone public endpoint | undercloud | item=manila 2026-01-22 13:02:22.063799 | fa163e0d-6f45-64a1-ca76-000000003f81 | TIMING | tripleo_keystone_resources : Async creation of Keystone public endpoint | undercloud | 0:23:16.028809 | 2.01s 2026-01-22 13:02:22.393113 | fa163e0d-6f45-64a1-ca76-000000003f81 | CHANGED | Async creation of Keystone public endpoint | undercloud | item=manilav2 2026-01-22 13:02:22.394497 | fa163e0d-6f45-64a1-ca76-000000003f81 | TIMING | tripleo_keystone_resources : Async creation of Keystone public endpoint | undercloud | 0:23:16.359514 | 2.34s 2026-01-22 13:02:22.732123 | fa163e0d-6f45-64a1-ca76-000000003f81 | CHANGED | Async creation of Keystone public endpoint | undercloud | 
item=neutron 2026-01-22 13:02:22.734106 | fa163e0d-6f45-64a1-ca76-000000003f81 | TIMING | tripleo_keystone_resources : Async creation of Keystone public endpoint | undercloud | 0:23:16.699120 | 2.68s 2026-01-22 13:02:23.004492 | fa163e0d-6f45-64a1-ca76-000000003f81 | CHANGED | Async creation of Keystone public endpoint | undercloud | item=nova 2026-01-22 13:02:23.005378 | fa163e0d-6f45-64a1-ca76-000000003f81 | TIMING | tripleo_keystone_resources : Async creation of Keystone public endpoint | undercloud | 0:23:16.970392 | 2.95s 2026-01-22 13:02:23.013517 | fa163e0d-6f45-64a1-ca76-000000003f81 | TIMING | tripleo_keystone_resources : Async creation of Keystone public endpoint | undercloud | 0:23:16.978523 | 2.96s 2026-01-22 13:02:23.039430 | fa163e0d-6f45-64a1-ca76-000000003f83 | TASK | Check Keystone public endpoint status 2026-01-22 13:02:23.302304 | fa163e0d-6f45-64a1-ca76-000000003f83 | CHANGED | Check Keystone public endpoint status | undercloud | item=aodh 2026-01-22 13:02:23.305024 | fa163e0d-6f45-64a1-ca76-000000003f83 | TIMING | tripleo_keystone_resources : Check Keystone public endpoint status | undercloud | 0:23:17.270025 | 0.26s 2026-01-22 13:02:23.520173 | fa163e0d-6f45-64a1-ca76-000000003f83 | CHANGED | Check Keystone public endpoint status | undercloud | item=cinderv3 2026-01-22 13:02:23.521831 | fa163e0d-6f45-64a1-ca76-000000003f83 | TIMING | tripleo_keystone_resources : Check Keystone public endpoint status | undercloud | 0:23:17.486840 | 0.48s 2026-01-22 13:02:23.788155 | fa163e0d-6f45-64a1-ca76-000000003f83 | CHANGED | Check Keystone public endpoint status | undercloud | item=glance 2026-01-22 13:02:23.789933 | fa163e0d-6f45-64a1-ca76-000000003f83 | TIMING | tripleo_keystone_resources : Check Keystone public endpoint status | undercloud | 0:23:17.754935 | 0.75s 2026-01-22 13:02:24.052800 | fa163e0d-6f45-64a1-ca76-000000003f83 | CHANGED | Check Keystone public endpoint status | undercloud | item=gnocchi 2026-01-22 13:02:24.053527 | fa163e0d-6f45-64a1-ca76-000000003f83 | TIMING | tripleo_keystone_resources : Check Keystone public endpoint status | undercloud | 0:23:18.018545 | 1.01s 2026-01-22 13:02:24.320748 | fa163e0d-6f45-64a1-ca76-000000003f83 | CHANGED | Check Keystone public endpoint status | undercloud | item=heat 2026-01-22 13:02:24.322795 | fa163e0d-6f45-64a1-ca76-000000003f83 | TIMING | tripleo_keystone_resources : Check Keystone public endpoint status | undercloud | 0:23:18.287804 | 1.28s 2026-01-22 13:02:24.604691 | fa163e0d-6f45-64a1-ca76-000000003f83 | CHANGED | Check Keystone public endpoint status | undercloud | item=heat-cfn 2026-01-22 13:02:24.605552 | fa163e0d-6f45-64a1-ca76-000000003f83 | TIMING | tripleo_keystone_resources : Check Keystone public endpoint status | undercloud | 0:23:18.570567 | 1.56s 2026-01-22 13:02:24.862956 | fa163e0d-6f45-64a1-ca76-000000003f83 | CHANGED | Check Keystone public endpoint status | undercloud | item=manila 2026-01-22 13:02:24.864984 | fa163e0d-6f45-64a1-ca76-000000003f83 | TIMING | tripleo_keystone_resources : Check Keystone public endpoint status | undercloud | 0:23:18.829992 | 1.82s 2026-01-22 13:02:25.142156 | fa163e0d-6f45-64a1-ca76-000000003f83 | CHANGED | Check Keystone public endpoint status | undercloud | item=manilav2 2026-01-22 13:02:25.142935 | fa163e0d-6f45-64a1-ca76-000000003f83 | TIMING | tripleo_keystone_resources : Check Keystone public endpoint status | undercloud | 0:23:19.107953 | 2.10s 2026-01-22 13:02:25.425714 | fa163e0d-6f45-64a1-ca76-000000003f83 | CHANGED | Check Keystone public endpoint status | 
undercloud | item=neutron 2026-01-22 13:02:25.426844 | fa163e0d-6f45-64a1-ca76-000000003f83 | TIMING | tripleo_keystone_resources : Check Keystone public endpoint status | undercloud | 0:23:19.391854 | 2.39s 2026-01-22 13:02:25.674769 | fa163e0d-6f45-64a1-ca76-000000003f83 | CHANGED | Check Keystone public endpoint status | undercloud | item=nova 2026-01-22 13:02:25.676001 | fa163e0d-6f45-64a1-ca76-000000003f83 | TIMING | tripleo_keystone_resources : Check Keystone public endpoint status | undercloud | 0:23:19.641011 | 2.63s 2026-01-22 13:02:25.687849 | fa163e0d-6f45-64a1-ca76-000000003f83 | TIMING | tripleo_keystone_resources : Check Keystone public endpoint status | undercloud | 0:23:19.652850 | 2.65s 2026-01-22 13:02:25.716907 | fa163e0d-6f45-64a1-ca76-000000003f88 | TASK | Async creation of Keystone public endpoint 2026-01-22 13:02:26.102761 | fa163e0d-6f45-64a1-ca76-000000003f88 | CHANGED | Async creation of Keystone public endpoint | undercloud | item=octavia 2026-01-22 13:02:26.105595 | fa163e0d-6f45-64a1-ca76-000000003f88 | TIMING | tripleo_keystone_resources : Async creation of Keystone public endpoint | undercloud | 0:23:20.070588 | 0.39s 2026-01-22 13:02:26.382418 | fa163e0d-6f45-64a1-ca76-000000003f88 | CHANGED | Async creation of Keystone public endpoint | undercloud | item=placement 2026-01-22 13:02:26.383478 | fa163e0d-6f45-64a1-ca76-000000003f88 | TIMING | tripleo_keystone_resources : Async creation of Keystone public endpoint | undercloud | 0:23:20.348488 | 0.66s 2026-01-22 13:02:26.680950 | fa163e0d-6f45-64a1-ca76-000000003f88 | CHANGED | Async creation of Keystone public endpoint | undercloud | item=swift 2026-01-22 13:02:26.682149 | fa163e0d-6f45-64a1-ca76-000000003f88 | TIMING | tripleo_keystone_resources : Async creation of Keystone public endpoint | undercloud | 0:23:20.647164 | 0.96s 2026-01-22 13:02:26.692389 | fa163e0d-6f45-64a1-ca76-000000003f88 | TIMING | tripleo_keystone_resources : Async creation of Keystone public endpoint | undercloud | 0:23:20.657403 | 0.97s 2026-01-22 13:02:26.708142 | fa163e0d-6f45-64a1-ca76-000000003f8a | TASK | Check Keystone public endpoint status 2026-01-22 13:02:26.985131 | fa163e0d-6f45-64a1-ca76-000000003f8a | WAITING | Check Keystone public endpoint status | undercloud | 30 retries left 2026-01-22 13:02:32.195091 | fa163e0d-6f45-64a1-ca76-000000003f8a | CHANGED | Check Keystone public endpoint status | undercloud | item=octavia 2026-01-22 13:02:32.196367 | fa163e0d-6f45-64a1-ca76-000000003f8a | TIMING | tripleo_keystone_resources : Check Keystone public endpoint status | undercloud | 0:23:26.161369 | 5.49s 2026-01-22 13:02:32.419958 | fa163e0d-6f45-64a1-ca76-000000003f8a | CHANGED | Check Keystone public endpoint status | undercloud | item=placement 2026-01-22 13:02:32.420571 | fa163e0d-6f45-64a1-ca76-000000003f8a | TIMING | tripleo_keystone_resources : Check Keystone public endpoint status | undercloud | 0:23:26.385590 | 5.71s 2026-01-22 13:02:32.683559 | fa163e0d-6f45-64a1-ca76-000000003f8a | CHANGED | Check Keystone public endpoint status | undercloud | item=swift 2026-01-22 13:02:32.684220 | fa163e0d-6f45-64a1-ca76-000000003f8a | TIMING | tripleo_keystone_resources : Check Keystone public endpoint status | undercloud | 0:23:26.649240 | 5.98s 2026-01-22 13:02:32.691371 | fa163e0d-6f45-64a1-ca76-000000003f8a | TIMING | tripleo_keystone_resources : Check Keystone public endpoint status | undercloud | 0:23:26.656378 | 5.98s 2026-01-22 13:02:32.713793 | fa163e0d-6f45-64a1-ca76-000000003e5e | TASK | Create Keystone admin endpoints 
2026-01-22 13:02:32.799859 | fa163e0d-6f45-64a1-ca76-000000003e5e | TIMING | tripleo_keystone_resources : Create Keystone admin endpoints | undercloud | 0:23:26.764859 | 0.08s 2026-01-22 13:02:32.821155 | fa163e0d-6f45-64a1-ca76-000000003e5e | TIMING | tripleo_keystone_resources : Create Keystone admin endpoints | undercloud | 0:23:26.786171 | 0.11s 2026-01-22 13:02:32.828042 | fa163e0d-6f45-64a1-ca76-000000003e5e | TIMING | tripleo_keystone_resources : Create Keystone admin endpoints | undercloud | 0:23:26.793054 | 0.11s 2026-01-22 13:02:32.844562 | eeb41035-b50d-4e37-88b3-34b207557185 | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/endpoints.yml | undercloud 2026-01-22 13:02:32.854373 | 2b22ab90-2646-4100-9503-689962f2ea16 | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/endpoints.yml | undercloud 2026-01-22 13:02:32.879184 | fa163e0d-6f45-64a1-ca76-000000003fe2 | TASK | Async creation of Keystone admin endpoint 2026-01-22 13:02:33.250345 | fa163e0d-6f45-64a1-ca76-000000003fe2 | CHANGED | Async creation of Keystone admin endpoint | undercloud | item=aodh 2026-01-22 13:02:33.251987 | fa163e0d-6f45-64a1-ca76-000000003fe2 | TIMING | tripleo_keystone_resources : Async creation of Keystone admin endpoint | undercloud | 0:23:27.216991 | 0.37s 2026-01-22 13:02:33.515277 | fa163e0d-6f45-64a1-ca76-000000003fe2 | CHANGED | Async creation of Keystone admin endpoint | undercloud | item=cinderv3 2026-01-22 13:02:33.516293 | fa163e0d-6f45-64a1-ca76-000000003fe2 | TIMING | tripleo_keystone_resources : Async creation of Keystone admin endpoint | undercloud | 0:23:27.481304 | 0.64s 2026-01-22 13:02:33.817390 | fa163e0d-6f45-64a1-ca76-000000003fe2 | CHANGED | Async creation of Keystone admin endpoint | undercloud | item=glance 2026-01-22 13:02:33.818400 | fa163e0d-6f45-64a1-ca76-000000003fe2 | TIMING | tripleo_keystone_resources : Async creation of Keystone admin endpoint | undercloud | 0:23:27.783411 | 0.94s 2026-01-22 13:02:34.121794 | fa163e0d-6f45-64a1-ca76-000000003fe2 | CHANGED | Async creation of Keystone admin endpoint | undercloud | item=gnocchi 2026-01-22 13:02:34.122355 | fa163e0d-6f45-64a1-ca76-000000003fe2 | TIMING | tripleo_keystone_resources : Async creation of Keystone admin endpoint | undercloud | 0:23:28.087374 | 1.24s 2026-01-22 13:02:34.445774 | fa163e0d-6f45-64a1-ca76-000000003fe2 | CHANGED | Async creation of Keystone admin endpoint | undercloud | item=heat 2026-01-22 13:02:34.446877 | fa163e0d-6f45-64a1-ca76-000000003fe2 | TIMING | tripleo_keystone_resources : Async creation of Keystone admin endpoint | undercloud | 0:23:28.411888 | 1.57s 2026-01-22 13:02:34.724928 | fa163e0d-6f45-64a1-ca76-000000003fe2 | CHANGED | Async creation of Keystone admin endpoint | undercloud | item=heat-cfn 2026-01-22 13:02:34.725929 | fa163e0d-6f45-64a1-ca76-000000003fe2 | TIMING | tripleo_keystone_resources : Async creation of Keystone admin endpoint | undercloud | 0:23:28.690940 | 1.85s 2026-01-22 13:02:35.012098 | fa163e0d-6f45-64a1-ca76-000000003fe2 | CHANGED | Async creation of Keystone admin endpoint | undercloud | item=manila 2026-01-22 13:02:35.013131 | fa163e0d-6f45-64a1-ca76-000000003fe2 | TIMING | tripleo_keystone_resources : Async creation of Keystone admin endpoint | undercloud | 0:23:28.978142 | 2.13s 2026-01-22 13:02:35.300024 | fa163e0d-6f45-64a1-ca76-000000003fe2 | CHANGED | Async creation of Keystone admin endpoint | undercloud | item=manilav2 2026-01-22 13:02:35.300584 | fa163e0d-6f45-64a1-ca76-000000003fe2 | TIMING | 
tripleo_keystone_resources : Async creation of Keystone admin endpoint | undercloud | 0:23:29.265603 | 2.42s 2026-01-22 13:02:35.656821 | fa163e0d-6f45-64a1-ca76-000000003fe2 | CHANGED | Async creation of Keystone admin endpoint | undercloud | item=neutron 2026-01-22 13:02:35.657377 | fa163e0d-6f45-64a1-ca76-000000003fe2 | TIMING | tripleo_keystone_resources : Async creation of Keystone admin endpoint | undercloud | 0:23:29.622396 | 2.78s 2026-01-22 13:02:35.933714 | fa163e0d-6f45-64a1-ca76-000000003fe2 | CHANGED | Async creation of Keystone admin endpoint | undercloud | item=nova 2026-01-22 13:02:35.934714 | fa163e0d-6f45-64a1-ca76-000000003fe2 | TIMING | tripleo_keystone_resources : Async creation of Keystone admin endpoint | undercloud | 0:23:29.899724 | 3.05s 2026-01-22 13:02:35.942098 | fa163e0d-6f45-64a1-ca76-000000003fe2 | TIMING | tripleo_keystone_resources : Async creation of Keystone admin endpoint | undercloud | 0:23:29.907111 | 3.06s 2026-01-22 13:02:35.964736 | fa163e0d-6f45-64a1-ca76-000000003fe4 | TASK | Check Keystone admin endpoint status 2026-01-22 13:02:36.282575 | fa163e0d-6f45-64a1-ca76-000000003fe4 | CHANGED | Check Keystone admin endpoint status | undercloud | item=aodh 2026-01-22 13:02:36.283809 | fa163e0d-6f45-64a1-ca76-000000003fe4 | TIMING | tripleo_keystone_resources : Check Keystone admin endpoint status | undercloud | 0:23:30.248817 | 0.32s 2026-01-22 13:02:36.548579 | fa163e0d-6f45-64a1-ca76-000000003fe4 | CHANGED | Check Keystone admin endpoint status | undercloud | item=cinderv3 2026-01-22 13:02:36.549707 | fa163e0d-6f45-64a1-ca76-000000003fe4 | TIMING | tripleo_keystone_resources : Check Keystone admin endpoint status | undercloud | 0:23:30.514717 | 0.58s 2026-01-22 13:02:36.760071 | fa163e0d-6f45-64a1-ca76-000000003fe4 | CHANGED | Check Keystone admin endpoint status | undercloud | item=glance 2026-01-22 13:02:36.761178 | fa163e0d-6f45-64a1-ca76-000000003fe4 | TIMING | tripleo_keystone_resources : Check Keystone admin endpoint status | undercloud | 0:23:30.726189 | 0.80s 2026-01-22 13:02:37.022198 | fa163e0d-6f45-64a1-ca76-000000003fe4 | CHANGED | Check Keystone admin endpoint status | undercloud | item=gnocchi 2026-01-22 13:02:37.023321 | fa163e0d-6f45-64a1-ca76-000000003fe4 | TIMING | tripleo_keystone_resources : Check Keystone admin endpoint status | undercloud | 0:23:30.988331 | 1.06s 2026-01-22 13:02:37.265825 | fa163e0d-6f45-64a1-ca76-000000003fe4 | CHANGED | Check Keystone admin endpoint status | undercloud | item=heat 2026-01-22 13:02:37.267768 | fa163e0d-6f45-64a1-ca76-000000003fe4 | TIMING | tripleo_keystone_resources : Check Keystone admin endpoint status | undercloud | 0:23:31.232776 | 1.30s 2026-01-22 13:02:37.491176 | fa163e0d-6f45-64a1-ca76-000000003fe4 | CHANGED | Check Keystone admin endpoint status | undercloud | item=heat-cfn 2026-01-22 13:02:37.491827 | fa163e0d-6f45-64a1-ca76-000000003fe4 | TIMING | tripleo_keystone_resources : Check Keystone admin endpoint status | undercloud | 0:23:31.456845 | 1.53s 2026-01-22 13:02:37.743584 | fa163e0d-6f45-64a1-ca76-000000003fe4 | CHANGED | Check Keystone admin endpoint status | undercloud | item=manila 2026-01-22 13:02:37.744886 | fa163e0d-6f45-64a1-ca76-000000003fe4 | TIMING | tripleo_keystone_resources : Check Keystone admin endpoint status | undercloud | 0:23:31.709894 | 1.78s 2026-01-22 13:02:38.048708 | fa163e0d-6f45-64a1-ca76-000000003fe4 | CHANGED | Check Keystone admin endpoint status | undercloud | item=manilav2 2026-01-22 13:02:38.051061 | fa163e0d-6f45-64a1-ca76-000000003fe4 | TIMING | 
tripleo_keystone_resources : Check Keystone admin endpoint status | undercloud | 0:23:32.016062 | 2.08s 2026-01-22 13:02:38.338949 | fa163e0d-6f45-64a1-ca76-000000003fe4 | CHANGED | Check Keystone admin endpoint status | undercloud | item=neutron 2026-01-22 13:02:38.341313 | fa163e0d-6f45-64a1-ca76-000000003fe4 | TIMING | tripleo_keystone_resources : Check Keystone admin endpoint status | undercloud | 0:23:32.306320 | 2.38s 2026-01-22 13:02:38.602768 | fa163e0d-6f45-64a1-ca76-000000003fe4 | CHANGED | Check Keystone admin endpoint status | undercloud | item=nova 2026-01-22 13:02:38.604185 | fa163e0d-6f45-64a1-ca76-000000003fe4 | TIMING | tripleo_keystone_resources : Check Keystone admin endpoint status | undercloud | 0:23:32.569187 | 2.64s 2026-01-22 13:02:38.626942 | fa163e0d-6f45-64a1-ca76-000000003fe4 | TIMING | tripleo_keystone_resources : Check Keystone admin endpoint status | undercloud | 0:23:32.591941 | 2.66s 2026-01-22 13:02:38.653553 | fa163e0d-6f45-64a1-ca76-000000003fe9 | TASK | Async creation of Keystone admin endpoint 2026-01-22 13:02:39.008496 | fa163e0d-6f45-64a1-ca76-000000003fe9 | CHANGED | Async creation of Keystone admin endpoint | undercloud | item=octavia 2026-01-22 13:02:39.011220 | fa163e0d-6f45-64a1-ca76-000000003fe9 | TIMING | tripleo_keystone_resources : Async creation of Keystone admin endpoint | undercloud | 0:23:32.976206 | 0.36s 2026-01-22 13:02:39.301132 | fa163e0d-6f45-64a1-ca76-000000003fe9 | CHANGED | Async creation of Keystone admin endpoint | undercloud | item=placement 2026-01-22 13:02:39.302827 | fa163e0d-6f45-64a1-ca76-000000003fe9 | TIMING | tripleo_keystone_resources : Async creation of Keystone admin endpoint | undercloud | 0:23:33.267838 | 0.65s 2026-01-22 13:02:39.567734 | fa163e0d-6f45-64a1-ca76-000000003fe9 | CHANGED | Async creation of Keystone admin endpoint | undercloud | item=swift 2026-01-22 13:02:39.568633 | fa163e0d-6f45-64a1-ca76-000000003fe9 | TIMING | tripleo_keystone_resources : Async creation of Keystone admin endpoint | undercloud | 0:23:33.533644 | 0.91s 2026-01-22 13:02:39.574055 | fa163e0d-6f45-64a1-ca76-000000003fe9 | TIMING | tripleo_keystone_resources : Async creation of Keystone admin endpoint | undercloud | 0:23:33.539076 | 0.92s 2026-01-22 13:02:39.589257 | fa163e0d-6f45-64a1-ca76-000000003feb | TASK | Check Keystone admin endpoint status 2026-01-22 13:02:39.870744 | fa163e0d-6f45-64a1-ca76-000000003feb | WAITING | Check Keystone admin endpoint status | undercloud | 30 retries left 2026-01-22 13:02:45.091452 | fa163e0d-6f45-64a1-ca76-000000003feb | CHANGED | Check Keystone admin endpoint status | undercloud | item=octavia 2026-01-22 13:02:45.093511 | fa163e0d-6f45-64a1-ca76-000000003feb | TIMING | tripleo_keystone_resources : Check Keystone admin endpoint status | undercloud | 0:23:39.058513 | 5.50s 2026-01-22 13:02:45.361821 | fa163e0d-6f45-64a1-ca76-000000003feb | CHANGED | Check Keystone admin endpoint status | undercloud | item=placement 2026-01-22 13:02:45.363873 | fa163e0d-6f45-64a1-ca76-000000003feb | TIMING | tripleo_keystone_resources : Check Keystone admin endpoint status | undercloud | 0:23:39.328880 | 5.77s 2026-01-22 13:02:45.653812 | fa163e0d-6f45-64a1-ca76-000000003feb | CHANGED | Check Keystone admin endpoint status | undercloud | item=swift 2026-01-22 13:02:45.654587 | fa163e0d-6f45-64a1-ca76-000000003feb | TIMING | tripleo_keystone_resources : Check Keystone admin endpoint status | undercloud | 0:23:39.619604 | 6.06s 2026-01-22 13:02:45.660087 | fa163e0d-6f45-64a1-ca76-000000003feb | TIMING | 
tripleo_keystone_resources : Check Keystone admin endpoint status | undercloud | 0:23:39.625089 | 6.07s 2026-01-22 13:02:45.683653 | fa163e0d-6f45-64a1-ca76-000000003e63 | TASK | Create Keystone internal endpoints 2026-01-22 13:02:45.789950 | fa163e0d-6f45-64a1-ca76-000000003e63 | TIMING | tripleo_keystone_resources : Create Keystone internal endpoints | undercloud | 0:23:39.754953 | 0.10s 2026-01-22 13:02:45.809463 | fa163e0d-6f45-64a1-ca76-000000003e63 | TIMING | tripleo_keystone_resources : Create Keystone internal endpoints | undercloud | 0:23:39.774469 | 0.12s 2026-01-22 13:02:45.817980 | fa163e0d-6f45-64a1-ca76-000000003e63 | TIMING | tripleo_keystone_resources : Create Keystone internal endpoints | undercloud | 0:23:39.782979 | 0.13s 2026-01-22 13:02:45.837762 | a91ae54c-ad38-4eca-bec7-52836e545eea | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/endpoints.yml | undercloud 2026-01-22 13:02:45.847656 | 32deb603-0f34-46b1-884e-7e5a5ef7ad8f | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/endpoints.yml | undercloud 2026-01-22 13:02:45.874249 | fa163e0d-6f45-64a1-ca76-000000004043 | TASK | Async creation of Keystone internal endpoint 2026-01-22 13:02:46.190457 | fa163e0d-6f45-64a1-ca76-000000004043 | CHANGED | Async creation of Keystone internal endpoint | undercloud | item=aodh 2026-01-22 13:02:46.192265 | fa163e0d-6f45-64a1-ca76-000000004043 | TIMING | tripleo_keystone_resources : Async creation of Keystone internal endpoint | undercloud | 0:23:40.157278 | 0.32s 2026-01-22 13:02:46.471159 | fa163e0d-6f45-64a1-ca76-000000004043 | CHANGED | Async creation of Keystone internal endpoint | undercloud | item=cinderv3 2026-01-22 13:02:46.472180 | fa163e0d-6f45-64a1-ca76-000000004043 | TIMING | tripleo_keystone_resources : Async creation of Keystone internal endpoint | undercloud | 0:23:40.437190 | 0.60s 2026-01-22 13:02:46.786530 | fa163e0d-6f45-64a1-ca76-000000004043 | CHANGED | Async creation of Keystone internal endpoint | undercloud | item=glance 2026-01-22 13:02:46.787500 | fa163e0d-6f45-64a1-ca76-000000004043 | TIMING | tripleo_keystone_resources : Async creation of Keystone internal endpoint | undercloud | 0:23:40.752510 | 0.91s 2026-01-22 13:02:47.043591 | fa163e0d-6f45-64a1-ca76-000000004043 | CHANGED | Async creation of Keystone internal endpoint | undercloud | item=gnocchi 2026-01-22 13:02:47.045755 | fa163e0d-6f45-64a1-ca76-000000004043 | TIMING | tripleo_keystone_resources : Async creation of Keystone internal endpoint | undercloud | 0:23:41.010757 | 1.17s 2026-01-22 13:02:47.323432 | fa163e0d-6f45-64a1-ca76-000000004043 | CHANGED | Async creation of Keystone internal endpoint | undercloud | item=heat 2026-01-22 13:02:47.324112 | fa163e0d-6f45-64a1-ca76-000000004043 | TIMING | tripleo_keystone_resources : Async creation of Keystone internal endpoint | undercloud | 0:23:41.289129 | 1.45s 2026-01-22 13:02:47.594147 | fa163e0d-6f45-64a1-ca76-000000004043 | CHANGED | Async creation of Keystone internal endpoint | undercloud | item=heat-cfn 2026-01-22 13:02:47.595873 | fa163e0d-6f45-64a1-ca76-000000004043 | TIMING | tripleo_keystone_resources : Async creation of Keystone internal endpoint | undercloud | 0:23:41.560883 | 1.72s 2026-01-22 13:02:47.837518 | fa163e0d-6f45-64a1-ca76-000000004043 | CHANGED | Async creation of Keystone internal endpoint | undercloud | item=manila 2026-01-22 13:02:47.838110 | fa163e0d-6f45-64a1-ca76-000000004043 | TIMING | tripleo_keystone_resources : Async creation of Keystone internal endpoint | undercloud | 
0:23:41.803130 | 1.96s 2026-01-22 13:02:48.146801 | fa163e0d-6f45-64a1-ca76-000000004043 | CHANGED | Async creation of Keystone internal endpoint | undercloud | item=manilav2 2026-01-22 13:02:48.147677 | fa163e0d-6f45-64a1-ca76-000000004043 | TIMING | tripleo_keystone_resources : Async creation of Keystone internal endpoint | undercloud | 0:23:42.112684 | 2.27s 2026-01-22 13:02:48.417335 | fa163e0d-6f45-64a1-ca76-000000004043 | CHANGED | Async creation of Keystone internal endpoint | undercloud | item=neutron 2026-01-22 13:02:48.418026 | fa163e0d-6f45-64a1-ca76-000000004043 | TIMING | tripleo_keystone_resources : Async creation of Keystone internal endpoint | undercloud | 0:23:42.383042 | 2.54s 2026-01-22 13:02:48.651643 | fa163e0d-6f45-64a1-ca76-000000004043 | CHANGED | Async creation of Keystone internal endpoint | undercloud | item=nova 2026-01-22 13:02:48.652918 | fa163e0d-6f45-64a1-ca76-000000004043 | TIMING | tripleo_keystone_resources : Async creation of Keystone internal endpoint | undercloud | 0:23:42.617929 | 2.78s 2026-01-22 13:02:48.663254 | fa163e0d-6f45-64a1-ca76-000000004043 | TIMING | tripleo_keystone_resources : Async creation of Keystone internal endpoint | undercloud | 0:23:42.628276 | 2.79s 2026-01-22 13:02:48.679641 | fa163e0d-6f45-64a1-ca76-000000004045 | TASK | Check Keystone internal endpoint status 2026-01-22 13:02:48.953265 | fa163e0d-6f45-64a1-ca76-000000004045 | CHANGED | Check Keystone internal endpoint status | undercloud | item=aodh 2026-01-22 13:02:48.954875 | fa163e0d-6f45-64a1-ca76-000000004045 | TIMING | tripleo_keystone_resources : Check Keystone internal endpoint status | undercloud | 0:23:42.919877 | 0.27s 2026-01-22 13:02:49.166417 | fa163e0d-6f45-64a1-ca76-000000004045 | CHANGED | Check Keystone internal endpoint status | undercloud | item=cinderv3 2026-01-22 13:02:49.167547 | fa163e0d-6f45-64a1-ca76-000000004045 | TIMING | tripleo_keystone_resources : Check Keystone internal endpoint status | undercloud | 0:23:43.132558 | 0.49s 2026-01-22 13:02:49.443793 | fa163e0d-6f45-64a1-ca76-000000004045 | CHANGED | Check Keystone internal endpoint status | undercloud | item=glance 2026-01-22 13:02:49.444902 | fa163e0d-6f45-64a1-ca76-000000004045 | TIMING | tripleo_keystone_resources : Check Keystone internal endpoint status | undercloud | 0:23:43.409913 | 0.76s 2026-01-22 13:02:49.680160 | fa163e0d-6f45-64a1-ca76-000000004045 | CHANGED | Check Keystone internal endpoint status | undercloud | item=gnocchi 2026-01-22 13:02:49.681276 | fa163e0d-6f45-64a1-ca76-000000004045 | TIMING | tripleo_keystone_resources : Check Keystone internal endpoint status | undercloud | 0:23:43.646286 | 1.00s 2026-01-22 13:02:49.936055 | fa163e0d-6f45-64a1-ca76-000000004045 | CHANGED | Check Keystone internal endpoint status | undercloud | item=heat 2026-01-22 13:02:49.937233 | fa163e0d-6f45-64a1-ca76-000000004045 | TIMING | tripleo_keystone_resources : Check Keystone internal endpoint status | undercloud | 0:23:43.902243 | 1.26s 2026-01-22 13:02:50.170359 | fa163e0d-6f45-64a1-ca76-000000004045 | WAITING | Check Keystone internal endpoint status | undercloud | 30 retries left 2026-01-22 13:02:55.364819 | fa163e0d-6f45-64a1-ca76-000000004045 | CHANGED | Check Keystone internal endpoint status | undercloud | item=heat-cfn 2026-01-22 13:02:55.365742 | fa163e0d-6f45-64a1-ca76-000000004045 | TIMING | tripleo_keystone_resources : Check Keystone internal endpoint status | undercloud | 0:23:49.330756 | 6.69s 2026-01-22 13:02:55.599548 | fa163e0d-6f45-64a1-ca76-000000004045 | CHANGED | Check 
Keystone internal endpoint status | undercloud | item=manila 2026-01-22 13:02:55.601685 | fa163e0d-6f45-64a1-ca76-000000004045 | TIMING | tripleo_keystone_resources : Check Keystone internal endpoint status | undercloud | 0:23:49.566657 | 6.92s 2026-01-22 13:02:55.846569 | fa163e0d-6f45-64a1-ca76-000000004045 | CHANGED | Check Keystone internal endpoint status | undercloud | item=manilav2 2026-01-22 13:02:55.848423 | fa163e0d-6f45-64a1-ca76-000000004045 | TIMING | tripleo_keystone_resources : Check Keystone internal endpoint status | undercloud | 0:23:49.813432 | 7.17s 2026-01-22 13:02:56.102691 | fa163e0d-6f45-64a1-ca76-000000004045 | CHANGED | Check Keystone internal endpoint status | undercloud | item=neutron 2026-01-22 13:02:56.103852 | fa163e0d-6f45-64a1-ca76-000000004045 | TIMING | tripleo_keystone_resources : Check Keystone internal endpoint status | undercloud | 0:23:50.068861 | 7.42s 2026-01-22 13:02:56.352376 | fa163e0d-6f45-64a1-ca76-000000004045 | CHANGED | Check Keystone internal endpoint status | undercloud | item=nova 2026-01-22 13:02:56.353571 | fa163e0d-6f45-64a1-ca76-000000004045 | TIMING | tripleo_keystone_resources : Check Keystone internal endpoint status | undercloud | 0:23:50.318582 | 7.67s 2026-01-22 13:02:56.373051 | fa163e0d-6f45-64a1-ca76-000000004045 | TIMING | tripleo_keystone_resources : Check Keystone internal endpoint status | undercloud | 0:23:50.338062 | 7.69s 2026-01-22 13:02:56.391369 | fa163e0d-6f45-64a1-ca76-00000000404a | TASK | Async creation of Keystone internal endpoint 2026-01-22 13:02:56.739161 | fa163e0d-6f45-64a1-ca76-00000000404a | CHANGED | Async creation of Keystone internal endpoint | undercloud | item=octavia 2026-01-22 13:02:56.741110 | fa163e0d-6f45-64a1-ca76-00000000404a | TIMING | tripleo_keystone_resources : Async creation of Keystone internal endpoint | undercloud | 0:23:50.706112 | 0.35s 2026-01-22 13:02:57.041335 | fa163e0d-6f45-64a1-ca76-00000000404a | CHANGED | Async creation of Keystone internal endpoint | undercloud | item=placement 2026-01-22 13:02:57.042756 | fa163e0d-6f45-64a1-ca76-00000000404a | TIMING | tripleo_keystone_resources : Async creation of Keystone internal endpoint | undercloud | 0:23:51.007740 | 0.65s 2026-01-22 13:02:57.306041 | fa163e0d-6f45-64a1-ca76-00000000404a | CHANGED | Async creation of Keystone internal endpoint | undercloud | item=swift 2026-01-22 13:02:57.307243 | fa163e0d-6f45-64a1-ca76-00000000404a | TIMING | tripleo_keystone_resources : Async creation of Keystone internal endpoint | undercloud | 0:23:51.272214 | 0.91s 2026-01-22 13:02:57.313245 | fa163e0d-6f45-64a1-ca76-00000000404a | TIMING | tripleo_keystone_resources : Async creation of Keystone internal endpoint | undercloud | 0:23:51.278257 | 0.92s 2026-01-22 13:02:57.329052 | fa163e0d-6f45-64a1-ca76-00000000404c | TASK | Check Keystone internal endpoint status 2026-01-22 13:02:57.614630 | fa163e0d-6f45-64a1-ca76-00000000404c | WAITING | Check Keystone internal endpoint status | undercloud | 30 retries left 2026-01-22 13:03:02.830147 | fa163e0d-6f45-64a1-ca76-00000000404c | CHANGED | Check Keystone internal endpoint status | undercloud | item=octavia 2026-01-22 13:03:02.832375 | fa163e0d-6f45-64a1-ca76-00000000404c | TIMING | tripleo_keystone_resources : Check Keystone internal endpoint status | undercloud | 0:23:56.797375 | 5.50s 2026-01-22 13:03:03.094099 | fa163e0d-6f45-64a1-ca76-00000000404c | CHANGED | Check Keystone internal endpoint status | undercloud | item=placement 2026-01-22 13:03:03.095222 | fa163e0d-6f45-64a1-ca76-00000000404c 
| TIMING | tripleo_keystone_resources : Check Keystone internal endpoint status | undercloud | 0:23:57.060233 | 5.76s 2026-01-22 13:03:03.309071 | fa163e0d-6f45-64a1-ca76-00000000404c | CHANGED | Check Keystone internal endpoint status | undercloud | item=swift 2026-01-22 13:03:03.310400 | fa163e0d-6f45-64a1-ca76-00000000404c | TIMING | tripleo_keystone_resources : Check Keystone internal endpoint status | undercloud | 0:23:57.275410 | 5.98s 2026-01-22 13:03:03.318239 | fa163e0d-6f45-64a1-ca76-00000000404c | TIMING | tripleo_keystone_resources : Check Keystone internal endpoint status | undercloud | 0:23:57.283241 | 5.99s 2026-01-22 13:03:03.339613 | fa163e0d-6f45-64a1-ca76-0000000039b0 | TASK | Run cleanup tasks 2026-01-22 13:03:03.394256 | fa163e0d-6f45-64a1-ca76-0000000039b0 | TIMING | tripleo_keystone_resources : Run cleanup tasks | undercloud | 0:23:57.359249 | 0.05s 2026-01-22 13:03:03.414419 | c8c78951-74a5-453f-a7d4-906d4693e398 | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/cleanup.yml | undercloud 2026-01-22 13:03:03.482911 | fa163e0d-6f45-64a1-ca76-0000000040a2 | TASK | Get list of enabled services 2026-01-22 13:03:05.927036 | fa163e0d-6f45-64a1-ca76-0000000040a2 | CHANGED | Get list of enabled services | undercloud 2026-01-22 13:03:05.928999 | fa163e0d-6f45-64a1-ca76-0000000040a2 | TIMING | tripleo_keystone_resources : Get list of enabled services | undercloud | 0:23:59.894003 | 2.44s 2026-01-22 13:03:05.949463 | fa163e0d-6f45-64a1-ca76-0000000040a3 | TASK | List services to be disabled 2026-01-22 13:03:06.035204 | fa163e0d-6f45-64a1-ca76-0000000040a3 | SKIPPED | List services to be disabled | undercloud | item={'Name': 'placement', 'Type': 'placement'} 2026-01-22 13:03:06.072184 | fa163e0d-6f45-64a1-ca76-0000000040a3 | SKIPPED | List services to be disabled | undercloud | item={'Name': 'heat', 'Type': 'orchestration'} 2026-01-22 13:03:06.101001 | fa163e0d-6f45-64a1-ca76-0000000040a3 | SKIPPED | List services to be disabled | undercloud | item={'Name': 'gnocchi', 'Type': 'metric'} 2026-01-22 13:03:06.126309 | fa163e0d-6f45-64a1-ca76-0000000040a3 | SKIPPED | List services to be disabled | undercloud | item={'Name': 'neutron', 'Type': 'network'} 2026-01-22 13:03:06.151369 | fa163e0d-6f45-64a1-ca76-0000000040a3 | SKIPPED | List services to be disabled | undercloud | item={'Name': 'manila', 'Type': 'share'} 2026-01-22 13:03:06.176012 | fa163e0d-6f45-64a1-ca76-0000000040a3 | SKIPPED | List services to be disabled | undercloud | item={'Name': 'octavia', 'Type': 'load-balancer'} 2026-01-22 13:03:06.201390 | fa163e0d-6f45-64a1-ca76-0000000040a3 | SKIPPED | List services to be disabled | undercloud | item={'Name': 'glance', 'Type': 'image'} 2026-01-22 13:03:06.224951 | fa163e0d-6f45-64a1-ca76-0000000040a3 | SKIPPED | List services to be disabled | undercloud | item={'Name': 'nova', 'Type': 'compute'} 2026-01-22 13:03:06.250182 | fa163e0d-6f45-64a1-ca76-0000000040a3 | SKIPPED | List services to be disabled | undercloud | item={'Name': 'cinderv3', 'Type': 'volumev3'} 2026-01-22 13:03:06.275144 | fa163e0d-6f45-64a1-ca76-0000000040a3 | SKIPPED | List services to be disabled | undercloud | item={'Name': 'aodh', 'Type': 'alarming'} 2026-01-22 13:03:06.300384 | fa163e0d-6f45-64a1-ca76-0000000040a3 | SKIPPED | List services to be disabled | undercloud | item={'Name': 'manilav2', 'Type': 'sharev2'} 2026-01-22 13:03:06.326770 | fa163e0d-6f45-64a1-ca76-0000000040a3 | SKIPPED | List services to be disabled | undercloud | item={'Name': 'keystone', 'Type': 'identity'} 
2026-01-22 13:03:06.352551 | fa163e0d-6f45-64a1-ca76-0000000040a3 | SKIPPED | List services to be disabled | undercloud | item={'Name': 'heat-cfn', 'Type': 'cloudformation'} 2026-01-22 13:03:06.375042 | fa163e0d-6f45-64a1-ca76-0000000040a3 | SKIPPED | List services to be disabled | undercloud | item={'Name': 'swift', 'Type': 'object-store'} 2026-01-22 13:03:06.387681 | fa163e0d-6f45-64a1-ca76-0000000040a3 | TIMING | tripleo_keystone_resources : List services to be disabled | undercloud | 0:24:00.352656 | 0.44s 2026-01-22 13:03:06.404409 | fa163e0d-6f45-64a1-ca76-0000000040a4 | TASK | Delete keystone services not enabled 2026-01-22 13:03:06.430856 | fa163e0d-6f45-64a1-ca76-0000000040a4 | TIMING | tripleo_keystone_resources : Delete keystone services not enabled | undercloud | 0:24:00.395866 | 0.03s 2026-01-22 13:03:06.445211 | fa163e0d-6f45-64a1-ca76-0000000039b1 | TASK | Create Keystone Roles 2026-01-22 13:03:06.532400 | fa163e0d-6f45-64a1-ca76-0000000039b1 | TIMING | tripleo_keystone_resources : Create Keystone Roles | undercloud | 0:24:00.497393 | 0.09s 2026-01-22 13:03:06.538966 | fa163e0d-6f45-64a1-ca76-0000000039b1 | TIMING | tripleo_keystone_resources : Create Keystone Roles | undercloud | 0:24:00.503965 | 0.09s 2026-01-22 13:03:06.553846 | ece1847e-b6f7-4a29-b2a7-fa0f09499e5d | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/roles.yml | undercloud 2026-01-22 13:03:06.618595 | fa163e0d-6f45-64a1-ca76-00000000420e | TASK | Async creation of Keystone role 2026-01-22 13:03:06.923637 | fa163e0d-6f45-64a1-ca76-00000000420e | CHANGED | Async creation of Keystone role | undercloud | item=service 2026-01-22 13:03:06.925086 | fa163e0d-6f45-64a1-ca76-00000000420e | TIMING | tripleo_keystone_resources : Async creation of Keystone role | undercloud | 0:24:00.890089 | 0.31s 2026-01-22 13:03:07.197670 | fa163e0d-6f45-64a1-ca76-00000000420e | CHANGED | Async creation of Keystone role | undercloud | item=ResellerAdmin 2026-01-22 13:03:07.199503 | fa163e0d-6f45-64a1-ca76-00000000420e | TIMING | tripleo_keystone_resources : Async creation of Keystone role | undercloud | 0:24:01.164518 | 0.58s 2026-01-22 13:03:07.497689 | fa163e0d-6f45-64a1-ca76-00000000420e | CHANGED | Async creation of Keystone role | undercloud | item=heat_stack_user 2026-01-22 13:03:07.498683 | fa163e0d-6f45-64a1-ca76-00000000420e | TIMING | tripleo_keystone_resources : Async creation of Keystone role | undercloud | 0:24:01.463666 | 0.88s 2026-01-22 13:03:07.750786 | fa163e0d-6f45-64a1-ca76-00000000420e | CHANGED | Async creation of Keystone role | undercloud | item=load-balancer_admin 2026-01-22 13:03:07.751859 | fa163e0d-6f45-64a1-ca76-00000000420e | TIMING | tripleo_keystone_resources : Async creation of Keystone role | undercloud | 0:24:01.716872 | 1.13s 2026-01-22 13:03:07.995079 | fa163e0d-6f45-64a1-ca76-00000000420e | CHANGED | Async creation of Keystone role | undercloud | item=load-balancer_quota_admin 2026-01-22 13:03:07.996047 | fa163e0d-6f45-64a1-ca76-00000000420e | TIMING | tripleo_keystone_resources : Async creation of Keystone role | undercloud | 0:24:01.961056 | 1.38s 2026-01-22 13:03:08.251180 | fa163e0d-6f45-64a1-ca76-00000000420e | CHANGED | Async creation of Keystone role | undercloud | item=load-balancer_member 2026-01-22 13:03:08.251825 | fa163e0d-6f45-64a1-ca76-00000000420e | TIMING | tripleo_keystone_resources : Async creation of Keystone role | undercloud | 0:24:02.216840 | 1.63s 2026-01-22 13:03:08.495736 | fa163e0d-6f45-64a1-ca76-00000000420e | CHANGED | Async creation of Keystone 
role | undercloud | item=load-balancer_global_observer 2026-01-22 13:03:08.496968 | fa163e0d-6f45-64a1-ca76-00000000420e | TIMING | tripleo_keystone_resources : Async creation of Keystone role | undercloud | 0:24:02.461981 | 1.88s 2026-01-22 13:03:08.787911 | fa163e0d-6f45-64a1-ca76-00000000420e | CHANGED | Async creation of Keystone role | undercloud | item=load-balancer_observer 2026-01-22 13:03:08.788857 | fa163e0d-6f45-64a1-ca76-00000000420e | TIMING | tripleo_keystone_resources : Async creation of Keystone role | undercloud | 0:24:02.753867 | 2.17s 2026-01-22 13:03:09.082245 | fa163e0d-6f45-64a1-ca76-00000000420e | CHANGED | Async creation of Keystone role | undercloud | item=swiftoperator 2026-01-22 13:03:09.082999 | fa163e0d-6f45-64a1-ca76-00000000420e | TIMING | tripleo_keystone_resources : Async creation of Keystone role | undercloud | 0:24:03.048018 | 2.46s 2026-01-22 13:03:09.086157 | fa163e0d-6f45-64a1-ca76-00000000420e | TIMING | tripleo_keystone_resources : Async creation of Keystone role | undercloud | 0:24:03.051178 | 2.47s 2026-01-22 13:03:09.100675 | fa163e0d-6f45-64a1-ca76-000000004210 | TASK | Check Keystone role status 2026-01-22 13:03:09.335762 | fa163e0d-6f45-64a1-ca76-000000004210 | CHANGED | Check Keystone role status | undercloud | item=service 2026-01-22 13:03:09.337145 | fa163e0d-6f45-64a1-ca76-000000004210 | TIMING | tripleo_keystone_resources : Check Keystone role status | undercloud | 0:24:03.302160 | 0.24s 2026-01-22 13:03:09.552494 | fa163e0d-6f45-64a1-ca76-000000004210 | CHANGED | Check Keystone role status | undercloud | item=ResellerAdmin 2026-01-22 13:03:09.553727 | fa163e0d-6f45-64a1-ca76-000000004210 | TIMING | tripleo_keystone_resources : Check Keystone role status | undercloud | 0:24:03.518736 | 0.45s 2026-01-22 13:03:09.788986 | fa163e0d-6f45-64a1-ca76-000000004210 | CHANGED | Check Keystone role status | undercloud | item=heat_stack_user 2026-01-22 13:03:09.790084 | fa163e0d-6f45-64a1-ca76-000000004210 | TIMING | tripleo_keystone_resources : Check Keystone role status | undercloud | 0:24:03.755093 | 0.69s 2026-01-22 13:03:10.042214 | fa163e0d-6f45-64a1-ca76-000000004210 | CHANGED | Check Keystone role status | undercloud | item=load-balancer_admin 2026-01-22 13:03:10.043252 | fa163e0d-6f45-64a1-ca76-000000004210 | TIMING | tripleo_keystone_resources : Check Keystone role status | undercloud | 0:24:04.008262 | 0.94s 2026-01-22 13:03:10.284921 | fa163e0d-6f45-64a1-ca76-000000004210 | CHANGED | Check Keystone role status | undercloud | item=load-balancer_quota_admin 2026-01-22 13:03:10.285750 | fa163e0d-6f45-64a1-ca76-000000004210 | TIMING | tripleo_keystone_resources : Check Keystone role status | undercloud | 0:24:04.250766 | 1.18s 2026-01-22 13:03:10.562923 | fa163e0d-6f45-64a1-ca76-000000004210 | CHANGED | Check Keystone role status | undercloud | item=load-balancer_member 2026-01-22 13:03:10.563584 | fa163e0d-6f45-64a1-ca76-000000004210 | TIMING | tripleo_keystone_resources : Check Keystone role status | undercloud | 0:24:04.528602 | 1.46s 2026-01-22 13:03:10.818768 | fa163e0d-6f45-64a1-ca76-000000004210 | CHANGED | Check Keystone role status | undercloud | item=load-balancer_global_observer 2026-01-22 13:03:10.819328 | fa163e0d-6f45-64a1-ca76-000000004210 | TIMING | tripleo_keystone_resources : Check Keystone role status | undercloud | 0:24:04.784346 | 1.72s 2026-01-22 13:03:11.090118 | fa163e0d-6f45-64a1-ca76-000000004210 | CHANGED | Check Keystone role status | undercloud | item=load-balancer_observer 2026-01-22 13:03:11.091263 | 
fa163e0d-6f45-64a1-ca76-000000004210 | TIMING | tripleo_keystone_resources : Check Keystone role status | undercloud | 0:24:05.056271 | 1.99s 2026-01-22 13:03:11.314600 | fa163e0d-6f45-64a1-ca76-000000004210 | CHANGED | Check Keystone role status | undercloud | item=swiftoperator 2026-01-22 13:03:11.315697 | fa163e0d-6f45-64a1-ca76-000000004210 | TIMING | tripleo_keystone_resources : Check Keystone role status | undercloud | 0:24:05.280706 | 2.21s 2026-01-22 13:03:11.325461 | fa163e0d-6f45-64a1-ca76-000000004210 | TIMING | tripleo_keystone_resources : Check Keystone role status | undercloud | 0:24:05.290462 | 2.22s 2026-01-22 13:03:11.348401 | fa163e0d-6f45-64a1-ca76-0000000039b3 | TASK | Create Keystone Users 2026-01-22 13:03:11.435553 | fa163e0d-6f45-64a1-ca76-0000000039b3 | TIMING | tripleo_keystone_resources : Create Keystone Users | undercloud | 0:24:05.400509 | 0.09s 2026-01-22 13:03:11.452575 | fa163e0d-6f45-64a1-ca76-0000000039b3 | TIMING | tripleo_keystone_resources : Create Keystone Users | undercloud | 0:24:05.417583 | 0.10s 2026-01-22 13:03:11.460584 | fa163e0d-6f45-64a1-ca76-0000000039b3 | TIMING | tripleo_keystone_resources : Create Keystone Users | undercloud | 0:24:05.425579 | 0.11s 2026-01-22 13:03:11.484832 | 724f0f5c-29b1-4cb5-9165-2ec41feb55bc | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/users.yml | undercloud 2026-01-22 13:03:11.498752 | 1b5f9550-cf02-4bd0-8d0f-db1009a0fb00 | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/users.yml | undercloud 2026-01-22 13:03:11.556868 | fa163e0d-6f45-64a1-ca76-0000000043a2 | TASK | Check password of Keystone user 2026-01-22 13:03:12.588345 | fa163e0d-6f45-64a1-ca76-0000000043a2 | FATAL | Check password of Keystone user | undercloud | error={"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false} 2026-01-22 13:03:12.590409 | fa163e0d-6f45-64a1-ca76-0000000043a2 | TIMING | tripleo_keystone_resources : Check password of Keystone user | undercloud | 0:24:06.555422 | 1.03s 2026-01-22 13:03:13.559523 | fa163e0d-6f45-64a1-ca76-0000000043a2 | FATAL | Check password of Keystone user | undercloud | error={"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false} 2026-01-22 13:03:13.561085 | fa163e0d-6f45-64a1-ca76-0000000043a2 | TIMING | tripleo_keystone_resources : Check password of Keystone user | undercloud | 0:24:07.526096 | 2.00s 2026-01-22 13:03:14.506787 | fa163e0d-6f45-64a1-ca76-0000000043a2 | FATAL | Check password of Keystone user | undercloud | error={"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false} 2026-01-22 13:03:14.508325 | fa163e0d-6f45-64a1-ca76-0000000043a2 | TIMING | tripleo_keystone_resources : Check password of Keystone user | undercloud | 0:24:08.473339 | 2.95s 2026-01-22 13:03:15.425884 | fa163e0d-6f45-64a1-ca76-0000000043a2 | FATAL | Check password of Keystone user | undercloud | error={"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false} 2026-01-22 13:03:15.427512 | fa163e0d-6f45-64a1-ca76-0000000043a2 | TIMING | tripleo_keystone_resources : Check password of Keystone user | undercloud | 0:24:09.392519 | 3.87s 2026-01-22 13:03:16.335518 | fa163e0d-6f45-64a1-ca76-0000000043a2 | FATAL | Check password of Keystone user | undercloud | error={"censored": "the output has been hidden due to the 
fact that 'no_log: true' was specified for this result", "changed": false} 2026-01-22 13:03:16.336742 | fa163e0d-6f45-64a1-ca76-0000000043a2 | TIMING | tripleo_keystone_resources : Check password of Keystone user | undercloud | 0:24:10.301749 | 4.78s 2026-01-22 13:03:17.308980 | fa163e0d-6f45-64a1-ca76-0000000043a2 | FATAL | Check password of Keystone user | undercloud | error={"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false} 2026-01-22 13:03:17.310341 | fa163e0d-6f45-64a1-ca76-0000000043a2 | TIMING | tripleo_keystone_resources : Check password of Keystone user | undercloud | 0:24:11.275358 | 5.75s 2026-01-22 13:03:18.276691 | fa163e0d-6f45-64a1-ca76-0000000043a2 | FATAL | Check password of Keystone user | undercloud | error={"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false} 2026-01-22 13:03:18.277751 | fa163e0d-6f45-64a1-ca76-0000000043a2 | TIMING | tripleo_keystone_resources : Check password of Keystone user | undercloud | 0:24:12.242753 | 6.72s 2026-01-22 13:03:19.206765 | fa163e0d-6f45-64a1-ca76-0000000043a2 | FATAL | Check password of Keystone user | undercloud | error={"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false} 2026-01-22 13:03:19.207336 | fa163e0d-6f45-64a1-ca76-0000000043a2 | TIMING | tripleo_keystone_resources : Check password of Keystone user | undercloud | 0:24:13.172354 | 7.65s 2026-01-22 13:03:20.169482 | fa163e0d-6f45-64a1-ca76-0000000043a2 | FATAL | Check password of Keystone user | undercloud | error={"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false} 2026-01-22 13:03:20.170222 | fa163e0d-6f45-64a1-ca76-0000000043a2 | TIMING | tripleo_keystone_resources : Check password of Keystone user | undercloud | 0:24:14.135236 | 8.61s 2026-01-22 13:03:21.134911 | fa163e0d-6f45-64a1-ca76-0000000043a2 | FATAL | Check password of Keystone user | undercloud | error={"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false} 2026-01-22 13:03:21.136232 | fa163e0d-6f45-64a1-ca76-0000000043a2 | TIMING | tripleo_keystone_resources : Check password of Keystone user | undercloud | 0:24:15.101247 | 9.58s 2026-01-22 13:03:21.146239 | fa163e0d-6f45-64a1-ca76-0000000043a2 | IGNORED | Check password of Keystone user | undercloud | error={"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false} 2026-01-22 13:03:21.147027 | fa163e0d-6f45-64a1-ca76-0000000043a2 | TIMING | tripleo_keystone_resources : Check password of Keystone user | undercloud | 0:24:15.112042 | 9.59s 2026-01-22 13:03:21.163342 | fa163e0d-6f45-64a1-ca76-0000000043a4 | TASK | Async creation of Keystone user 2026-01-22 13:03:21.560272 | fa163e0d-6f45-64a1-ca76-0000000043a4 | CHANGED | Async creation of Keystone user | undercloud 2026-01-22 13:03:21.562282 | fa163e0d-6f45-64a1-ca76-0000000043a4 | TIMING | tripleo_keystone_resources : Async creation of Keystone user | undercloud | 0:24:15.527283 | 0.40s 2026-01-22 13:03:21.902297 | fa163e0d-6f45-64a1-ca76-0000000043a4 | CHANGED | Async creation of Keystone user | undercloud 2026-01-22 13:03:21.903269 | fa163e0d-6f45-64a1-ca76-0000000043a4 | TIMING | tripleo_keystone_resources : Async creation of Keystone user | undercloud | 0:24:15.868277 | 0.74s 
2026-01-22 13:03:22.269526 | fa163e0d-6f45-64a1-ca76-0000000043a4 | CHANGED | Async creation of Keystone user | undercloud 2026-01-22 13:03:22.270983 | fa163e0d-6f45-64a1-ca76-0000000043a4 | TIMING | tripleo_keystone_resources : Async creation of Keystone user | undercloud | 0:24:16.235992 | 1.11s 2026-01-22 13:03:22.645792 | fa163e0d-6f45-64a1-ca76-0000000043a4 | CHANGED | Async creation of Keystone user | undercloud 2026-01-22 13:03:22.646838 | fa163e0d-6f45-64a1-ca76-0000000043a4 | TIMING | tripleo_keystone_resources : Async creation of Keystone user | undercloud | 0:24:16.611847 | 1.48s 2026-01-22 13:03:23.027785 | fa163e0d-6f45-64a1-ca76-0000000043a4 | CHANGED | Async creation of Keystone user | undercloud 2026-01-22 13:03:23.028813 | fa163e0d-6f45-64a1-ca76-0000000043a4 | TIMING | tripleo_keystone_resources : Async creation of Keystone user | undercloud | 0:24:16.993822 | 1.86s 2026-01-22 13:03:23.401363 | fa163e0d-6f45-64a1-ca76-0000000043a4 | CHANGED | Async creation of Keystone user | undercloud 2026-01-22 13:03:23.402028 | fa163e0d-6f45-64a1-ca76-0000000043a4 | TIMING | tripleo_keystone_resources : Async creation of Keystone user | undercloud | 0:24:17.367046 | 2.24s 2026-01-22 13:03:23.736049 | fa163e0d-6f45-64a1-ca76-0000000043a4 | CHANGED | Async creation of Keystone user | undercloud 2026-01-22 13:03:23.736565 | fa163e0d-6f45-64a1-ca76-0000000043a4 | TIMING | tripleo_keystone_resources : Async creation of Keystone user | undercloud | 0:24:17.701583 | 2.57s 2026-01-22 13:03:24.076597 | fa163e0d-6f45-64a1-ca76-0000000043a4 | CHANGED | Async creation of Keystone user | undercloud 2026-01-22 13:03:24.077194 | fa163e0d-6f45-64a1-ca76-0000000043a4 | TIMING | tripleo_keystone_resources : Async creation of Keystone user | undercloud | 0:24:18.042212 | 2.91s 2026-01-22 13:03:24.354102 | fa163e0d-6f45-64a1-ca76-0000000043a4 | CHANGED | Async creation of Keystone user | undercloud 2026-01-22 13:03:24.354673 | fa163e0d-6f45-64a1-ca76-0000000043a4 | TIMING | tripleo_keystone_resources : Async creation of Keystone user | undercloud | 0:24:18.319668 | 3.19s 2026-01-22 13:03:24.690368 | fa163e0d-6f45-64a1-ca76-0000000043a4 | CHANGED | Async creation of Keystone user | undercloud 2026-01-22 13:03:24.690921 | fa163e0d-6f45-64a1-ca76-0000000043a4 | TIMING | tripleo_keystone_resources : Async creation of Keystone user | undercloud | 0:24:18.655940 | 3.53s 2026-01-22 13:03:24.694891 | fa163e0d-6f45-64a1-ca76-0000000043a4 | CHANGED | Async creation of Keystone user | undercloud 2026-01-22 13:03:24.695500 | fa163e0d-6f45-64a1-ca76-0000000043a4 | TIMING | tripleo_keystone_resources : Async creation of Keystone user | undercloud | 0:24:18.660520 | 3.53s 2026-01-22 13:03:24.710023 | fa163e0d-6f45-64a1-ca76-0000000043a6 | TASK | Check Keystone user status 2026-01-22 13:03:24.964623 | fa163e0d-6f45-64a1-ca76-0000000043a6 | CHANGED | Check Keystone user status | undercloud 2026-01-22 13:03:24.966124 | fa163e0d-6f45-64a1-ca76-0000000043a6 | TIMING | tripleo_keystone_resources : Check Keystone user status | undercloud | 0:24:18.931125 | 0.26s 2026-01-22 13:03:25.240600 | fa163e0d-6f45-64a1-ca76-0000000043a6 | CHANGED | Check Keystone user status | undercloud 2026-01-22 13:03:25.241195 | fa163e0d-6f45-64a1-ca76-0000000043a6 | TIMING | tripleo_keystone_resources : Check Keystone user status | undercloud | 0:24:19.206213 | 0.53s 2026-01-22 13:03:25.493911 | fa163e0d-6f45-64a1-ca76-0000000043a6 | CHANGED | Check Keystone user status | undercloud 2026-01-22 13:03:25.494561 | fa163e0d-6f45-64a1-ca76-0000000043a6 
| TIMING | tripleo_keystone_resources : Check Keystone user status | undercloud | 0:24:19.459546 | 0.78s 2026-01-22 13:03:25.739850 | fa163e0d-6f45-64a1-ca76-0000000043a6 | CHANGED | Check Keystone user status | undercloud 2026-01-22 13:03:25.740394 | fa163e0d-6f45-64a1-ca76-0000000043a6 | TIMING | tripleo_keystone_resources : Check Keystone user status | undercloud | 0:24:19.705411 | 1.03s 2026-01-22 13:03:25.958906 | fa163e0d-6f45-64a1-ca76-0000000043a6 | CHANGED | Check Keystone user status | undercloud 2026-01-22 13:03:25.959833 | fa163e0d-6f45-64a1-ca76-0000000043a6 | TIMING | tripleo_keystone_resources : Check Keystone user status | undercloud | 0:24:19.924844 | 1.25s 2026-01-22 13:03:26.281558 | fa163e0d-6f45-64a1-ca76-0000000043a6 | CHANGED | Check Keystone user status | undercloud 2026-01-22 13:03:26.282861 | fa163e0d-6f45-64a1-ca76-0000000043a6 | TIMING | tripleo_keystone_resources : Check Keystone user status | undercloud | 0:24:20.247871 | 1.57s 2026-01-22 13:03:26.549556 | fa163e0d-6f45-64a1-ca76-0000000043a6 | WAITING | Check Keystone user status | undercloud | 30 retries left 2026-01-22 13:03:31.767413 | fa163e0d-6f45-64a1-ca76-0000000043a6 | CHANGED | Check Keystone user status | undercloud 2026-01-22 13:03:31.768846 | fa163e0d-6f45-64a1-ca76-0000000043a6 | TIMING | tripleo_keystone_resources : Check Keystone user status | undercloud | 0:24:25.733853 | 7.06s 2026-01-22 13:03:32.059972 | fa163e0d-6f45-64a1-ca76-0000000043a6 | CHANGED | Check Keystone user status | undercloud 2026-01-22 13:03:32.060918 | fa163e0d-6f45-64a1-ca76-0000000043a6 | TIMING | tripleo_keystone_resources : Check Keystone user status | undercloud | 0:24:26.025926 | 7.35s 2026-01-22 13:03:32.342117 | fa163e0d-6f45-64a1-ca76-0000000043a6 | CHANGED | Check Keystone user status | undercloud 2026-01-22 13:03:32.343972 | fa163e0d-6f45-64a1-ca76-0000000043a6 | TIMING | tripleo_keystone_resources : Check Keystone user status | undercloud | 0:24:26.308979 | 7.63s 2026-01-22 13:03:32.613025 | fa163e0d-6f45-64a1-ca76-0000000043a6 | CHANGED | Check Keystone user status | undercloud 2026-01-22 13:03:32.614539 | fa163e0d-6f45-64a1-ca76-0000000043a6 | TIMING | tripleo_keystone_resources : Check Keystone user status | undercloud | 0:24:26.579556 | 7.90s 2026-01-22 13:03:32.625149 | fa163e0d-6f45-64a1-ca76-0000000043a6 | CHANGED | Check Keystone user status | undercloud 2026-01-22 13:03:32.627138 | fa163e0d-6f45-64a1-ca76-0000000043a6 | TIMING | tripleo_keystone_resources : Check Keystone user status | undercloud | 0:24:26.592143 | 7.92s 2026-01-22 13:03:32.650765 | fa163e0d-6f45-64a1-ca76-0000000043ab | TASK | Check password of Keystone user 2026-01-22 13:03:33.662515 | fa163e0d-6f45-64a1-ca76-0000000043ab | FATAL | Check password of Keystone user | undercloud | error={"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false} 2026-01-22 13:03:33.663406 | fa163e0d-6f45-64a1-ca76-0000000043ab | TIMING | tripleo_keystone_resources : Check password of Keystone user | undercloud | 0:24:27.628419 | 1.01s 2026-01-22 13:03:34.747599 | fa163e0d-6f45-64a1-ca76-0000000043ab | FATAL | Check password of Keystone user | undercloud | error={"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false} 2026-01-22 13:03:34.749874 | fa163e0d-6f45-64a1-ca76-0000000043ab | TIMING | tripleo_keystone_resources : Check password of Keystone user | undercloud | 0:24:28.714884 | 2.10s 2026-01-22 13:03:35.667009 | 
fa163e0d-6f45-64a1-ca76-0000000043ab | FATAL | Check password of Keystone user | undercloud | error={"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false} 2026-01-22 13:03:35.668382 | fa163e0d-6f45-64a1-ca76-0000000043ab | TIMING | tripleo_keystone_resources : Check password of Keystone user | undercloud | 0:24:29.633396 | 3.02s 2026-01-22 13:03:36.623417 | fa163e0d-6f45-64a1-ca76-0000000043ab | FATAL | Check password of Keystone user | undercloud | error={"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false} 2026-01-22 13:03:36.624456 | fa163e0d-6f45-64a1-ca76-0000000043ab | TIMING | tripleo_keystone_resources : Check password of Keystone user | undercloud | 0:24:30.589465 | 3.97s 2026-01-22 13:03:37.549822 | fa163e0d-6f45-64a1-ca76-0000000043ab | FATAL | Check password of Keystone user | undercloud | error={"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false} 2026-01-22 13:03:37.551439 | fa163e0d-6f45-64a1-ca76-0000000043ab | TIMING | tripleo_keystone_resources : Check password of Keystone user | undercloud | 0:24:31.516448 | 4.90s 2026-01-22 13:03:38.440331 | fa163e0d-6f45-64a1-ca76-0000000043ab | FATAL | Check password of Keystone user | undercloud | error={"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false} 2026-01-22 13:03:38.441608 | fa163e0d-6f45-64a1-ca76-0000000043ab | TIMING | tripleo_keystone_resources : Check password of Keystone user | undercloud | 0:24:32.406620 | 5.79s 2026-01-22 13:03:38.451196 | fa163e0d-6f45-64a1-ca76-0000000043ab | IGNORED | Check password of Keystone user | undercloud | error={"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false} 2026-01-22 13:03:38.451540 | fa163e0d-6f45-64a1-ca76-0000000043ab | TIMING | tripleo_keystone_resources : Check password of Keystone user | undercloud | 0:24:32.416561 | 5.80s 2026-01-22 13:03:38.466299 | fa163e0d-6f45-64a1-ca76-0000000043ad | TASK | Async creation of Keystone user 2026-01-22 13:03:38.859193 | fa163e0d-6f45-64a1-ca76-0000000043ad | CHANGED | Async creation of Keystone user | undercloud 2026-01-22 13:03:38.861199 | fa163e0d-6f45-64a1-ca76-0000000043ad | TIMING | tripleo_keystone_resources : Async creation of Keystone user | undercloud | 0:24:32.826202 | 0.39s 2026-01-22 13:03:39.193213 | fa163e0d-6f45-64a1-ca76-0000000043ad | CHANGED | Async creation of Keystone user | undercloud 2026-01-22 13:03:39.193899 | fa163e0d-6f45-64a1-ca76-0000000043ad | TIMING | tripleo_keystone_resources : Async creation of Keystone user | undercloud | 0:24:33.158915 | 0.73s 2026-01-22 13:03:39.547351 | fa163e0d-6f45-64a1-ca76-0000000043ad | CHANGED | Async creation of Keystone user | undercloud 2026-01-22 13:03:39.547900 | fa163e0d-6f45-64a1-ca76-0000000043ad | TIMING | tripleo_keystone_resources : Async creation of Keystone user | undercloud | 0:24:33.512906 | 1.08s 2026-01-22 13:03:39.864531 | fa163e0d-6f45-64a1-ca76-0000000043ad | CHANGED | Async creation of Keystone user | undercloud 2026-01-22 13:03:39.865670 | fa163e0d-6f45-64a1-ca76-0000000043ad | TIMING | tripleo_keystone_resources : Async creation of Keystone user | undercloud | 0:24:33.830669 | 1.40s 2026-01-22 13:03:40.161488 | fa163e0d-6f45-64a1-ca76-0000000043ad | CHANGED | Async creation of Keystone user | 
undercloud 2026-01-22 13:03:40.162216 | fa163e0d-6f45-64a1-ca76-0000000043ad | TIMING | tripleo_keystone_resources : Async creation of Keystone user | undercloud | 0:24:34.127231 | 1.70s 2026-01-22 13:03:40.489175 | fa163e0d-6f45-64a1-ca76-0000000043ad | CHANGED | Async creation of Keystone user | undercloud 2026-01-22 13:03:40.490689 | fa163e0d-6f45-64a1-ca76-0000000043ad | TIMING | tripleo_keystone_resources : Async creation of Keystone user | undercloud | 0:24:34.455698 | 2.02s 2026-01-22 13:03:40.500606 | fa163e0d-6f45-64a1-ca76-0000000043ad | CHANGED | Async creation of Keystone user | undercloud 2026-01-22 13:03:40.501322 | fa163e0d-6f45-64a1-ca76-0000000043ad | TIMING | tripleo_keystone_resources : Async creation of Keystone user | undercloud | 0:24:34.466337 | 2.03s 2026-01-22 13:03:40.516868 | fa163e0d-6f45-64a1-ca76-0000000043af | TASK | Check Keystone user status 2026-01-22 13:03:40.762892 | fa163e0d-6f45-64a1-ca76-0000000043af | CHANGED | Check Keystone user status | undercloud 2026-01-22 13:03:40.764477 | fa163e0d-6f45-64a1-ca76-0000000043af | TIMING | tripleo_keystone_resources : Check Keystone user status | undercloud | 0:24:34.729480 | 0.25s 2026-01-22 13:03:41.022096 | fa163e0d-6f45-64a1-ca76-0000000043af | WAITING | Check Keystone user status | undercloud | 30 retries left 2026-01-22 13:03:46.241890 | fa163e0d-6f45-64a1-ca76-0000000043af | CHANGED | Check Keystone user status | undercloud 2026-01-22 13:03:46.242833 | fa163e0d-6f45-64a1-ca76-0000000043af | TIMING | tripleo_keystone_resources : Check Keystone user status | undercloud | 0:24:40.207844 | 5.72s 2026-01-22 13:03:46.524116 | fa163e0d-6f45-64a1-ca76-0000000043af | CHANGED | Check Keystone user status | undercloud 2026-01-22 13:03:46.526781 | fa163e0d-6f45-64a1-ca76-0000000043af | TIMING | tripleo_keystone_resources : Check Keystone user status | undercloud | 0:24:40.491790 | 6.01s 2026-01-22 13:03:46.794601 | fa163e0d-6f45-64a1-ca76-0000000043af | CHANGED | Check Keystone user status | undercloud 2026-01-22 13:03:46.797359 | fa163e0d-6f45-64a1-ca76-0000000043af | TIMING | tripleo_keystone_resources : Check Keystone user status | undercloud | 0:24:40.762367 | 6.28s 2026-01-22 13:03:47.059857 | fa163e0d-6f45-64a1-ca76-0000000043af | CHANGED | Check Keystone user status | undercloud 2026-01-22 13:03:47.061762 | fa163e0d-6f45-64a1-ca76-0000000043af | TIMING | tripleo_keystone_resources : Check Keystone user status | undercloud | 0:24:41.026757 | 6.54s 2026-01-22 13:03:47.300328 | fa163e0d-6f45-64a1-ca76-0000000043af | CHANGED | Check Keystone user status | undercloud 2026-01-22 13:03:47.301962 | fa163e0d-6f45-64a1-ca76-0000000043af | TIMING | tripleo_keystone_resources : Check Keystone user status | undercloud | 0:24:41.266970 | 6.78s 2026-01-22 13:03:47.312046 | fa163e0d-6f45-64a1-ca76-0000000043af | CHANGED | Check Keystone user status | undercloud 2026-01-22 13:03:47.312961 | fa163e0d-6f45-64a1-ca76-0000000043af | TIMING | tripleo_keystone_resources : Check Keystone user status | undercloud | 0:24:41.277970 | 6.79s 2026-01-22 13:03:47.332891 | fa163e0d-6f45-64a1-ca76-0000000039b5 | TASK | Assign Keystone Users to Roles 2026-01-22 13:03:47.422949 | fa163e0d-6f45-64a1-ca76-0000000039b5 | TIMING | tripleo_keystone_resources : Assign Keystone Users to Roles | undercloud | 0:24:41.387941 | 0.09s 2026-01-22 13:03:47.456429 | fa163e0d-6f45-64a1-ca76-0000000039b5 | TIMING | tripleo_keystone_resources : Assign Keystone Users to Roles | undercloud | 0:24:41.421440 | 0.12s 2026-01-22 13:03:47.479970 | 
fa163e0d-6f45-64a1-ca76-0000000039b5 | TIMING | tripleo_keystone_resources : Assign Keystone Users to Roles | undercloud | 0:24:41.444987 | 0.15s 2026-01-22 13:03:47.493548 | fa163e0d-6f45-64a1-ca76-0000000039b5 | TIMING | tripleo_keystone_resources : Assign Keystone Users to Roles | undercloud | 0:24:41.458552 | 0.16s 2026-01-22 13:03:47.507353 | df71721e-9ea6-4c10-92ed-ec636699fb4f | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/user_roles.yml | undercloud 2026-01-22 13:03:47.513698 | 19065f20-2962-4039-a1a7-26b0c9917c06 | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/user_roles.yml | undercloud 2026-01-22 13:03:47.519411 | 49eb7563-1c6b-4c16-b37f-3c8ffd90311e | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/user_roles.yml | undercloud 2026-01-22 13:03:47.593968 | fa163e0d-6f45-64a1-ca76-0000000045a2 | TASK | Async assignment of Keystone user per role 2026-01-22 13:03:47.692503 | fa163e0d-6f45-64a1-ca76-0000000045a2 | TIMING | tripleo_keystone_resources : Async assignment of Keystone user per role | undercloud | 0:24:41.657487 | 0.10s 2026-01-22 13:03:47.710240 | fa163e0d-6f45-64a1-ca76-0000000045a2 | TIMING | tripleo_keystone_resources : Async assignment of Keystone user per role | undercloud | 0:24:41.675251 | 0.12s 2026-01-22 13:03:47.718048 | fa163e0d-6f45-64a1-ca76-0000000045a2 | TIMING | tripleo_keystone_resources : Async assignment of Keystone user per role | undercloud | 0:24:41.683049 | 0.12s 2026-01-22 13:03:47.734813 | c87b86f1-79f3-4a2b-939e-2107fb869a8f | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/user_per_role.yml | undercloud 2026-01-22 13:03:47.744362 | 6b777858-b937-4721-b293-a5385a262391 | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/user_per_role.yml | undercloud 2026-01-22 13:03:47.768178 | fa163e0d-6f45-64a1-ca76-00000000476b | TASK | Async assignment of Keystone user to roles 2026-01-22 13:03:48.133909 | fa163e0d-6f45-64a1-ca76-00000000476b | CHANGED | Async assignment of Keystone user to roles | undercloud | item=aodh 2026-01-22 13:03:48.136326 | fa163e0d-6f45-64a1-ca76-00000000476b | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:24:42.101327 | 0.37s 2026-01-22 13:03:48.423715 | fa163e0d-6f45-64a1-ca76-00000000476b | CHANGED | Async assignment of Keystone user to roles | undercloud | item=ceilometer 2026-01-22 13:03:48.424706 | fa163e0d-6f45-64a1-ca76-00000000476b | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:24:42.389710 | 0.65s 2026-01-22 13:03:48.745982 | fa163e0d-6f45-64a1-ca76-00000000476b | CHANGED | Async assignment of Keystone user to roles | undercloud | item=cinder 2026-01-22 13:03:48.746910 | fa163e0d-6f45-64a1-ca76-00000000476b | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:24:42.711920 | 0.98s 2026-01-22 13:03:49.048691 | fa163e0d-6f45-64a1-ca76-00000000476b | CHANGED | Async assignment of Keystone user to roles | undercloud | item=cinderv3 2026-01-22 13:03:49.050603 | fa163e0d-6f45-64a1-ca76-00000000476b | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:24:43.015614 | 1.28s 2026-01-22 13:03:49.395883 | fa163e0d-6f45-64a1-ca76-00000000476b | CHANGED | Async assignment of Keystone user to roles | undercloud | item=glance 2026-01-22 13:03:49.396839 | fa163e0d-6f45-64a1-ca76-00000000476b | TIMING | tripleo_keystone_resources : Async 
assignment of Keystone user to roles | undercloud | 0:24:43.361851 | 1.63s 2026-01-22 13:03:49.745119 | fa163e0d-6f45-64a1-ca76-00000000476b | CHANGED | Async assignment of Keystone user to roles | undercloud | item=gnocchi 2026-01-22 13:03:49.746051 | fa163e0d-6f45-64a1-ca76-00000000476b | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:24:43.711062 | 1.98s 2026-01-22 13:03:50.082865 | fa163e0d-6f45-64a1-ca76-00000000476b | CHANGED | Async assignment of Keystone user to roles | undercloud | item=heat 2026-01-22 13:03:50.083812 | fa163e0d-6f45-64a1-ca76-00000000476b | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:24:44.048822 | 2.31s 2026-01-22 13:03:50.392724 | fa163e0d-6f45-64a1-ca76-00000000476b | CHANGED | Async assignment of Keystone user to roles | undercloud | item=heat_stack_domain_admin 2026-01-22 13:03:50.393873 | fa163e0d-6f45-64a1-ca76-00000000476b | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:24:44.358888 | 2.62s 2026-01-22 13:03:50.656951 | fa163e0d-6f45-64a1-ca76-00000000476b | CHANGED | Async assignment of Keystone user to roles | undercloud | item=heat-cfn 2026-01-22 13:03:50.658664 | fa163e0d-6f45-64a1-ca76-00000000476b | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:24:44.623668 | 2.89s 2026-01-22 13:03:50.997418 | fa163e0d-6f45-64a1-ca76-00000000476b | CHANGED | Async assignment of Keystone user to roles | undercloud | item=manila 2026-01-22 13:03:50.998016 | fa163e0d-6f45-64a1-ca76-00000000476b | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:24:44.963033 | 3.23s 2026-01-22 13:03:51.001543 | fa163e0d-6f45-64a1-ca76-00000000476b | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:24:44.966554 | 3.23s 2026-01-22 13:03:51.020703 | fa163e0d-6f45-64a1-ca76-00000000476d | TASK | Check Keystone user assignment to roles status 2026-01-22 13:03:51.272073 | fa163e0d-6f45-64a1-ca76-00000000476d | WAITING | Check Keystone user assignment to roles status | undercloud | 30 retries left 2026-01-22 13:03:56.484459 | fa163e0d-6f45-64a1-ca76-00000000476d | CHANGED | Check Keystone user assignment to roles status | undercloud | item=aodh 2026-01-22 13:03:56.485423 | fa163e0d-6f45-64a1-ca76-00000000476d | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:24:50.450434 | 5.46s 2026-01-22 13:03:56.742177 | fa163e0d-6f45-64a1-ca76-00000000476d | CHANGED | Check Keystone user assignment to roles status | undercloud | item=ceilometer 2026-01-22 13:03:56.743218 | fa163e0d-6f45-64a1-ca76-00000000476d | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:24:50.708226 | 5.72s 2026-01-22 13:03:57.003748 | fa163e0d-6f45-64a1-ca76-00000000476d | CHANGED | Check Keystone user assignment to roles status | undercloud | item=cinder 2026-01-22 13:03:57.004290 | fa163e0d-6f45-64a1-ca76-00000000476d | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:24:50.969308 | 5.98s 2026-01-22 13:03:57.233639 | fa163e0d-6f45-64a1-ca76-00000000476d | CHANGED | Check Keystone user assignment to roles status | undercloud | item=cinderv3 2026-01-22 13:03:57.235453 | fa163e0d-6f45-64a1-ca76-00000000476d | TIMING | tripleo_keystone_resources : Check Keystone user assignment 
to roles status | undercloud | 0:24:51.200465 | 6.21s 2026-01-22 13:03:57.478066 | fa163e0d-6f45-64a1-ca76-00000000476d | CHANGED | Check Keystone user assignment to roles status | undercloud | item=glance 2026-01-22 13:03:57.478942 | fa163e0d-6f45-64a1-ca76-00000000476d | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:24:51.443955 | 6.46s 2026-01-22 13:03:57.726293 | fa163e0d-6f45-64a1-ca76-00000000476d | CHANGED | Check Keystone user assignment to roles status | undercloud | item=gnocchi 2026-01-22 13:03:57.727141 | fa163e0d-6f45-64a1-ca76-00000000476d | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:24:51.692154 | 6.71s 2026-01-22 13:03:57.961067 | fa163e0d-6f45-64a1-ca76-00000000476d | CHANGED | Check Keystone user assignment to roles status | undercloud | item=heat 2026-01-22 13:03:57.962815 | fa163e0d-6f45-64a1-ca76-00000000476d | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:24:51.927826 | 6.94s 2026-01-22 13:03:58.242995 | fa163e0d-6f45-64a1-ca76-00000000476d | CHANGED | Check Keystone user assignment to roles status | undercloud | item=heat_stack_domain_admin 2026-01-22 13:03:58.244864 | fa163e0d-6f45-64a1-ca76-00000000476d | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:24:52.209873 | 7.22s 2026-01-22 13:03:58.501381 | fa163e0d-6f45-64a1-ca76-00000000476d | CHANGED | Check Keystone user assignment to roles status | undercloud | item=heat-cfn 2026-01-22 13:03:58.502509 | fa163e0d-6f45-64a1-ca76-00000000476d | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:24:52.467518 | 7.48s 2026-01-22 13:03:58.756790 | fa163e0d-6f45-64a1-ca76-00000000476d | CHANGED | Check Keystone user assignment to roles status | undercloud | item=manila 2026-01-22 13:03:58.757865 | fa163e0d-6f45-64a1-ca76-00000000476d | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:24:52.722874 | 7.74s 2026-01-22 13:03:58.767341 | fa163e0d-6f45-64a1-ca76-00000000476d | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:24:52.732345 | 7.75s 2026-01-22 13:03:58.790960 | fa163e0d-6f45-64a1-ca76-000000004772 | TASK | Async assignment of Keystone user to roles 2026-01-22 13:03:59.199862 | fa163e0d-6f45-64a1-ca76-000000004772 | CHANGED | Async assignment of Keystone user to roles | undercloud | item=manilav2 2026-01-22 13:03:59.202290 | fa163e0d-6f45-64a1-ca76-000000004772 | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:24:53.167290 | 0.41s 2026-01-22 13:03:59.520118 | fa163e0d-6f45-64a1-ca76-000000004772 | CHANGED | Async assignment of Keystone user to roles | undercloud | item=neutron 2026-01-22 13:03:59.521105 | fa163e0d-6f45-64a1-ca76-000000004772 | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:24:53.486115 | 0.73s 2026-01-22 13:03:59.818391 | fa163e0d-6f45-64a1-ca76-000000004772 | CHANGED | Async assignment of Keystone user to roles | undercloud | item=nova 2026-01-22 13:03:59.819925 | fa163e0d-6f45-64a1-ca76-000000004772 | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:24:53.784934 | 1.03s 2026-01-22 13:04:00.106210 | fa163e0d-6f45-64a1-ca76-000000004772 | CHANGED | Async assignment of Keystone 
user to roles | undercloud | item=octavia 2026-01-22 13:04:00.108202 | fa163e0d-6f45-64a1-ca76-000000004772 | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:24:54.073215 | 1.32s 2026-01-22 13:04:00.378259 | fa163e0d-6f45-64a1-ca76-000000004772 | CHANGED | Async assignment of Keystone user to roles | undercloud | item=placement 2026-01-22 13:04:00.378961 | fa163e0d-6f45-64a1-ca76-000000004772 | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:24:54.343975 | 1.59s 2026-01-22 13:04:00.698410 | fa163e0d-6f45-64a1-ca76-000000004772 | CHANGED | Async assignment of Keystone user to roles | undercloud | item=swift 2026-01-22 13:04:00.699145 | fa163e0d-6f45-64a1-ca76-000000004772 | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:24:54.664161 | 1.91s 2026-01-22 13:04:00.706822 | fa163e0d-6f45-64a1-ca76-000000004772 | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:24:54.671835 | 1.91s 2026-01-22 13:04:00.722086 | fa163e0d-6f45-64a1-ca76-000000004774 | TASK | Check Keystone user assignment to roles status 2026-01-22 13:04:01.057327 | fa163e0d-6f45-64a1-ca76-000000004774 | WAITING | Check Keystone user assignment to roles status | undercloud | 30 retries left 2026-01-22 13:04:06.261281 | fa163e0d-6f45-64a1-ca76-000000004774 | CHANGED | Check Keystone user assignment to roles status | undercloud | item=manilav2 2026-01-22 13:04:06.262601 | fa163e0d-6f45-64a1-ca76-000000004774 | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:25:00.227604 | 5.54s 2026-01-22 13:04:06.491535 | fa163e0d-6f45-64a1-ca76-000000004774 | CHANGED | Check Keystone user assignment to roles status | undercloud | item=neutron 2026-01-22 13:04:06.493783 | fa163e0d-6f45-64a1-ca76-000000004774 | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:25:00.458793 | 5.77s 2026-01-22 13:04:06.753735 | fa163e0d-6f45-64a1-ca76-000000004774 | CHANGED | Check Keystone user assignment to roles status | undercloud | item=nova 2026-01-22 13:04:06.756573 | fa163e0d-6f45-64a1-ca76-000000004774 | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:25:00.721584 | 6.03s 2026-01-22 13:04:07.018409 | fa163e0d-6f45-64a1-ca76-000000004774 | CHANGED | Check Keystone user assignment to roles status | undercloud | item=octavia 2026-01-22 13:04:07.019257 | fa163e0d-6f45-64a1-ca76-000000004774 | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:25:00.984272 | 6.30s 2026-01-22 13:04:07.260829 | fa163e0d-6f45-64a1-ca76-000000004774 | CHANGED | Check Keystone user assignment to roles status | undercloud | item=placement 2026-01-22 13:04:07.262480 | fa163e0d-6f45-64a1-ca76-000000004774 | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:25:01.227494 | 6.54s 2026-01-22 13:04:07.497143 | fa163e0d-6f45-64a1-ca76-000000004774 | CHANGED | Check Keystone user assignment to roles status | undercloud | item=swift 2026-01-22 13:04:07.498733 | fa163e0d-6f45-64a1-ca76-000000004774 | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:25:01.463746 | 6.78s 2026-01-22 13:04:07.510846 | fa163e0d-6f45-64a1-ca76-000000004774 | TIMING | tripleo_keystone_resources : Check Keystone user 
assignment to roles status | undercloud | 0:25:01.475850 | 6.79s 2026-01-22 13:04:07.532198 | fa163e0d-6f45-64a1-ca76-0000000045a7 | TASK | Async assignment of Keystone user per role 2026-01-22 13:04:07.603274 | fa163e0d-6f45-64a1-ca76-0000000045a7 | TIMING | tripleo_keystone_resources : Async assignment of Keystone user per role | undercloud | 0:25:01.568274 | 0.07s 2026-01-22 13:04:07.615404 | fa163e0d-6f45-64a1-ca76-0000000045a7 | TIMING | tripleo_keystone_resources : Async assignment of Keystone user per role | undercloud | 0:25:01.580410 | 0.08s 2026-01-22 13:04:07.628550 | f74cc4c6-bb90-48d6-a4bb-6ec9eb7feeca | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/user_per_role.yml | undercloud 2026-01-22 13:04:07.653719 | fa163e0d-6f45-64a1-ca76-0000000047d8 | TASK | Async assignment of Keystone user to roles 2026-01-22 13:04:08.006737 | fa163e0d-6f45-64a1-ca76-0000000047d8 | CHANGED | Async assignment of Keystone user to roles | undercloud | item=ceilometer 2026-01-22 13:04:08.008911 | fa163e0d-6f45-64a1-ca76-0000000047d8 | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:25:01.973925 | 0.35s 2026-01-22 13:04:08.019390 | fa163e0d-6f45-64a1-ca76-0000000047d8 | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:25:01.984391 | 0.36s 2026-01-22 13:04:08.042041 | fa163e0d-6f45-64a1-ca76-0000000047da | TASK | Check Keystone user assignment to roles status 2026-01-22 13:04:08.264849 | fa163e0d-6f45-64a1-ca76-0000000047da | WAITING | Check Keystone user assignment to roles status | undercloud | 30 retries left 2026-01-22 13:04:13.475169 | fa163e0d-6f45-64a1-ca76-0000000047da | CHANGED | Check Keystone user assignment to roles status | undercloud | item=ceilometer 2026-01-22 13:04:13.476816 | fa163e0d-6f45-64a1-ca76-0000000047da | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:25:07.441828 | 5.43s 2026-01-22 13:04:13.487018 | fa163e0d-6f45-64a1-ca76-0000000047da | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:25:07.452029 | 5.44s 2026-01-22 13:04:13.501609 | fa163e0d-6f45-64a1-ca76-0000000045ac | TASK | Async assignment of Keystone user per role 2026-01-22 13:04:13.590339 | fa163e0d-6f45-64a1-ca76-0000000045ac | TIMING | tripleo_keystone_resources : Async assignment of Keystone user per role | undercloud | 0:25:07.555344 | 0.09s 2026-01-22 13:04:13.594610 | fa163e0d-6f45-64a1-ca76-0000000045ac | TIMING | tripleo_keystone_resources : Async assignment of Keystone user per role | undercloud | 0:25:07.559609 | 0.09s 2026-01-22 13:04:13.609367 | c08b8f1c-6e5c-49a0-84b8-ae5102a8ebf7 | INCLUDED | /usr/share/ansible/roles/tripleo_keystone_resources/tasks/user_per_role.yml | undercloud 2026-01-22 13:04:13.633363 | fa163e0d-6f45-64a1-ca76-0000000047fc | TASK | Async assignment of Keystone user to roles 2026-01-22 13:04:13.983847 | fa163e0d-6f45-64a1-ca76-0000000047fc | CHANGED | Async assignment of Keystone user to roles | undercloud | item=cinder 2026-01-22 13:04:13.985421 | fa163e0d-6f45-64a1-ca76-0000000047fc | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:25:07.950435 | 0.35s 2026-01-22 13:04:14.305118 | fa163e0d-6f45-64a1-ca76-0000000047fc | CHANGED | Async assignment of Keystone user to roles | undercloud | item=cinderv3 2026-01-22 13:04:14.306113 | fa163e0d-6f45-64a1-ca76-0000000047fc | TIMING | tripleo_keystone_resources : 
Async assignment of Keystone user to roles | undercloud | 0:25:08.271122 | 0.67s 2026-01-22 13:04:14.623226 | fa163e0d-6f45-64a1-ca76-0000000047fc | CHANGED | Async assignment of Keystone user to roles | undercloud | item=nova 2026-01-22 13:04:14.625039 | fa163e0d-6f45-64a1-ca76-0000000047fc | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:25:08.590048 | 0.99s 2026-01-22 13:04:14.635416 | fa163e0d-6f45-64a1-ca76-0000000047fc | TIMING | tripleo_keystone_resources : Async assignment of Keystone user to roles | undercloud | 0:25:08.600421 | 1.00s 2026-01-22 13:04:14.652469 | fa163e0d-6f45-64a1-ca76-0000000047fe | TASK | Check Keystone user assignment to roles status 2026-01-22 13:04:14.960217 | fa163e0d-6f45-64a1-ca76-0000000047fe | WAITING | Check Keystone user assignment to roles status | undercloud | 30 retries left 2026-01-22 13:04:20.180520 | fa163e0d-6f45-64a1-ca76-0000000047fe | CHANGED | Check Keystone user assignment to roles status | undercloud | item=cinder 2026-01-22 13:04:20.182382 | fa163e0d-6f45-64a1-ca76-0000000047fe | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:25:14.147396 | 5.53s 2026-01-22 13:04:20.437170 | fa163e0d-6f45-64a1-ca76-0000000047fe | CHANGED | Check Keystone user assignment to roles status | undercloud | item=cinderv3 2026-01-22 13:04:20.439835 | fa163e0d-6f45-64a1-ca76-0000000047fe | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:25:14.404844 | 5.79s 2026-01-22 13:04:20.695751 | fa163e0d-6f45-64a1-ca76-0000000047fe | CHANGED | Check Keystone user assignment to roles status | undercloud | item=nova 2026-01-22 13:04:20.697991 | fa163e0d-6f45-64a1-ca76-0000000047fe | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:25:14.663001 | 6.04s 2026-01-22 13:04:20.709370 | fa163e0d-6f45-64a1-ca76-0000000047fe | TIMING | tripleo_keystone_resources : Check Keystone user assignment to roles status | undercloud | 0:25:14.674375 | 6.06s 2026-01-22 13:04:20.749049 | fa163e0d-6f45-64a1-ca76-00000000397b | TASK | is Keystone LDAP enabled 2026-01-22 13:04:20.808918 | fa163e0d-6f45-64a1-ca76-00000000397b | OK | is Keystone LDAP enabled | undercloud 2026-01-22 13:04:20.810314 | fa163e0d-6f45-64a1-ca76-00000000397b | TIMING | is Keystone LDAP enabled | undercloud | 0:25:14.775322 | 0.06s 2026-01-22 13:04:20.828056 | fa163e0d-6f45-64a1-ca76-00000000397c | TASK | Set fact for tripleo_keystone_ldap_domains 2026-01-22 13:04:20.882703 | fa163e0d-6f45-64a1-ca76-00000000397c | SKIPPED | Set fact for tripleo_keystone_ldap_domains | undercloud 2026-01-22 13:04:20.883937 | fa163e0d-6f45-64a1-ca76-00000000397c | TIMING | Set fact for tripleo_keystone_ldap_domains | undercloud | 0:25:14.848943 | 0.05s 2026-01-22 13:04:20.903035 | fa163e0d-6f45-64a1-ca76-00000000397d | TASK | Manage Keystone domains from LDAP config 2026-01-22 13:04:20.957333 | fa163e0d-6f45-64a1-ca76-00000000397d | SKIPPED | Manage Keystone domains from LDAP config | undercloud 2026-01-22 13:04:20.958527 | fa163e0d-6f45-64a1-ca76-00000000397d | TIMING | Manage Keystone domains from LDAP config | undercloud | 0:25:14.923527 | 0.05s PLAY [Deploy step tasks for 4] ************************************************* 2026-01-22 13:04:21.101146 | fa163e0d-6f45-64a1-ca76-0000000000e2 | TASK | Deploy step tasks for 4 2026-01-22 13:04:21.131343 | fa163e0d-6f45-64a1-ca76-0000000000e2 | OK | Deploy step tasks for 4 | standalone 
-> localhost | result={ "changed": false, "msg": "Use --start-at-task 'Deploy step tasks for 4' to resume from this task" } [WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-0000000000e2') missing from stats 2026-01-22 13:04:21.154331 | fa163e0d-6f45-64a1-ca76-0000000000e3 | TASK | Write the config_step hieradata for the deploy step 4 tasks 2026-01-22 13:04:21.668750 | fa163e0d-6f45-64a1-ca76-0000000000e3 | CHANGED | Write the config_step hieradata for the deploy step 4 tasks | standalone 2026-01-22 13:04:21.670166 | fa163e0d-6f45-64a1-ca76-0000000000e3 | TIMING | Write the config_step hieradata for the deploy step 4 tasks | standalone | 0:25:15.635174 | 0.51s [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. Found: '{{ playbook_dir }}/{{ _task_file_path }}' is exists 2026-01-22 13:04:21.742014 | fa163e0d-6f45-64a1-ca76-0000000000e4 | TIMING | include_tasks | standalone | 0:25:15.707017 | 0.04s 2026-01-22 13:04:21.784595 | 2bde4e67-6ec8-4ff4-8114-5fb8837c99f2 | INCLUDED | /root/standalone-ansible-mz1ymllk/Standalone/deploy_steps_tasks_step4.yaml | standalone 2026-01-22 13:04:21.816558 | fa163e0d-6f45-64a1-ca76-000000004840 | TASK | Create virtqemud recovery script 2026-01-22 13:04:22.316484 | fa163e0d-6f45-64a1-ca76-000000004840 | CHANGED | Create virtqemud recovery script | standalone 2026-01-22 13:04:22.318184 | fa163e0d-6f45-64a1-ca76-000000004840 | TIMING | Create virtqemud recovery script | standalone | 0:25:16.283192 | 0.50s 2026-01-22 13:04:22.344108 | fa163e0d-6f45-64a1-ca76-000000004841 | TASK | Create virtqemud recovery trigger service 2026-01-22 13:04:22.867243 | fa163e0d-6f45-64a1-ca76-000000004841 | CHANGED | Create virtqemud recovery trigger service | standalone 2026-01-22 13:04:22.868018 | fa163e0d-6f45-64a1-ca76-000000004841 | TIMING | Create virtqemud recovery trigger service | standalone | 0:25:16.833035 | 0.52s 2026-01-22 13:04:22.895587 | fa163e0d-6f45-64a1-ca76-000000004842 | TASK | Create virtqemud recovery trigger timer 2026-01-22 13:04:23.418557 | fa163e0d-6f45-64a1-ca76-000000004842 | CHANGED | Create virtqemud recovery trigger timer | standalone 2026-01-22 13:04:23.419555 | fa163e0d-6f45-64a1-ca76-000000004842 | TIMING | Create virtqemud recovery trigger timer | standalone | 0:25:17.384568 | 0.52s 2026-01-22 13:04:23.441359 | fa163e0d-6f45-64a1-ca76-000000004843 | TASK | Enable virtqemud recovery trigger service 2026-01-22 13:04:24.449755 | fa163e0d-6f45-64a1-ca76-000000004843 | CHANGED | Enable virtqemud recovery trigger service | standalone 2026-01-22 13:04:24.452128 | fa163e0d-6f45-64a1-ca76-000000004843 | TIMING | Enable virtqemud recovery trigger service | standalone | 0:25:18.417126 | 1.01s 2026-01-22 13:04:24.483836 | fa163e0d-6f45-64a1-ca76-000000004844 | TASK | Enable virtqemud recovery trigger timer 2026-01-22 13:04:25.485898 | fa163e0d-6f45-64a1-ca76-000000004844 | CHANGED | Enable virtqemud recovery trigger timer | standalone 2026-01-22 13:04:25.487076 | fa163e0d-6f45-64a1-ca76-000000004844 | TIMING | Enable virtqemud recovery trigger timer | standalone | 0:25:19.452091 | 1.00s 2026-01-22 13:04:25.508474 | fa163e0d-6f45-64a1-ca76-000000004845 | TASK | Really enable virtqemud recovery trigger timer 2026-01-22 13:04:26.097485 | fa163e0d-6f45-64a1-ca76-000000004845 | CHANGED | Really enable virtqemud recovery trigger timer | standalone 2026-01-22 13:04:26.098790 | fa163e0d-6f45-64a1-ca76-000000004845 | TIMING | Really enable virtqemud recovery trigger timer | standalone | 0:25:20.063798 
| 0.59s 2026-01-22 13:04:26.126725 | fa163e0d-6f45-64a1-ca76-000000004847 | TASK | Create systemd file 2026-01-22 13:04:26.646812 | fa163e0d-6f45-64a1-ca76-000000004847 | CHANGED | Create systemd file | standalone 2026-01-22 13:04:26.648287 | fa163e0d-6f45-64a1-ca76-000000004847 | TIMING | Create systemd file | standalone | 0:25:20.613290 | 0.52s 2026-01-22 13:04:26.677239 | fa163e0d-6f45-64a1-ca76-000000004848 | TASK | Reload systemd 2026-01-22 13:04:27.388174 | fa163e0d-6f45-64a1-ca76-000000004848 | CHANGED | Reload systemd | standalone 2026-01-22 13:04:27.389555 | fa163e0d-6f45-64a1-ca76-000000004848 | TIMING | Reload systemd | standalone | 0:25:21.354563 | 0.71s 2026-01-22 13:04:27.417868 | fa163e0d-6f45-64a1-ca76-00000000484b | TASK | Start OVN container 2026-01-22 13:04:27.482556 | fa163e0d-6f45-64a1-ca76-00000000484b | SKIPPED | Start OVN container | standalone | item=ovn_cluster_north_db_server 2026-01-22 13:04:27.511635 | fa163e0d-6f45-64a1-ca76-00000000484b | SKIPPED | Start OVN container | standalone | item=ovn_cluster_south_db_server 2026-01-22 13:04:27.525522 | fa163e0d-6f45-64a1-ca76-00000000484b | SKIPPED | Start OVN container | standalone | item=ovn_cluster_northd 2026-01-22 13:04:27.537439 | fa163e0d-6f45-64a1-ca76-00000000484b | TIMING | Start OVN container | standalone | 0:25:21.502439 | 0.12s 2026-01-22 13:04:27.558889 | fa163e0d-6f45-64a1-ca76-00000000484d | TASK | Set NB connection 2026-01-22 13:04:27.607300 | fa163e0d-6f45-64a1-ca76-00000000484d | SKIPPED | Set NB connection | standalone 2026-01-22 13:04:27.608147 | fa163e0d-6f45-64a1-ca76-00000000484d | TIMING | Set NB connection | standalone | 0:25:21.573160 | 0.05s 2026-01-22 13:04:27.630400 | fa163e0d-6f45-64a1-ca76-00000000484e | TASK | Set SB connection 2026-01-22 13:04:27.682683 | fa163e0d-6f45-64a1-ca76-00000000484e | SKIPPED | Set SB connection | standalone 2026-01-22 13:04:27.684015 | fa163e0d-6f45-64a1-ca76-00000000484e | TIMING | Set SB connection | standalone | 0:25:21.649020 | 0.05s 2026-01-22 13:04:27.712586 | fa163e0d-6f45-64a1-ca76-0000000000e6 | TASK | Check if /var/lib/tripleo-config/container-startup-config/step_4 already exists 2026-01-22 13:04:27.947064 | fa163e0d-6f45-64a1-ca76-0000000000e6 | OK | Check if /var/lib/tripleo-config/container-startup-config/step_4 already exists | standalone 2026-01-22 13:04:27.948600 | fa163e0d-6f45-64a1-ca76-0000000000e6 | TIMING | Check if /var/lib/tripleo-config/container-startup-config/step_4 already exists | standalone | 0:25:21.913609 | 0.23s 2026-01-22 13:04:28.061791 | fa163e0d-6f45-64a1-ca76-0000000000e7 | TIMING | include_tasks | standalone | 0:25:22.026789 | 0.08s 2026-01-22 13:04:28.099335 | ad0be229-cf1e-4f68-b337-4003c32a5854 | INCLUDED | /root/standalone-ansible-mz1ymllk/common_deploy_steps_tasks.yaml | standalone 2026-01-22 13:04:28.125809 | fa163e0d-6f45-64a1-ca76-00000000487d | TASK | Write the config_step hieradata 2026-01-22 13:04:28.585193 | fa163e0d-6f45-64a1-ca76-00000000487d | OK | Write the config_step hieradata | standalone 2026-01-22 13:04:28.586350 | fa163e0d-6f45-64a1-ca76-00000000487d | TIMING | Write the config_step hieradata | standalone | 0:25:22.551359 | 0.46s 2026-01-22 13:04:28.616305 | fa163e0d-6f45-64a1-ca76-00000000487e | TASK | Run puppet host configuration for step 4 2026-01-22 13:04:28.889790 | fa163e0d-6f45-64a1-ca76-00000000487e | CHANGED | Run puppet host configuration for step 4 | standalone 2026-01-22 13:04:28.891017 | fa163e0d-6f45-64a1-ca76-00000000487e | TIMING | Run puppet host configuration for step 4 | 
standalone | 0:25:22.856025 | 0.27s 2026-01-22 13:04:28.918886 | fa163e0d-6f45-64a1-ca76-00000000487f | TASK | Wait for puppet host configuration to finish 2026-01-22 13:04:29.179951 | fa163e0d-6f45-64a1-ca76-00000000487f | WAITING | Wait for puppet host configuration to finish | standalone | 360 retries left 2026-01-22 13:04:39.363419 | fa163e0d-6f45-64a1-ca76-00000000487f | WAITING | Wait for puppet host configuration to finish | standalone | 359 retries left 2026-01-22 13:04:49.607215 | fa163e0d-6f45-64a1-ca76-00000000487f | WAITING | Wait for puppet host configuration to finish | standalone | 358 retries left 2026-01-22 13:04:59.835276 | fa163e0d-6f45-64a1-ca76-00000000487f | CHANGED | Wait for puppet host configuration to finish | standalone 2026-01-22 13:04:59.838210 | fa163e0d-6f45-64a1-ca76-00000000487f | TIMING | Wait for puppet host configuration to finish | standalone | 0:25:53.803202 | 30.92s 2026-01-22 13:04:59.869087 | fa163e0d-6f45-64a1-ca76-000000004880 | TASK | Debug output for task: Run puppet host configuration for step 4 2026-01-22 13:04:59.952785 | fa163e0d-6f45-64a1-ca76-000000004880 | CHANGED | Debug output for task: Run puppet host configuration for step 4 | standalone | result={ "changed": true, "failed_when_result": false, "puppet_host_outputs.stdout_lines | default([]) | union(puppet_host_outputs.stderr_lines | default([]))": [ "<13>Jan 22 13:04:29 puppet-user: Warning: /etc/puppet/hiera.yaml: Use of 'hiera.yaml' version 3 is deprecated. It should be converted to version 5", "<13>Jan 22 13:04:35 puppet-user: (file: /etc/puppet/hiera.yaml)", "<13>Jan 22 13:04:35 puppet-user: Warning: Undefined variable '::deploy_config_name'; ", "<13>Jan 22 13:04:35 puppet-user: (file & line not available)", "<13>Jan 22 13:04:35 puppet-user: Warning: The function 'hiera' is deprecated in favor of using 'lookup'. See https://puppet.com/docs/puppet/7.10/deprecated_language.html", "<13>Jan 22 13:04:35 puppet-user: Warning: Unknown variable: '::deployment_type'. (file: /etc/puppet/modules/tripleo/manifests/profile/base/database/mysql/client.pp, line: 89, column: 8)", "<13>Jan 22 13:04:36 puppet-user: Warning: This method is deprecated, please use match expressions with Stdlib::Compat::String instead. They are described at https://docs.puppet.com/puppet/latest/reference/lang_data_type.html#match-expressions. at [\"/etc/puppet/modules/snmp/manifests/params.pp\", 310]:[\"/var/lib/tripleo-config/puppet_step_config.pp\", 7]", "<13>Jan 22 13:04:36 puppet-user: (location: /etc/puppet/modules/stdlib/lib/puppet/functions/deprecation.rb:34:in `deprecation')", "<13>Jan 22 13:04:36 puppet-user: Warning: This method is deprecated, please use the stdlib validate_legacy function,", "<13>Jan 22 13:04:36 puppet-user: with Stdlib::Compat::Bool. There is further documentation for validate_legacy function in the README. at [\"/etc/puppet/modules/snmp/manifests/init.pp\", 358]:[\"/var/lib/tripleo-config/puppet_step_config.pp\", 7]", "<13>Jan 22 13:04:36 puppet-user: with Stdlib::Compat::Array. There is further documentation for validate_legacy function in the README. at [\"/etc/puppet/modules/snmp/manifests/init.pp\", 367]:[\"/var/lib/tripleo-config/puppet_step_config.pp\", 7]", "<13>Jan 22 13:04:36 puppet-user: with Stdlib::Compat::String. There is further documentation for validate_legacy function in the README. at [\"/etc/puppet/modules/snmp/manifests/init.pp\", 382]:[\"/var/lib/tripleo-config/puppet_step_config.pp\", 7]", "<13>Jan 22 13:04:36 puppet-user: with Stdlib::Compat::Numeric. 
There is further documentation for validate_legacy function in the README. at [\"/etc/puppet/modules/snmp/manifests/init.pp\", 388]:[\"/var/lib/tripleo-config/puppet_step_config.pp\", 7]", "<13>Jan 22 13:04:36 puppet-user: with Pattern[]. There is further documentation for validate_legacy function in the README. at [\"/etc/puppet/modules/snmp/manifests/init.pp\", 393]:[\"/var/lib/tripleo-config/puppet_step_config.pp\", 7]", "<13>Jan 22 13:04:36 puppet-user: Warning: Unknown variable: '::deployment_type'. (file: /etc/puppet/modules/tripleo/manifests/packages.pp, line: 39, column: 69)", "<13>Jan 22 13:04:36 puppet-user: Notice: Compiled catalog for standalone.ooo.test in environment production in 0.50 seconds", "<13>Jan 22 13:04:40 puppet-user: Notice: /Stage[main]/Pacemaker::Resource_defaults/Pcmk_resource_default[resource-stickiness]/ensure: created", "<13>Jan 22 13:04:42 puppet-user: Notice: /Stage[main]/Pacemaker::Resource_op_defaults/Pcmk_resource_op_default[bundle]/ensure: created", "<13>Jan 22 13:04:48 puppet-user: Notice: /Stage[main]/Snmp/Package[snmpd]/ensure: created", "<13>Jan 22 13:04:48 puppet-user: Notice: /Stage[main]/Snmp/File[snmpd.conf]/content: content changed '{sha256}2b743f970e80e2150759bfc66f2d8d0fbd8b31624f79e2991248d1a5ac57494e' to '{sha256}da3959f7a60a07616dd85066ff77229190a7d85b40d4df752d54f09a08ffbd1e'", "<13>Jan 22 13:04:48 puppet-user: Notice: /Stage[main]/Snmp/File[snmpd.sysconfig]/content: content changed '{sha256}b63afb2dee7419b6834471f88581d981c8ae5c8b27b9d329ba67a02f3ddd8221' to '{sha256}3917ee8bbc680ad50d77186ad4a1d2705c2025c32fc32f823abbda7f2328dfbd'", "<13>Jan 22 13:04:48 puppet-user: Notice: /Stage[main]/Snmp/File[snmptrapd.conf]/content: content changed '{sha256}2e1ca894d609ef337b6243909bf5623c87fd5df98ecbd00c7d4c12cf12f03c4e' to '{sha256}3ecf18da1ba84ea3932607f2b903ee6a038b6f9ac4e1e371e48f3ef61c5052ea'", "<13>Jan 22 13:04:48 puppet-user: Notice: /Stage[main]/Snmp/File[snmptrapd.sysconfig]/content: content changed '{sha256}86ee5797ad10cb1ea0f631e9dfa6ae278ecf4f4d16f4c80f831cdde45601b23c' to '{sha256}2244553364afcca151958f8e2003e4c182f5e2ecfbe55405cec73fd818581e97'", "<13>Jan 22 13:04:48 puppet-user: Notice: /Stage[main]/Snmp/Service[snmptrapd]: Triggered 'refresh' from 2 events", "<13>Jan 22 13:04:49 puppet-user: Deprecation Warning: This command is deprecated and will be removed. 
Please use 'pcs property config' instead.", "<13>Jan 22 13:04:54 puppet-user: Notice: /Stage[main]/Tripleo::Profile::Base::Snmp/Snmp::Snmpv3_user[ro_snmp_user]/Exec[create-snmpv3-user-ro_snmp_user]/returns: executed successfully", "<13>Jan 22 13:04:56 puppet-user: Notice: /Stage[main]/Snmp/Service[snmpd]/ensure: ensure changed 'stopped' to 'running'", "<13>Jan 22 13:04:56 puppet-user: Notice: Applied catalog in 19.56 seconds", "<13>Jan 22 13:04:56 puppet-user: Application:", "<13>Jan 22 13:04:56 puppet-user: Initial environment: production", "<13>Jan 22 13:04:56 puppet-user: Converged environment: production", "<13>Jan 22 13:04:56 puppet-user: Run mode: user", "<13>Jan 22 13:04:56 puppet-user: Changes:", "<13>Jan 22 13:04:56 puppet-user: Total: 10", "<13>Jan 22 13:04:56 puppet-user: Events:", "<13>Jan 22 13:04:56 puppet-user: Success: 10", "<13>Jan 22 13:04:56 puppet-user: Resources:", "<13>Jan 22 13:04:56 puppet-user: Restarted: 1", "<13>Jan 22 13:04:56 puppet-user: Changed: 10", "<13>Jan 22 13:04:56 puppet-user: Out of sync: 10", "<13>Jan 22 13:04:56 puppet-user: Total: 37", "<13>Jan 22 13:04:56 puppet-user: Time:", "<13>Jan 22 13:04:56 puppet-user: Filebucket: 0.00", "<13>Jan 22 13:04:56 puppet-user: Schedule: 0.00", "<13>Jan 22 13:04:56 puppet-user: File line: 0.00", "<13>Jan 22 13:04:56 puppet-user: Augeas: 0.01", "<13>Jan 22 13:04:56 puppet-user: User: 0.01", "<13>Jan 22 13:04:56 puppet-user: File: 0.10", "<13>Jan 22 13:04:56 puppet-user: Config retrieval: 0.57", "<13>Jan 22 13:04:56 puppet-user: Service: 1.35", "<13>Jan 22 13:04:56 puppet-user: Pcmk property: 1.49", "<13>Jan 22 13:04:56 puppet-user: Last run: 1769087096", "<13>Jan 22 13:04:56 puppet-user: Transaction evaluation: 19.55", "<13>Jan 22 13:04:56 puppet-user: Catalog application: 19.56", "<13>Jan 22 13:04:56 puppet-user: Pcmk resource op default: 2.00", "<13>Jan 22 13:04:56 puppet-user: Pcmk resource default: 2.11", "<13>Jan 22 13:04:56 puppet-user: Package: 5.20", "<13>Jan 22 13:04:56 puppet-user: Exec: 7.06", "<13>Jan 22 13:04:56 puppet-user: Total: 19.57", "<13>Jan 22 13:04:56 puppet-user: Version:", "<13>Jan 22 13:04:56 puppet-user: Config: 1769087075", "<13>Jan 22 13:04:56 puppet-user: Puppet: 7.10.0" ] } 2026-01-22 13:04:59.954281 | fa163e0d-6f45-64a1-ca76-000000004880 | TIMING | Debug output for task: Run puppet host configuration for step 4 | standalone | 0:25:53.919288 | 0.08s 2026-01-22 13:04:59.983498 | fa163e0d-6f45-64a1-ca76-000000004881 | TASK | Pre-cache facts for puppet containers 2026-01-22 13:05:00.013693 | fa163e0d-6f45-64a1-ca76-000000004881 | TIMING | Pre-cache facts for puppet containers | standalone | 0:25:53.978670 | 0.03s 2026-01-22 13:05:00.084152 | fa163e0d-6f45-64a1-ca76-0000000048c2 | TASK | Gather variables for each operating system 2026-01-22 13:05:00.196891 | fa163e0d-6f45-64a1-ca76-0000000048c2 | TIMING | tripleo_puppet_cache : Gather variables for each operating system | standalone | 0:25:54.161891 | 0.11s 2026-01-22 13:05:00.229048 | fa163e0d-6f45-64a1-ca76-0000000048c3 | TASK | Create puppet caching structures 2026-01-22 13:05:00.918950 | fa163e0d-6f45-64a1-ca76-0000000048c3 | CHANGED | Create puppet caching structures | standalone 2026-01-22 13:05:00.920089 | fa163e0d-6f45-64a1-ca76-0000000048c3 | TIMING | tripleo_puppet_cache : Create puppet caching structures | standalone | 0:25:54.885099 | 0.69s 2026-01-22 13:05:00.942927 | fa163e0d-6f45-64a1-ca76-0000000048c4 | TASK | Check for facter.conf 2026-01-22 13:05:01.211258 | fa163e0d-6f45-64a1-ca76-0000000048c4 | OK | Check for 
facter.conf | standalone 2026-01-22 13:05:01.212348 | fa163e0d-6f45-64a1-ca76-0000000048c4 | TIMING | tripleo_puppet_cache : Check for facter.conf | standalone | 0:25:55.177360 | 0.27s 2026-01-22 13:05:01.234917 | fa163e0d-6f45-64a1-ca76-0000000048c5 | TASK | Remove facter.conf if directory 2026-01-22 13:05:01.288177 | fa163e0d-6f45-64a1-ca76-0000000048c5 | SKIPPED | Remove facter.conf if directory | standalone 2026-01-22 13:05:01.289165 | fa163e0d-6f45-64a1-ca76-0000000048c5 | TIMING | tripleo_puppet_cache : Remove facter.conf if directory | standalone | 0:25:55.254179 | 0.05s 2026-01-22 13:05:01.312485 | fa163e0d-6f45-64a1-ca76-0000000048c6 | TASK | Write facter cache config 2026-01-22 13:05:01.817206 | fa163e0d-6f45-64a1-ca76-0000000048c6 | CHANGED | Write facter cache config | standalone 2026-01-22 13:05:01.818280 | fa163e0d-6f45-64a1-ca76-0000000048c6 | TIMING | tripleo_puppet_cache : Write facter cache config | standalone | 0:25:55.783293 | 0.50s 2026-01-22 13:05:01.838158 | fa163e0d-6f45-64a1-ca76-0000000048c7 | TASK | Cleanup facter cache if exists 2026-01-22 13:05:02.083883 | fa163e0d-6f45-64a1-ca76-0000000048c7 | CHANGED | Cleanup facter cache if exists | standalone 2026-01-22 13:05:02.085402 | fa163e0d-6f45-64a1-ca76-0000000048c7 | TIMING | tripleo_puppet_cache : Cleanup facter cache if exists | standalone | 0:25:56.050409 | 0.25s 2026-01-22 13:05:02.114979 | fa163e0d-6f45-64a1-ca76-0000000048c8 | TASK | Pre-cache facts 2026-01-22 13:05:02.744691 | fa163e0d-6f45-64a1-ca76-0000000048c8 | CHANGED | Pre-cache facts | standalone 2026-01-22 13:05:02.745996 | fa163e0d-6f45-64a1-ca76-0000000048c8 | TIMING | tripleo_puppet_cache : Pre-cache facts | standalone | 0:25:56.711000 | 0.63s 2026-01-22 13:05:02.778558 | fa163e0d-6f45-64a1-ca76-0000000048c9 | TASK | Failed deployment if facter fails 2026-01-22 13:05:02.820876 | fa163e0d-6f45-64a1-ca76-0000000048c9 | SKIPPED | Failed deployment if facter fails | standalone 2026-01-22 13:05:02.822275 | fa163e0d-6f45-64a1-ca76-0000000048c9 | TIMING | tripleo_puppet_cache : Failed deployment if facter fails | standalone | 0:25:56.787280 | 0.04s 2026-01-22 13:05:02.868281 | fa163e0d-6f45-64a1-ca76-0000000048ca | TASK | Sync cached facts 2026-01-22 13:05:03.182276 | fa163e0d-6f45-64a1-ca76-0000000048ca | CHANGED | Sync cached facts | standalone -> 192.168.122.100 [WARNING]: ('standalone -> 192.168.122.100', 'fa163e0d-6f45-64a1-ca76-0000000048ca') missing from stats 2026-01-22 13:05:03.230819 | fa163e0d-6f45-64a1-ca76-000000004882 | TASK | Include container-puppet tasks (generate config) during step 1 2026-01-22 13:05:03.284203 | fa163e0d-6f45-64a1-ca76-000000004882 | SKIPPED | Include container-puppet tasks (generate config) during step 1 | standalone 2026-01-22 13:05:03.285622 | fa163e0d-6f45-64a1-ca76-000000004882 | TIMING | Include container-puppet tasks (generate config) during step 1 | standalone | 0:25:57.250630 | 0.05s 2026-01-22 13:05:03.316511 | fa163e0d-6f45-64a1-ca76-000000004884 | TASK | Manage containers for step 4 with tripleo-ansible 2026-01-22 13:05:03.344894 | fa163e0d-6f45-64a1-ca76-000000004884 | TIMING | Manage containers for step 4 with tripleo-ansible | standalone | 0:25:57.309898 | 0.03s 2026-01-22 13:05:03.410597 | fa163e0d-6f45-64a1-ca76-000000004921 | TASK | Gather variables for each operating system 2026-01-22 13:05:03.526575 | fa163e0d-6f45-64a1-ca76-000000004921 | TIMING | tripleo_container_manage : Gather variables for each operating system | standalone | 0:25:57.491571 | 0.11s 2026-01-22 13:05:03.556257 | 
fa163e0d-6f45-64a1-ca76-000000004922 | TASK | Create container logs path 2026-01-22 13:05:03.814337 | fa163e0d-6f45-64a1-ca76-000000004922 | OK | Create container logs path | standalone 2026-01-22 13:05:03.815946 | fa163e0d-6f45-64a1-ca76-000000004922 | TIMING | tripleo_container_manage : Create container logs path | standalone | 0:25:57.780951 | 0.26s 2026-01-22 13:05:03.847787 | fa163e0d-6f45-64a1-ca76-000000004924 | TASK | Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_4 2026-01-22 13:05:04.157743 | fa163e0d-6f45-64a1-ca76-000000004924 | OK | Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_4 | standalone 2026-01-22 13:05:04.159166 | fa163e0d-6f45-64a1-ca76-000000004924 | TIMING | tripleo_container_manage : Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_4 | standalone | 0:25:58.124172 | 0.31s 2026-01-22 13:05:04.187041 | fa163e0d-6f45-64a1-ca76-000000004925 | TASK | Finalise hashes for all containers 2026-01-22 13:05:04.300112 | fa163e0d-6f45-64a1-ca76-000000004925 | OK | Finalise hashes for all containers | standalone 2026-01-22 13:05:04.301464 | fa163e0d-6f45-64a1-ca76-000000004925 | TIMING | tripleo_container_manage : Finalise hashes for all containers | standalone | 0:25:58.266471 | 0.11s 2026-01-22 13:05:04.333499 | fa163e0d-6f45-64a1-ca76-000000004927 | TASK | Manage systemd shutdown files 2026-01-22 13:05:04.363764 | fa163e0d-6f45-64a1-ca76-000000004927 | TIMING | tripleo_container_manage : Manage systemd shutdown files | standalone | 0:25:58.328763 | 0.03s 2026-01-22 13:05:04.394797 | af9df7b2-d844-42c4-884f-d6a115c80e4f | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/shutdown.yml | standalone 2026-01-22 13:05:04.422496 | fa163e0d-6f45-64a1-ca76-00000000495a | TASK | Check if /etc/sysconfig/podman_drop_in exists 2026-01-22 13:05:04.703948 | fa163e0d-6f45-64a1-ca76-00000000495a | OK | Check if /etc/sysconfig/podman_drop_in exists | standalone 2026-01-22 13:05:04.704903 | fa163e0d-6f45-64a1-ca76-00000000495a | TIMING | tripleo_container_manage : Check if /etc/sysconfig/podman_drop_in exists | standalone | 0:25:58.669919 | 0.28s 2026-01-22 13:05:04.724733 | fa163e0d-6f45-64a1-ca76-00000000495b | TASK | Set podman_drop_in fact 2026-01-22 13:05:04.785879 | fa163e0d-6f45-64a1-ca76-00000000495b | OK | Set podman_drop_in fact | standalone 2026-01-22 13:05:04.787108 | fa163e0d-6f45-64a1-ca76-00000000495b | TIMING | tripleo_container_manage : Set podman_drop_in fact | standalone | 0:25:58.752114 | 0.06s 2026-01-22 13:05:04.817274 | fa163e0d-6f45-64a1-ca76-00000000495d | TASK | Deploy tripleo-container-shutdown and tripleo-start-podman-container 2026-01-22 13:05:05.330144 | fa163e0d-6f45-64a1-ca76-00000000495d | OK | Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | item=tripleo-container-shutdown 2026-01-22 13:05:05.331898 | fa163e0d-6f45-64a1-ca76-00000000495d | TIMING | tripleo_container_manage : Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | 0:25:59.296908 | 0.51s 2026-01-22 13:05:05.767328 | fa163e0d-6f45-64a1-ca76-00000000495d | OK | Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | item=tripleo-start-podman-container 2026-01-22 13:05:05.768528 | fa163e0d-6f45-64a1-ca76-00000000495d | TIMING | tripleo_container_manage : Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | 0:25:59.733536 | 
0.95s 2026-01-22 13:05:05.785954 | fa163e0d-6f45-64a1-ca76-00000000495d | TIMING | tripleo_container_manage : Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | 0:25:59.750965 | 0.97s 2026-01-22 13:05:05.805705 | fa163e0d-6f45-64a1-ca76-00000000495e | TASK | Create /usr/lib/systemd/system/tripleo-container-shutdown.service 2026-01-22 13:05:06.292743 | fa163e0d-6f45-64a1-ca76-00000000495e | OK | Create /usr/lib/systemd/system/tripleo-container-shutdown.service | standalone 2026-01-22 13:05:06.295238 | fa163e0d-6f45-64a1-ca76-00000000495e | TIMING | tripleo_container_manage : Create /usr/lib/systemd/system/tripleo-container-shutdown.service | standalone | 0:26:00.260245 | 0.49s 2026-01-22 13:05:06.324809 | fa163e0d-6f45-64a1-ca76-00000000495f | TASK | Create /usr/lib/systemd/system-preset/91-tripleo-container-shutdown.preset 2026-01-22 13:05:06.828527 | fa163e0d-6f45-64a1-ca76-00000000495f | OK | Create /usr/lib/systemd/system-preset/91-tripleo-container-shutdown.preset | standalone 2026-01-22 13:05:06.829291 | fa163e0d-6f45-64a1-ca76-00000000495f | TIMING | tripleo_container_manage : Create /usr/lib/systemd/system-preset/91-tripleo-container-shutdown.preset | standalone | 0:26:00.794308 | 0.50s 2026-01-22 13:05:06.848410 | fa163e0d-6f45-64a1-ca76-000000004960 | TASK | Enable and start tripleo-container-shutdown 2026-01-22 13:05:07.555186 | fa163e0d-6f45-64a1-ca76-000000004960 | OK | Enable and start tripleo-container-shutdown | standalone 2026-01-22 13:05:07.556856 | fa163e0d-6f45-64a1-ca76-000000004960 | TIMING | tripleo_container_manage : Enable and start tripleo-container-shutdown | standalone | 0:26:01.521868 | 0.71s 2026-01-22 13:05:07.580951 | fa163e0d-6f45-64a1-ca76-000000004961 | TASK | Create /usr/lib/systemd/system/netns-placeholder.service 2026-01-22 13:05:08.024339 | fa163e0d-6f45-64a1-ca76-000000004961 | OK | Create /usr/lib/systemd/system/netns-placeholder.service | standalone 2026-01-22 13:05:08.025724 | fa163e0d-6f45-64a1-ca76-000000004961 | TIMING | tripleo_container_manage : Create /usr/lib/systemd/system/netns-placeholder.service | standalone | 0:26:01.990731 | 0.44s 2026-01-22 13:05:08.054336 | fa163e0d-6f45-64a1-ca76-000000004962 | TASK | Create /usr/lib/systemd/system-preset/91-netns-placeholder.preset 2026-01-22 13:05:08.527008 | fa163e0d-6f45-64a1-ca76-000000004962 | OK | Create /usr/lib/systemd/system-preset/91-netns-placeholder.preset | standalone 2026-01-22 13:05:08.528426 | fa163e0d-6f45-64a1-ca76-000000004962 | TIMING | tripleo_container_manage : Create /usr/lib/systemd/system-preset/91-netns-placeholder.preset | standalone | 0:26:02.493434 | 0.47s 2026-01-22 13:05:08.558102 | fa163e0d-6f45-64a1-ca76-000000004963 | TASK | Enable and start netns-placeholder 2026-01-22 13:05:09.309131 | fa163e0d-6f45-64a1-ca76-000000004963 | CHANGED | Enable and start netns-placeholder | standalone 2026-01-22 13:05:09.311941 | fa163e0d-6f45-64a1-ca76-000000004963 | TIMING | tripleo_container_manage : Enable and start netns-placeholder | standalone | 0:26:03.276935 | 0.75s 2026-01-22 13:05:09.343702 | fa163e0d-6f45-64a1-ca76-000000004929 | TASK | Update container configs with new config hashes 2026-01-22 13:05:09.404685 | fa163e0d-6f45-64a1-ca76-000000004929 | TIMING | tripleo_container_manage : Update container configs with new config hashes | standalone | 0:26:03.369668 | 0.06s 2026-01-22 13:05:09.417632 | a8b95903-e2f6-4ade-b8dd-3fb798766402 | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/puppet_config.yml | standalone 
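[Editor's note - not part of the captured log] The repeated "CHANGED ... / WAITING ... 30 retries left" pairs in this step (Keystone user creation, role assignment, and the later 360-retry "Wait for puppet host configuration to finish") are the output of Ansible's background-job pattern: a task is launched with async / poll: 0, and a follow-up async_status task polls the job id with until / retries / delay. The snippet below is a minimal, self-contained sketch of that generic pattern only; the sleep command, retry counts and task names are illustrative placeholders and are not the actual tripleo_keystone_resources or deploy-steps task definitions, and the exact "timestamp | uuid | STATUS" line format above comes from the deployment's logging callback rather than the default Ansible output.

    # Illustrative sketch only: generic async-launch + polled-status pattern.
    # All values are placeholders chosen to mirror the style of the log above.
    - hosts: localhost
      gather_facts: false
      tasks:
        - name: Async creation of a resource (launched in the background)
          ansible.builtin.command: sleep 8   # stand-in for the real client call
          async: 300                         # allow up to 300s in the background
          poll: 0                            # return immediately, do not wait here
          register: create_job

        - name: Check resource creation status
          ansible.builtin.async_status:
            jid: "{{ create_job.ansible_job_id }}"
          register: create_result
          until: create_result.finished      # keep polling until the job completes
          retries: 30                        # a pending poll is reported as "N retries left"
          delay: 5                           # seconds between polls

Run with ansible-playbook against localhost, a playbook like this produces the same launch-then-poll retry behaviour seen in this deploy step, without touching any OpenStack service.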
2026-01-22 13:05:09.441860 | fa163e0d-6f45-64a1-ca76-00000000498b | TASK | Update config hashes for container startup configs 2026-01-22 13:05:09.735443 | fa163e0d-6f45-64a1-ca76-00000000498b | OK | Update config hashes for container startup configs | standalone 2026-01-22 13:05:09.736431 | fa163e0d-6f45-64a1-ca76-00000000498b | TIMING | tripleo_container_manage : Update config hashes for container startup configs | standalone | 0:26:03.701445 | 0.29s 2026-01-22 13:05:09.760468 | fa163e0d-6f45-64a1-ca76-00000000492a | TASK | Delete orphan containers from /var/lib/tripleo-config/container-startup-config/step_4 2026-01-22 13:05:09.821053 | fa163e0d-6f45-64a1-ca76-00000000492a | TIMING | tripleo_container_manage : Delete orphan containers from /var/lib/tripleo-config/container-startup-config/step_4 | standalone | 0:26:03.786045 | 0.06s 2026-01-22 13:05:09.845685 | 4898d035-84ee-485d-a318-183efabc3d4e | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/delete_orphan.yml | standalone 2026-01-22 13:05:09.882156 | fa163e0d-6f45-64a1-ca76-0000000049aa | TASK | Gather podman infos 2026-01-22 13:05:11.384422 | fa163e0d-6f45-64a1-ca76-0000000049aa | OK | Gather podman infos | standalone 2026-01-22 13:05:11.385155 | fa163e0d-6f45-64a1-ca76-0000000049aa | TIMING | tripleo_container_manage : Gather podman infos | standalone | 0:26:05.350174 | 1.50s 2026-01-22 13:05:11.552002 | fa163e0d-6f45-64a1-ca76-0000000049ab | TASK | Delete orphan containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_4 2026-01-22 13:05:11.632293 | fa163e0d-6f45-64a1-ca76-0000000049ab | TIMING | Delete orphan containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_4 | standalone | 0:26:05.597288 | 0.08s 2026-01-22 13:05:11.801440 | fa163e0d-6f45-64a1-ca76-0000000049f4 | TIMING | tripleo_container_rm : include_tasks | standalone | 0:26:05.766443 | 0.11s 2026-01-22 13:05:11.848965 | fa163e0d-6f45-64a1-ca76-00000000492b | TASK | Create containers from /var/lib/tripleo-config/container-startup-config/step_4 2026-01-22 13:05:11.911854 | fa163e0d-6f45-64a1-ca76-00000000492b | TIMING | tripleo_container_manage : Create containers from /var/lib/tripleo-config/container-startup-config/step_4 | standalone | 0:26:05.876848 | 0.06s 2026-01-22 13:05:11.939666 | c78899ef-6b13-4b93-8f1f-fd4a2fe9b18f | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/create.yml | standalone 2026-01-22 13:05:11.973247 | fa163e0d-6f45-64a1-ca76-000000004a1a | TASK | Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_4 2026-01-22 13:05:33.579060 | fa163e0d-6f45-64a1-ca76-000000004a1a | CHANGED | Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_4 | standalone 2026-01-22 13:05:33.580752 | fa163e0d-6f45-64a1-ca76-000000004a1a | TIMING | tripleo_container_manage : Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_4 | standalone | 0:26:27.545763 | 21.61s 2026-01-22 13:05:33.605963 | fa163e0d-6f45-64a1-ca76-000000004a1b | TASK | Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_4 2026-01-22 13:07:41.552635 | fa163e0d-6f45-64a1-ca76-000000004a1b | CHANGED | Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_4 | standalone 2026-01-22 13:07:41.555718 | fa163e0d-6f45-64a1-ca76-000000004a1b | 
TIMING | tripleo_container_manage : Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_4 | standalone | 0:28:35.520725 | 127.95s 2026-01-22 13:07:41.630160 | fa163e0d-6f45-64a1-ca76-000000004886 | TASK | Clean container_puppet_tasks for standalone step 4 2026-01-22 13:07:41.871127 | fa163e0d-6f45-64a1-ca76-000000004886 | OK | Clean container_puppet_tasks for standalone step 4 | standalone 2026-01-22 13:07:41.872857 | fa163e0d-6f45-64a1-ca76-000000004886 | TIMING | Clean container_puppet_tasks for standalone step 4 | standalone | 0:28:35.837872 | 0.24s 2026-01-22 13:07:41.906613 | fa163e0d-6f45-64a1-ca76-000000004887 | TASK | Calculate container_puppet_tasks for standalone step 4 2026-01-22 13:07:41.965784 | fa163e0d-6f45-64a1-ca76-000000004887 | TIMING | Calculate container_puppet_tasks for standalone step 4 | standalone | 0:28:35.930791 | 0.06s 2026-01-22 13:07:41.989235 | fa163e0d-6f45-64a1-ca76-000000004888 | TASK | Include container-puppet tasks for step 4 2026-01-22 13:07:42.038770 | fa163e0d-6f45-64a1-ca76-000000004888 | TIMING | Include container-puppet tasks for step 4 | standalone | 0:28:36.003771 | 0.05s 2026-01-22 13:07:42.065259 | 2e47a783-6a56-4828-9218-a8654def8d23 | INCLUDED | /root/standalone-ansible-mz1ymllk/host-container-puppet-tasks.yaml | standalone 2026-01-22 13:07:42.103824 | fa163e0d-6f45-64a1-ca76-000000004a66 | TASK | Write container-puppet-tasks json file for standalone step 4 2026-01-22 13:07:42.649298 | fa163e0d-6f45-64a1-ca76-000000004a66 | CHANGED | Write container-puppet-tasks json file for standalone step 4 | standalone 2026-01-22 13:07:42.650251 | fa163e0d-6f45-64a1-ca76-000000004a66 | TIMING | Write container-puppet-tasks json file for standalone step 4 | standalone | 0:28:36.615264 | 0.54s 2026-01-22 13:07:42.674459 | fa163e0d-6f45-64a1-ca76-000000004a68 | TASK | Generate container puppet configs for step 4 2026-01-22 13:07:42.949267 | fa163e0d-6f45-64a1-ca76-000000004a68 | OK | Generate container puppet configs for step 4 | standalone 2026-01-22 13:07:42.950156 | fa163e0d-6f45-64a1-ca76-000000004a68 | TIMING | Generate container puppet configs for step 4 | standalone | 0:28:36.915173 | 0.27s 2026-01-22 13:07:42.970606 | fa163e0d-6f45-64a1-ca76-000000004a69 | TASK | Manage Puppet containers (bootstrap tasks) for step 4 with tripleo-ansible 2026-01-22 13:07:42.998801 | fa163e0d-6f45-64a1-ca76-000000004a69 | TIMING | Manage Puppet containers (bootstrap tasks) for step 4 with tripleo-ansible | standalone | 0:28:36.963814 | 0.03s 2026-01-22 13:07:43.043879 | fa163e0d-6f45-64a1-ca76-000000004a98 | TASK | Gather variables for each operating system 2026-01-22 13:07:43.167506 | fa163e0d-6f45-64a1-ca76-000000004a98 | TIMING | tripleo_container_manage : Gather variables for each operating system | standalone | 0:28:37.132494 | 0.12s 2026-01-22 13:07:43.198959 | fa163e0d-6f45-64a1-ca76-000000004a99 | TASK | Create container logs path 2026-01-22 13:07:43.503352 | fa163e0d-6f45-64a1-ca76-000000004a99 | OK | Create container logs path | standalone 2026-01-22 13:07:43.504957 | fa163e0d-6f45-64a1-ca76-000000004a99 | TIMING | tripleo_container_manage : Create container logs path | standalone | 0:28:37.469959 | 0.30s 2026-01-22 13:07:43.540954 | fa163e0d-6f45-64a1-ca76-000000004a9b | TASK | Find all matching configs configs for in /var/lib/tripleo-config/container-puppet-config/step_4 2026-01-22 13:07:43.795564 | fa163e0d-6f45-64a1-ca76-000000004a9b | OK | Find all matching configs configs for 
in /var/lib/tripleo-config/container-puppet-config/step_4 | standalone 2026-01-22 13:07:43.796824 | fa163e0d-6f45-64a1-ca76-000000004a9b | TIMING | tripleo_container_manage : Find all matching configs configs for in /var/lib/tripleo-config/container-puppet-config/step_4 | standalone | 0:28:37.761833 | 0.25s 2026-01-22 13:07:43.825929 | fa163e0d-6f45-64a1-ca76-000000004a9c | TASK | Finalise hashes for all containers 2026-01-22 13:07:43.877846 | fa163e0d-6f45-64a1-ca76-000000004a9c | OK | Finalise hashes for all containers | standalone 2026-01-22 13:07:43.879121 | fa163e0d-6f45-64a1-ca76-000000004a9c | TIMING | tripleo_container_manage : Finalise hashes for all containers | standalone | 0:28:37.844130 | 0.05s 2026-01-22 13:07:43.911976 | fa163e0d-6f45-64a1-ca76-000000004a9e | TASK | Manage systemd shutdown files 2026-01-22 13:07:43.959604 | fa163e0d-6f45-64a1-ca76-000000004a9e | SKIPPED | Manage systemd shutdown files | standalone 2026-01-22 13:07:43.960959 | fa163e0d-6f45-64a1-ca76-000000004a9e | TIMING | tripleo_container_manage : Manage systemd shutdown files | standalone | 0:28:37.925966 | 0.05s 2026-01-22 13:07:43.992415 | fa163e0d-6f45-64a1-ca76-000000004aa0 | TASK | Update container configs with new config hashes 2026-01-22 13:07:44.045049 | fa163e0d-6f45-64a1-ca76-000000004aa0 | SKIPPED | Update container configs with new config hashes | standalone 2026-01-22 13:07:44.046542 | fa163e0d-6f45-64a1-ca76-000000004aa0 | TIMING | tripleo_container_manage : Update container configs with new config hashes | standalone | 0:28:38.011543 | 0.05s 2026-01-22 13:07:44.084803 | fa163e0d-6f45-64a1-ca76-000000004aa1 | TASK | Delete orphan containers from /var/lib/tripleo-config/container-puppet-config/step_4 2026-01-22 13:07:44.138378 | fa163e0d-6f45-64a1-ca76-000000004aa1 | SKIPPED | Delete orphan containers from /var/lib/tripleo-config/container-puppet-config/step_4 | standalone 2026-01-22 13:07:44.139873 | fa163e0d-6f45-64a1-ca76-000000004aa1 | TIMING | tripleo_container_manage : Delete orphan containers from /var/lib/tripleo-config/container-puppet-config/step_4 | standalone | 0:28:38.104872 | 0.05s 2026-01-22 13:07:44.182269 | fa163e0d-6f45-64a1-ca76-000000004aa2 | TASK | Create containers from /var/lib/tripleo-config/container-puppet-config/step_4 2026-01-22 13:07:44.234701 | fa163e0d-6f45-64a1-ca76-000000004aa2 | SKIPPED | Create containers from /var/lib/tripleo-config/container-puppet-config/step_4 | standalone 2026-01-22 13:07:44.235530 | fa163e0d-6f45-64a1-ca76-000000004aa2 | TIMING | tripleo_container_manage : Create containers from /var/lib/tripleo-config/container-puppet-config/step_4 | standalone | 0:28:38.200542 | 0.05s PLAY [External deployment step 5] ********************************************** 2026-01-22 13:07:44.380253 | fa163e0d-6f45-64a1-ca76-0000000000ea | TASK | External deployment step 5 2026-01-22 13:07:44.423946 | fa163e0d-6f45-64a1-ca76-0000000000ea | OK | External deployment step 5 | undercloud -> localhost | result={ "changed": false, "msg": "Use --start-at-task 'External deployment step 5' to resume from this task" } [WARNING]: ('undercloud -> localhost', 'fa163e0d-6f45-64a1-ca76-0000000000ea') missing from stats 2026-01-22 13:07:44.484902 | fa163e0d-6f45-64a1-ca76-0000000000eb | TIMING | include_tasks | undercloud | 0:28:38.449904 | 0.04s 2026-01-22 13:07:44.496697 | 5d59b6fa-beb5-4eb3-9c5a-8fe6884bb142 | INCLUDED | /root/standalone-ansible-mz1ymllk/external_deploy_steps_tasks_step5.yaml | undercloud 2026-01-22 13:07:44.515367 | fa163e0d-6f45-64a1-ca76-000000004af1 | 
TASK | Manage Cinder's default volume type 2026-01-22 13:07:54.664598 | fa163e0d-6f45-64a1-ca76-000000004af1 | OK | Manage Cinder's default volume type | undercloud 2026-01-22 13:07:54.666240 | fa163e0d-6f45-64a1-ca76-000000004af1 | TIMING | Manage Cinder's default volume type | undercloud | 0:28:48.631248 | 10.15s PLAY [Deploy step tasks for 5] ************************************************* 2026-01-22 13:07:54.822384 | fa163e0d-6f45-64a1-ca76-0000000000ee | TASK | Deploy step tasks for 5 2026-01-22 13:07:54.856029 | fa163e0d-6f45-64a1-ca76-0000000000ee | OK | Deploy step tasks for 5 | standalone -> localhost | result={ "changed": false, "msg": "Use --start-at-task 'Deploy step tasks for 5' to resume from this task" } [WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-0000000000ee') missing from stats 2026-01-22 13:07:54.885790 | fa163e0d-6f45-64a1-ca76-0000000000ef | TASK | Write the config_step hieradata for the deploy step 5 tasks 2026-01-22 13:07:55.421160 | fa163e0d-6f45-64a1-ca76-0000000000ef | CHANGED | Write the config_step hieradata for the deploy step 5 tasks | standalone 2026-01-22 13:07:55.422762 | fa163e0d-6f45-64a1-ca76-0000000000ef | TIMING | Write the config_step hieradata for the deploy step 5 tasks | standalone | 0:28:49.387769 | 0.54s [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. Found: '{{ playbook_dir }}/{{ _task_file_path }}' is exists 2026-01-22 13:07:55.495554 | fa163e0d-6f45-64a1-ca76-0000000000f0 | TIMING | include_tasks | standalone | 0:28:49.460556 | 0.04s 2026-01-22 13:07:55.588989 | 1878cedc-7cc3-4655-9e23-e957ce84d3eb | INCLUDED | /root/standalone-ansible-mz1ymllk/Standalone/deploy_steps_tasks_step5.yaml | standalone 2026-01-22 13:07:55.628252 | fa163e0d-6f45-64a1-ca76-000000004b14 | TASK | Gather variables for each operating system 2026-01-22 13:07:55.744316 | fa163e0d-6f45-64a1-ca76-000000004b14 | TIMING | tripleo_ha_wrapper : Gather variables for each operating system | standalone | 0:28:49.709312 | 0.11s 2026-01-22 13:07:55.778621 | fa163e0d-6f45-64a1-ca76-000000004b15 | TASK | Detect if resource is being created or already exists 2026-01-22 13:07:56.546443 | fa163e0d-6f45-64a1-ca76-000000004b15 | CHANGED | Detect if resource is being created or already exists | standalone 2026-01-22 13:07:56.548056 | fa163e0d-6f45-64a1-ca76-000000004b15 | TIMING | tripleo_ha_wrapper : Detect if resource is being created or already exists | standalone | 0:28:50.513061 | 0.77s 2026-01-22 13:07:56.582735 | fa163e0d-6f45-64a1-ca76-000000004b16 | TASK | Run init bundle puppet on the host for cinder_backup 2026-01-22 13:08:21.407829 | fa163e0d-6f45-64a1-ca76-000000004b16 | CHANGED | Run init bundle puppet on the host for cinder_backup | standalone 2026-01-22 13:08:21.409051 | fa163e0d-6f45-64a1-ca76-000000004b16 | TIMING | tripleo_ha_wrapper : Run init bundle puppet on the host for cinder_backup | standalone | 0:29:15.374065 | 24.82s 2026-01-22 13:08:21.431075 | fa163e0d-6f45-64a1-ca76-000000004b17 | TASK | Check if /var/lib/config-data/puppet-generated/cinder exists 2026-01-22 13:08:21.637589 | fa163e0d-6f45-64a1-ca76-000000004b17 | OK | Check if /var/lib/config-data/puppet-generated/cinder exists | standalone 2026-01-22 13:08:21.639215 | fa163e0d-6f45-64a1-ca76-000000004b17 | TIMING | tripleo_ha_wrapper : Check if /var/lib/config-data/puppet-generated/cinder exists | standalone | 0:29:15.604223 | 0.21s 2026-01-22 13:08:21.672171 | fa163e0d-6f45-64a1-ca76-000000004b18 | TASK | Run pacemaker restart if 
the config file for the service changed 2026-01-22 13:08:22.119235 | fa163e0d-6f45-64a1-ca76-000000004b18 | CHANGED | Run pacemaker restart if the config file for the service changed | standalone 2026-01-22 13:08:22.120243 | fa163e0d-6f45-64a1-ca76-000000004b18 | TIMING | tripleo_ha_wrapper : Run pacemaker restart if the config file for the service changed | standalone | 0:29:16.085255 | 0.45s 2026-01-22 13:08:22.146286 | fa163e0d-6f45-64a1-ca76-000000004b19 | TASK | Check if an image update marker exists for bundle openstack-cinder-backup 2026-01-22 13:08:22.330563 | fa163e0d-6f45-64a1-ca76-000000004b19 | OK | Check if an image update marker exists for bundle openstack-cinder-backup | standalone 2026-01-22 13:08:22.331628 | fa163e0d-6f45-64a1-ca76-000000004b19 | TIMING | tripleo_ha_wrapper : Check if an image update marker exists for bundle openstack-cinder-backup | standalone | 0:29:16.296644 | 0.18s 2026-01-22 13:08:22.352186 | fa163e0d-6f45-64a1-ca76-000000004b1b | TASK | Get container image tag from marker for bundle openstack-cinder-backup 2026-01-22 13:08:22.392205 | fa163e0d-6f45-64a1-ca76-000000004b1b | SKIPPED | Get container image tag from marker for bundle openstack-cinder-backup | standalone 2026-01-22 13:08:22.392969 | fa163e0d-6f45-64a1-ca76-000000004b1b | TIMING | tripleo_ha_wrapper : Get container image tag from marker for bundle openstack-cinder-backup | standalone | 0:29:16.357986 | 0.04s 2026-01-22 13:08:22.414331 | fa163e0d-6f45-64a1-ca76-000000004b1c | TASK | Get container image tag from bundle openstack-cinder-backup 2026-01-22 13:08:22.457271 | fa163e0d-6f45-64a1-ca76-000000004b1c | SKIPPED | Get container image tag from bundle openstack-cinder-backup | standalone 2026-01-22 13:08:22.458060 | fa163e0d-6f45-64a1-ca76-000000004b1c | TIMING | tripleo_ha_wrapper : Get container image tag from bundle openstack-cinder-backup | standalone | 0:29:16.423077 | 0.04s 2026-01-22 13:08:22.480542 | fa163e0d-6f45-64a1-ca76-000000004b1d | TASK | Compare tags between marker and bundle openstack-cinder-backup 2026-01-22 13:08:22.518213 | fa163e0d-6f45-64a1-ca76-000000004b1d | SKIPPED | Compare tags between marker and bundle openstack-cinder-backup | standalone 2026-01-22 13:08:22.519374 | fa163e0d-6f45-64a1-ca76-000000004b1d | TIMING | tripleo_ha_wrapper : Compare tags between marker and bundle openstack-cinder-backup | standalone | 0:29:16.484389 | 0.04s 2026-01-22 13:08:22.541828 | fa163e0d-6f45-64a1-ca76-000000004b1e | TASK | Remove old image tag {{ old_image }} for bundle {{ tripleo_ha_wrapper_bundle_name }} 2026-01-22 13:08:22.592455 | fa163e0d-6f45-64a1-ca76-000000004b1e | SKIPPED | Remove old image tag {{ old_image }} for bundle {{ tripleo_ha_wrapper_bundle_name }} | standalone 2026-01-22 13:08:22.593429 | fa163e0d-6f45-64a1-ca76-000000004b1e | TIMING | tripleo_ha_wrapper : Remove old image tag {{ old_image }} for bundle {{ tripleo_ha_wrapper_bundle_name }} | standalone | 0:29:16.558444 | 0.05s 2026-01-22 13:08:22.615022 | fa163e0d-6f45-64a1-ca76-000000004b1f | TASK | Remove update marker for bundle openstack-cinder-backup 2026-01-22 13:08:22.654712 | fa163e0d-6f45-64a1-ca76-000000004b1f | SKIPPED | Remove update marker for bundle openstack-cinder-backup | standalone 2026-01-22 13:08:22.655718 | fa163e0d-6f45-64a1-ca76-000000004b1f | TIMING | tripleo_ha_wrapper : Remove update marker for bundle openstack-cinder-backup | standalone | 0:29:16.620728 | 0.04s 2026-01-22 13:08:22.701434 | fa163e0d-6f45-64a1-ca76-000000004b40 | TASK | Gather variables for each operating system 
2026-01-22 13:08:22.817528 | fa163e0d-6f45-64a1-ca76-000000004b40 | TIMING | tripleo_ha_wrapper : Gather variables for each operating system | standalone | 0:29:16.782533 | 0.12s 2026-01-22 13:08:22.840637 | fa163e0d-6f45-64a1-ca76-000000004b41 | TASK | Detect if resource is being created or already exists 2026-01-22 13:08:23.601613 | fa163e0d-6f45-64a1-ca76-000000004b41 | CHANGED | Detect if resource is being created or already exists | standalone 2026-01-22 13:08:23.603314 | fa163e0d-6f45-64a1-ca76-000000004b41 | TIMING | tripleo_ha_wrapper : Detect if resource is being created or already exists | standalone | 0:29:17.568321 | 0.76s 2026-01-22 13:08:23.636048 | fa163e0d-6f45-64a1-ca76-000000004b42 | TASK | Run init bundle puppet on the host for cinder_volume 2026-01-22 13:08:48.708353 | fa163e0d-6f45-64a1-ca76-000000004b42 | CHANGED | Run init bundle puppet on the host for cinder_volume | standalone 2026-01-22 13:08:48.709590 | fa163e0d-6f45-64a1-ca76-000000004b42 | TIMING | tripleo_ha_wrapper : Run init bundle puppet on the host for cinder_volume | standalone | 0:29:42.674603 | 25.07s 2026-01-22 13:08:48.734543 | fa163e0d-6f45-64a1-ca76-000000004b43 | TASK | Check if /var/lib/config-data/puppet-generated/cinder exists 2026-01-22 13:08:48.962764 | fa163e0d-6f45-64a1-ca76-000000004b43 | OK | Check if /var/lib/config-data/puppet-generated/cinder exists | standalone 2026-01-22 13:08:48.964065 | fa163e0d-6f45-64a1-ca76-000000004b43 | TIMING | tripleo_ha_wrapper : Check if /var/lib/config-data/puppet-generated/cinder exists | standalone | 0:29:42.929075 | 0.23s 2026-01-22 13:08:48.988986 | fa163e0d-6f45-64a1-ca76-000000004b44 | TASK | Run pacemaker restart if the config file for the service changed 2026-01-22 13:08:49.462419 | fa163e0d-6f45-64a1-ca76-000000004b44 | CHANGED | Run pacemaker restart if the config file for the service changed | standalone 2026-01-22 13:08:49.463835 | fa163e0d-6f45-64a1-ca76-000000004b44 | TIMING | tripleo_ha_wrapper : Run pacemaker restart if the config file for the service changed | standalone | 0:29:43.428843 | 0.47s 2026-01-22 13:08:49.495511 | fa163e0d-6f45-64a1-ca76-000000004b45 | TASK | Check if an image update marker exists for bundle openstack-cinder-volume 2026-01-22 13:08:49.708443 | fa163e0d-6f45-64a1-ca76-000000004b45 | OK | Check if an image update marker exists for bundle openstack-cinder-volume | standalone 2026-01-22 13:08:49.709981 | fa163e0d-6f45-64a1-ca76-000000004b45 | TIMING | tripleo_ha_wrapper : Check if an image update marker exists for bundle openstack-cinder-volume | standalone | 0:29:43.674989 | 0.21s 2026-01-22 13:08:49.747932 | fa163e0d-6f45-64a1-ca76-000000004b47 | TASK | Get container image tag from marker for bundle openstack-cinder-volume 2026-01-22 13:08:49.810106 | fa163e0d-6f45-64a1-ca76-000000004b47 | SKIPPED | Get container image tag from marker for bundle openstack-cinder-volume | standalone 2026-01-22 13:08:49.811372 | fa163e0d-6f45-64a1-ca76-000000004b47 | TIMING | tripleo_ha_wrapper : Get container image tag from marker for bundle openstack-cinder-volume | standalone | 0:29:43.776379 | 0.06s 2026-01-22 13:08:49.842575 | fa163e0d-6f45-64a1-ca76-000000004b48 | TASK | Get container image tag from bundle openstack-cinder-volume 2026-01-22 13:08:49.905723 | fa163e0d-6f45-64a1-ca76-000000004b48 | SKIPPED | Get container image tag from bundle openstack-cinder-volume | standalone 2026-01-22 13:08:49.906695 | fa163e0d-6f45-64a1-ca76-000000004b48 | TIMING | tripleo_ha_wrapper : Get container image tag from bundle 
openstack-cinder-volume | standalone | 0:29:43.871708 | 0.06s 2026-01-22 13:08:49.930999 | fa163e0d-6f45-64a1-ca76-000000004b49 | TASK | Compare tags between marker and bundle openstack-cinder-volume 2026-01-22 13:08:49.981755 | fa163e0d-6f45-64a1-ca76-000000004b49 | SKIPPED | Compare tags between marker and bundle openstack-cinder-volume | standalone 2026-01-22 13:08:49.982939 | fa163e0d-6f45-64a1-ca76-000000004b49 | TIMING | tripleo_ha_wrapper : Compare tags between marker and bundle openstack-cinder-volume | standalone | 0:29:43.947944 | 0.05s 2026-01-22 13:08:50.007680 | fa163e0d-6f45-64a1-ca76-000000004b4a | TASK | Remove old image tag {{ old_image }} for bundle {{ tripleo_ha_wrapper_bundle_name }} 2026-01-22 13:08:50.045639 | fa163e0d-6f45-64a1-ca76-000000004b4a | SKIPPED | Remove old image tag {{ old_image }} for bundle {{ tripleo_ha_wrapper_bundle_name }} | standalone 2026-01-22 13:08:50.046477 | fa163e0d-6f45-64a1-ca76-000000004b4a | TIMING | tripleo_ha_wrapper : Remove old image tag {{ old_image }} for bundle {{ tripleo_ha_wrapper_bundle_name }} | standalone | 0:29:44.011492 | 0.04s 2026-01-22 13:08:50.067977 | fa163e0d-6f45-64a1-ca76-000000004b4b | TASK | Remove update marker for bundle openstack-cinder-volume 2026-01-22 13:08:50.115278 | fa163e0d-6f45-64a1-ca76-000000004b4b | SKIPPED | Remove update marker for bundle openstack-cinder-volume | standalone 2026-01-22 13:08:50.116518 | fa163e0d-6f45-64a1-ca76-000000004b4b | TIMING | tripleo_ha_wrapper : Remove update marker for bundle openstack-cinder-volume | standalone | 0:29:44.081531 | 0.05s 2026-01-22 13:08:50.154013 | fa163e0d-6f45-64a1-ca76-000000004b6c | TASK | Gather variables for each operating system 2026-01-22 13:08:50.245490 | fa163e0d-6f45-64a1-ca76-000000004b6c | TIMING | tripleo_ha_wrapper : Gather variables for each operating system | standalone | 0:29:44.210485 | 0.09s 2026-01-22 13:08:50.276176 | fa163e0d-6f45-64a1-ca76-000000004b6d | TASK | Detect if resource is being created or already exists 2026-01-22 13:08:50.993070 | fa163e0d-6f45-64a1-ca76-000000004b6d | CHANGED | Detect if resource is being created or already exists | standalone 2026-01-22 13:08:50.994734 | fa163e0d-6f45-64a1-ca76-000000004b6d | TIMING | tripleo_ha_wrapper : Detect if resource is being created or already exists | standalone | 0:29:44.959739 | 0.72s 2026-01-22 13:08:51.026430 | fa163e0d-6f45-64a1-ca76-000000004b6e | TASK | Run init bundle puppet on the host for manila_share 2026-01-22 13:09:15.282510 | fa163e0d-6f45-64a1-ca76-000000004b6e | CHANGED | Run init bundle puppet on the host for manila_share | standalone 2026-01-22 13:09:15.283635 | fa163e0d-6f45-64a1-ca76-000000004b6e | TIMING | tripleo_ha_wrapper : Run init bundle puppet on the host for manila_share | standalone | 0:30:09.248648 | 24.26s 2026-01-22 13:09:15.304762 | fa163e0d-6f45-64a1-ca76-000000004b6f | TASK | Check if /var/lib/config-data/puppet-generated/manila exists 2026-01-22 13:09:15.574880 | fa163e0d-6f45-64a1-ca76-000000004b6f | OK | Check if /var/lib/config-data/puppet-generated/manila exists | standalone 2026-01-22 13:09:15.575905 | fa163e0d-6f45-64a1-ca76-000000004b6f | TIMING | tripleo_ha_wrapper : Check if /var/lib/config-data/puppet-generated/manila exists | standalone | 0:30:09.540918 | 0.27s 2026-01-22 13:09:15.596212 | fa163e0d-6f45-64a1-ca76-000000004b70 | TASK | Run pacemaker restart if the config file for the service changed 2026-01-22 13:09:16.045812 | fa163e0d-6f45-64a1-ca76-000000004b70 | CHANGED | Run pacemaker restart if the config file for the service 
changed | standalone 2026-01-22 13:09:16.047014 | fa163e0d-6f45-64a1-ca76-000000004b70 | TIMING | tripleo_ha_wrapper : Run pacemaker restart if the config file for the service changed | standalone | 0:30:10.012025 | 0.45s 2026-01-22 13:09:16.071905 | fa163e0d-6f45-64a1-ca76-000000004b71 | TASK | Check if an image update marker exists for bundle openstack-manila-share 2026-01-22 13:09:16.294150 | fa163e0d-6f45-64a1-ca76-000000004b71 | OK | Check if an image update marker exists for bundle openstack-manila-share | standalone 2026-01-22 13:09:16.295436 | fa163e0d-6f45-64a1-ca76-000000004b71 | TIMING | tripleo_ha_wrapper : Check if an image update marker exists for bundle openstack-manila-share | standalone | 0:30:10.260448 | 0.22s 2026-01-22 13:09:16.321461 | fa163e0d-6f45-64a1-ca76-000000004b73 | TASK | Get container image tag from marker for bundle openstack-manila-share 2026-01-22 13:09:16.361037 | fa163e0d-6f45-64a1-ca76-000000004b73 | SKIPPED | Get container image tag from marker for bundle openstack-manila-share | standalone 2026-01-22 13:09:16.361930 | fa163e0d-6f45-64a1-ca76-000000004b73 | TIMING | tripleo_ha_wrapper : Get container image tag from marker for bundle openstack-manila-share | standalone | 0:30:10.326943 | 0.04s 2026-01-22 13:09:16.386067 | fa163e0d-6f45-64a1-ca76-000000004b74 | TASK | Get container image tag from bundle openstack-manila-share 2026-01-22 13:09:16.429699 | fa163e0d-6f45-64a1-ca76-000000004b74 | SKIPPED | Get container image tag from bundle openstack-manila-share | standalone 2026-01-22 13:09:16.430562 | fa163e0d-6f45-64a1-ca76-000000004b74 | TIMING | tripleo_ha_wrapper : Get container image tag from bundle openstack-manila-share | standalone | 0:30:10.395574 | 0.04s 2026-01-22 13:09:16.455147 | fa163e0d-6f45-64a1-ca76-000000004b75 | TASK | Compare tags between marker and bundle openstack-manila-share 2026-01-22 13:09:16.486853 | fa163e0d-6f45-64a1-ca76-000000004b75 | SKIPPED | Compare tags between marker and bundle openstack-manila-share | standalone 2026-01-22 13:09:16.487621 | fa163e0d-6f45-64a1-ca76-000000004b75 | TIMING | tripleo_ha_wrapper : Compare tags between marker and bundle openstack-manila-share | standalone | 0:30:10.452635 | 0.03s 2026-01-22 13:09:16.512609 | fa163e0d-6f45-64a1-ca76-000000004b76 | TASK | Remove old image tag {{ old_image }} for bundle {{ tripleo_ha_wrapper_bundle_name }} 2026-01-22 13:09:16.561008 | fa163e0d-6f45-64a1-ca76-000000004b76 | SKIPPED | Remove old image tag {{ old_image }} for bundle {{ tripleo_ha_wrapper_bundle_name }} | standalone 2026-01-22 13:09:16.561812 | fa163e0d-6f45-64a1-ca76-000000004b76 | TIMING | tripleo_ha_wrapper : Remove old image tag {{ old_image }} for bundle {{ tripleo_ha_wrapper_bundle_name }} | standalone | 0:30:10.526823 | 0.05s 2026-01-22 13:09:16.585855 | fa163e0d-6f45-64a1-ca76-000000004b77 | TASK | Remove update marker for bundle openstack-manila-share 2026-01-22 13:09:16.634870 | fa163e0d-6f45-64a1-ca76-000000004b77 | SKIPPED | Remove update marker for bundle openstack-manila-share | standalone 2026-01-22 13:09:16.635624 | fa163e0d-6f45-64a1-ca76-000000004b77 | TIMING | tripleo_ha_wrapper : Remove update marker for bundle openstack-manila-share | standalone | 0:30:10.600637 | 0.05s 2026-01-22 13:09:16.679216 | fa163e0d-6f45-64a1-ca76-000000004b98 | TASK | Run kolla_set_configs to copy ring files 2026-01-22 13:09:17.224582 | fa163e0d-6f45-64a1-ca76-000000004b98 | CHANGED | Run kolla_set_configs to copy ring files | standalone | item=swift_proxy 2026-01-22 13:09:17.229263 | 
fa163e0d-6f45-64a1-ca76-000000004b98 | TIMING | Run kolla_set_configs to copy ring files | standalone | 0:30:11.194275 | 0.55s 2026-01-22 13:09:17.237344 | fa163e0d-6f45-64a1-ca76-000000004b98 | TIMING | Run kolla_set_configs to copy ring files | standalone | 0:30:11.202345 | 0.56s 2026-01-22 13:09:17.272349 | fa163e0d-6f45-64a1-ca76-000000004b99 | TASK | Run kolla_set_configs to copy ring files 2026-01-22 13:09:17.593437 | fa163e0d-6f45-64a1-ca76-000000004b99 | CHANGED | Run kolla_set_configs to copy ring files | standalone | item=swift_account_auditor 2026-01-22 13:09:17.597248 | fa163e0d-6f45-64a1-ca76-000000004b99 | TIMING | Run kolla_set_configs to copy ring files | standalone | 0:30:11.562193 | 0.32s 2026-01-22 13:09:18.068919 | fa163e0d-6f45-64a1-ca76-000000004b99 | CHANGED | Run kolla_set_configs to copy ring files | standalone | item=swift_account_reaper 2026-01-22 13:09:18.071959 | fa163e0d-6f45-64a1-ca76-000000004b99 | TIMING | Run kolla_set_configs to copy ring files | standalone | 0:30:12.036967 | 0.80s 2026-01-22 13:09:18.352357 | fa163e0d-6f45-64a1-ca76-000000004b99 | CHANGED | Run kolla_set_configs to copy ring files | standalone | item=swift_account_replicator 2026-01-22 13:09:18.353198 | fa163e0d-6f45-64a1-ca76-000000004b99 | TIMING | Run kolla_set_configs to copy ring files | standalone | 0:30:12.318212 | 1.08s 2026-01-22 13:09:18.830536 | fa163e0d-6f45-64a1-ca76-000000004b99 | CHANGED | Run kolla_set_configs to copy ring files | standalone | item=swift_account_server 2026-01-22 13:09:18.832354 | fa163e0d-6f45-64a1-ca76-000000004b99 | TIMING | Run kolla_set_configs to copy ring files | standalone | 0:30:12.797367 | 1.56s 2026-01-22 13:09:19.133127 | fa163e0d-6f45-64a1-ca76-000000004b99 | CHANGED | Run kolla_set_configs to copy ring files | standalone | item=swift_container_auditor 2026-01-22 13:09:19.135506 | fa163e0d-6f45-64a1-ca76-000000004b99 | TIMING | Run kolla_set_configs to copy ring files | standalone | 0:30:13.100514 | 1.86s 2026-01-22 13:09:19.444903 | fa163e0d-6f45-64a1-ca76-000000004b99 | CHANGED | Run kolla_set_configs to copy ring files | standalone | item=swift_container_replicator 2026-01-22 13:09:19.447408 | fa163e0d-6f45-64a1-ca76-000000004b99 | TIMING | Run kolla_set_configs to copy ring files | standalone | 0:30:13.412282 | 2.17s 2026-01-22 13:09:19.924002 | fa163e0d-6f45-64a1-ca76-000000004b99 | CHANGED | Run kolla_set_configs to copy ring files | standalone | item=swift_container_server 2026-01-22 13:09:19.925596 | fa163e0d-6f45-64a1-ca76-000000004b99 | TIMING | Run kolla_set_configs to copy ring files | standalone | 0:30:13.890610 | 2.65s 2026-01-22 13:09:20.411566 | fa163e0d-6f45-64a1-ca76-000000004b99 | CHANGED | Run kolla_set_configs to copy ring files | standalone | item=swift_container_updater 2026-01-22 13:09:20.414493 | fa163e0d-6f45-64a1-ca76-000000004b99 | TIMING | Run kolla_set_configs to copy ring files | standalone | 0:30:14.379493 | 3.14s 2026-01-22 13:09:20.745064 | fa163e0d-6f45-64a1-ca76-000000004b99 | CHANGED | Run kolla_set_configs to copy ring files | standalone | item=swift_object_auditor 2026-01-22 13:09:20.747162 | fa163e0d-6f45-64a1-ca76-000000004b99 | TIMING | Run kolla_set_configs to copy ring files | standalone | 0:30:14.712171 | 3.47s 2026-01-22 13:09:21.219545 | fa163e0d-6f45-64a1-ca76-000000004b99 | CHANGED | Run kolla_set_configs to copy ring files | standalone | item=swift_object_expirer 2026-01-22 13:09:21.220507 | fa163e0d-6f45-64a1-ca76-000000004b99 | TIMING | Run kolla_set_configs to copy ring files | standalone | 
0:30:15.185522 | 3.95s 2026-01-22 13:09:21.583577 | fa163e0d-6f45-64a1-ca76-000000004b99 | CHANGED | Run kolla_set_configs to copy ring files | standalone | item=swift_object_replicator 2026-01-22 13:09:21.584687 | fa163e0d-6f45-64a1-ca76-000000004b99 | TIMING | Run kolla_set_configs to copy ring files | standalone | 0:30:15.549668 | 4.31s 2026-01-22 13:09:22.086187 | fa163e0d-6f45-64a1-ca76-000000004b99 | CHANGED | Run kolla_set_configs to copy ring files | standalone | item=swift_object_server 2026-01-22 13:09:22.087751 | fa163e0d-6f45-64a1-ca76-000000004b99 | TIMING | Run kolla_set_configs to copy ring files | standalone | 0:30:16.052759 | 4.81s 2026-01-22 13:09:22.557290 | fa163e0d-6f45-64a1-ca76-000000004b99 | CHANGED | Run kolla_set_configs to copy ring files | standalone | item=swift_object_updater 2026-01-22 13:09:22.559209 | fa163e0d-6f45-64a1-ca76-000000004b99 | TIMING | Run kolla_set_configs to copy ring files | standalone | 0:30:16.524212 | 5.29s 2026-01-22 13:09:22.574095 | fa163e0d-6f45-64a1-ca76-000000004b99 | TIMING | Run kolla_set_configs to copy ring files | standalone | 0:30:16.539094 | 5.30s 2026-01-22 13:09:22.610512 | fa163e0d-6f45-64a1-ca76-0000000000f2 | TASK | Check if /var/lib/tripleo-config/container-startup-config/step_5 already exists 2026-01-22 13:09:22.857402 | fa163e0d-6f45-64a1-ca76-0000000000f2 | OK | Check if /var/lib/tripleo-config/container-startup-config/step_5 already exists | standalone 2026-01-22 13:09:22.858816 | fa163e0d-6f45-64a1-ca76-0000000000f2 | TIMING | Check if /var/lib/tripleo-config/container-startup-config/step_5 already exists | standalone | 0:30:16.823825 | 0.25s 2026-01-22 13:09:22.972830 | fa163e0d-6f45-64a1-ca76-0000000000f3 | TIMING | include_tasks | standalone | 0:30:16.937825 | 0.08s 2026-01-22 13:09:23.014751 | 107f8c63-8a75-486c-9615-82386333b4ad | INCLUDED | /root/standalone-ansible-mz1ymllk/common_deploy_steps_tasks.yaml | standalone 2026-01-22 13:09:23.041269 | fa163e0d-6f45-64a1-ca76-000000004c16 | TASK | Write the config_step hieradata 2026-01-22 13:09:23.535256 | fa163e0d-6f45-64a1-ca76-000000004c16 | OK | Write the config_step hieradata | standalone 2026-01-22 13:09:23.536517 | fa163e0d-6f45-64a1-ca76-000000004c16 | TIMING | Write the config_step hieradata | standalone | 0:30:17.501524 | 0.49s 2026-01-22 13:09:23.566358 | fa163e0d-6f45-64a1-ca76-000000004c17 | TASK | Run puppet host configuration for step 5 2026-01-22 13:09:23.831690 | fa163e0d-6f45-64a1-ca76-000000004c17 | CHANGED | Run puppet host configuration for step 5 | standalone 2026-01-22 13:09:23.833187 | fa163e0d-6f45-64a1-ca76-000000004c17 | TIMING | Run puppet host configuration for step 5 | standalone | 0:30:17.798189 | 0.27s 2026-01-22 13:09:23.866141 | fa163e0d-6f45-64a1-ca76-000000004c18 | TASK | Wait for puppet host configuration to finish 2026-01-22 13:09:24.126889 | fa163e0d-6f45-64a1-ca76-000000004c18 | WAITING | Wait for puppet host configuration to finish | standalone | 360 retries left 2026-01-22 13:09:34.350453 | fa163e0d-6f45-64a1-ca76-000000004c18 | WAITING | Wait for puppet host configuration to finish | standalone | 359 retries left 2026-01-22 13:09:44.583203 | fa163e0d-6f45-64a1-ca76-000000004c18 | CHANGED | Wait for puppet host configuration to finish | standalone 2026-01-22 13:09:44.585719 | fa163e0d-6f45-64a1-ca76-000000004c18 | TIMING | Wait for puppet host configuration to finish | standalone | 0:30:38.550719 | 20.71s 2026-01-22 13:09:44.618619 | fa163e0d-6f45-64a1-ca76-000000004c19 | TASK | Debug output for task: Run puppet host 
configuration for step 5 2026-01-22 13:09:44.713579 | fa163e0d-6f45-64a1-ca76-000000004c19 | CHANGED | Debug output for task: Run puppet host configuration for step 5 | standalone | result={ "changed": true, "failed_when_result": false, "puppet_host_outputs.stdout_lines | default([]) | union(puppet_host_outputs.stderr_lines | default([]))": [ "<13>Jan 22 13:09:24 puppet-user: Warning: /etc/puppet/hiera.yaml: Use of 'hiera.yaml' version 3 is deprecated. It should be converted to version 5", "<13>Jan 22 13:09:30 puppet-user: (file: /etc/puppet/hiera.yaml)", "<13>Jan 22 13:09:30 puppet-user: Warning: Undefined variable '::deploy_config_name'; ", "<13>Jan 22 13:09:30 puppet-user: (file & line not available)", "<13>Jan 22 13:09:30 puppet-user: Warning: The function 'hiera' is deprecated in favor of using 'lookup'. See https://puppet.com/docs/puppet/7.10/deprecated_language.html", "<13>Jan 22 13:09:30 puppet-user: Warning: Unknown variable: '::deployment_type'. (file: /etc/puppet/modules/tripleo/manifests/profile/base/database/mysql/client.pp, line: 89, column: 8)", "<13>Jan 22 13:09:31 puppet-user: Warning: This method is deprecated, please use match expressions with Stdlib::Compat::String instead. They are described at https://docs.puppet.com/puppet/latest/reference/lang_data_type.html#match-expressions. at [\"/etc/puppet/modules/snmp/manifests/params.pp\", 310]:[\"/var/lib/tripleo-config/puppet_step_config.pp\", 7]", "<13>Jan 22 13:09:31 puppet-user: (location: /etc/puppet/modules/stdlib/lib/puppet/functions/deprecation.rb:34:in `deprecation')", "<13>Jan 22 13:09:31 puppet-user: Warning: This method is deprecated, please use the stdlib validate_legacy function,", "<13>Jan 22 13:09:31 puppet-user: with Stdlib::Compat::Bool. There is further documentation for validate_legacy function in the README. at [\"/etc/puppet/modules/snmp/manifests/init.pp\", 358]:[\"/var/lib/tripleo-config/puppet_step_config.pp\", 7]", "<13>Jan 22 13:09:31 puppet-user: with Stdlib::Compat::Array. There is further documentation for validate_legacy function in the README. at [\"/etc/puppet/modules/snmp/manifests/init.pp\", 367]:[\"/var/lib/tripleo-config/puppet_step_config.pp\", 7]", "<13>Jan 22 13:09:31 puppet-user: with Stdlib::Compat::String. There is further documentation for validate_legacy function in the README. at [\"/etc/puppet/modules/snmp/manifests/init.pp\", 382]:[\"/var/lib/tripleo-config/puppet_step_config.pp\", 7]", "<13>Jan 22 13:09:31 puppet-user: with Stdlib::Compat::Numeric. There is further documentation for validate_legacy function in the README. at [\"/etc/puppet/modules/snmp/manifests/init.pp\", 388]:[\"/var/lib/tripleo-config/puppet_step_config.pp\", 7]", "<13>Jan 22 13:09:31 puppet-user: with Pattern[]. There is further documentation for validate_legacy function in the README. at [\"/etc/puppet/modules/snmp/manifests/init.pp\", 393]:[\"/var/lib/tripleo-config/puppet_step_config.pp\", 7]", "<13>Jan 22 13:09:31 puppet-user: Warning: Unknown variable: '::deployment_type'. 
(file: /etc/puppet/modules/tripleo/manifests/packages.pp, line: 39, column: 69)", "<13>Jan 22 13:09:31 puppet-user: Notice: Compiled catalog for standalone.ooo.test in environment production in 0.56 seconds", "<13>Jan 22 13:09:36 puppet-user: Notice: /Stage[main]/Pacemaker::Resource_defaults/Pcmk_resource_default[resource-stickiness]/ensure: created", "<13>Jan 22 13:09:38 puppet-user: Notice: /Stage[main]/Pacemaker::Resource_op_defaults/Pcmk_resource_op_default[bundle]/ensure: created", "<13>Jan 22 13:09:40 puppet-user: Deprecation Warning: This command is deprecated and will be removed. Please use 'pcs property config' instead.", "<13>Jan 22 13:09:40 puppet-user: Notice: Applied catalog in 8.65 seconds", "<13>Jan 22 13:09:40 puppet-user: Application:", "<13>Jan 22 13:09:40 puppet-user: Initial environment: production", "<13>Jan 22 13:09:40 puppet-user: Converged environment: production", "<13>Jan 22 13:09:40 puppet-user: Run mode: user", "<13>Jan 22 13:09:40 puppet-user: Changes:", "<13>Jan 22 13:09:40 puppet-user: Total: 2", "<13>Jan 22 13:09:40 puppet-user: Events:", "<13>Jan 22 13:09:40 puppet-user: Success: 2", "<13>Jan 22 13:09:40 puppet-user: Resources:", "<13>Jan 22 13:09:40 puppet-user: Changed: 2", "<13>Jan 22 13:09:40 puppet-user: Out of sync: 2", "<13>Jan 22 13:09:40 puppet-user: Total: 37", "<13>Jan 22 13:09:40 puppet-user: Time:", "<13>Jan 22 13:09:40 puppet-user: Schedule: 0.00", "<13>Jan 22 13:09:40 puppet-user: File line: 0.00", "<13>Jan 22 13:09:40 puppet-user: Package: 0.00", "<13>Jan 22 13:09:40 puppet-user: Augeas: 0.01", "<13>Jan 22 13:09:40 puppet-user: User: 0.01", "<13>Jan 22 13:09:40 puppet-user: File: 0.10", "<13>Jan 22 13:09:40 puppet-user: Service: 0.17", "<13>Jan 22 13:09:40 puppet-user: Config retrieval: 0.67", "<13>Jan 22 13:09:40 puppet-user: Pcmk property: 1.46", "<13>Jan 22 13:09:40 puppet-user: Last run: 1769087380", "<13>Jan 22 13:09:40 puppet-user: Exec: 2.06", "<13>Jan 22 13:09:40 puppet-user: Pcmk resource op default: 2.24", "<13>Jan 22 13:09:40 puppet-user: Pcmk resource default: 2.35", "<13>Jan 22 13:09:40 puppet-user: Filebucket: 0.00", "<13>Jan 22 13:09:40 puppet-user: Transaction evaluation: 8.59", "<13>Jan 22 13:09:40 puppet-user: Catalog application: 8.65", "<13>Jan 22 13:09:40 puppet-user: Total: 8.66", "<13>Jan 22 13:09:40 puppet-user: Version:", "<13>Jan 22 13:09:40 puppet-user: Config: 1769087370", "<13>Jan 22 13:09:40 puppet-user: Puppet: 7.10.0" ] } 2026-01-22 13:09:44.715051 | fa163e0d-6f45-64a1-ca76-000000004c19 | TIMING | Debug output for task: Run puppet host configuration for step 5 | standalone | 0:30:38.680056 | 0.09s 2026-01-22 13:09:44.745046 | fa163e0d-6f45-64a1-ca76-000000004c1a | TASK | Pre-cache facts for puppet containers 2026-01-22 13:09:44.775182 | fa163e0d-6f45-64a1-ca76-000000004c1a | TIMING | Pre-cache facts for puppet containers | standalone | 0:30:38.740181 | 0.03s 2026-01-22 13:09:44.844145 | fa163e0d-6f45-64a1-ca76-000000004c59 | TASK | Gather variables for each operating system 2026-01-22 13:09:44.973885 | fa163e0d-6f45-64a1-ca76-000000004c59 | TIMING | tripleo_puppet_cache : Gather variables for each operating system | standalone | 0:30:38.938879 | 0.13s 2026-01-22 13:09:45.005616 | fa163e0d-6f45-64a1-ca76-000000004c5a | TASK | Create puppet caching structures 2026-01-22 13:09:45.294124 | fa163e0d-6f45-64a1-ca76-000000004c5a | CHANGED | Create puppet caching structures | standalone 2026-01-22 13:09:45.295418 | fa163e0d-6f45-64a1-ca76-000000004c5a | TIMING | tripleo_puppet_cache : Create puppet caching structures 
| standalone | 0:30:39.260430 | 0.29s 2026-01-22 13:09:45.318077 | fa163e0d-6f45-64a1-ca76-000000004c5b | TASK | Check for facter.conf 2026-01-22 13:09:45.536704 | fa163e0d-6f45-64a1-ca76-000000004c5b | OK | Check for facter.conf | standalone 2026-01-22 13:09:45.538562 | fa163e0d-6f45-64a1-ca76-000000004c5b | TIMING | tripleo_puppet_cache : Check for facter.conf | standalone | 0:30:39.503567 | 0.22s 2026-01-22 13:09:45.569090 | fa163e0d-6f45-64a1-ca76-000000004c5c | TASK | Remove facter.conf if directory 2026-01-22 13:09:45.620610 | fa163e0d-6f45-64a1-ca76-000000004c5c | SKIPPED | Remove facter.conf if directory | standalone 2026-01-22 13:09:45.621747 | fa163e0d-6f45-64a1-ca76-000000004c5c | TIMING | tripleo_puppet_cache : Remove facter.conf if directory | standalone | 0:30:39.586753 | 0.05s 2026-01-22 13:09:45.651203 | fa163e0d-6f45-64a1-ca76-000000004c5d | TASK | Write facter cache config 2026-01-22 13:09:46.175885 | fa163e0d-6f45-64a1-ca76-000000004c5d | CHANGED | Write facter cache config | standalone 2026-01-22 13:09:46.177700 | fa163e0d-6f45-64a1-ca76-000000004c5d | TIMING | tripleo_puppet_cache : Write facter cache config | standalone | 0:30:40.142673 | 0.53s 2026-01-22 13:09:46.211507 | fa163e0d-6f45-64a1-ca76-000000004c5e | TASK | Cleanup facter cache if exists 2026-01-22 13:09:46.486048 | fa163e0d-6f45-64a1-ca76-000000004c5e | CHANGED | Cleanup facter cache if exists | standalone 2026-01-22 13:09:46.487609 | fa163e0d-6f45-64a1-ca76-000000004c5e | TIMING | tripleo_puppet_cache : Cleanup facter cache if exists | standalone | 0:30:40.452613 | 0.27s 2026-01-22 13:09:46.518426 | fa163e0d-6f45-64a1-ca76-000000004c5f | TASK | Pre-cache facts 2026-01-22 13:09:47.168246 | fa163e0d-6f45-64a1-ca76-000000004c5f | CHANGED | Pre-cache facts | standalone 2026-01-22 13:09:47.169535 | fa163e0d-6f45-64a1-ca76-000000004c5f | TIMING | tripleo_puppet_cache : Pre-cache facts | standalone | 0:30:41.134513 | 0.65s 2026-01-22 13:09:47.206209 | fa163e0d-6f45-64a1-ca76-000000004c60 | TASK | Failed deployment if facter fails 2026-01-22 13:09:47.247988 | fa163e0d-6f45-64a1-ca76-000000004c60 | SKIPPED | Failed deployment if facter fails | standalone 2026-01-22 13:09:47.249361 | fa163e0d-6f45-64a1-ca76-000000004c60 | TIMING | tripleo_puppet_cache : Failed deployment if facter fails | standalone | 0:30:41.214368 | 0.04s 2026-01-22 13:09:47.296185 | fa163e0d-6f45-64a1-ca76-000000004c61 | TASK | Sync cached facts 2026-01-22 13:09:47.673449 | fa163e0d-6f45-64a1-ca76-000000004c61 | CHANGED | Sync cached facts | standalone -> 192.168.122.100 [WARNING]: ('standalone -> 192.168.122.100', 'fa163e0d-6f45-64a1-ca76-000000004c61') missing from stats 2026-01-22 13:09:47.732145 | fa163e0d-6f45-64a1-ca76-000000004c1b | TASK | Include container-puppet tasks (generate config) during step 1 2026-01-22 13:09:47.785915 | fa163e0d-6f45-64a1-ca76-000000004c1b | SKIPPED | Include container-puppet tasks (generate config) during step 1 | standalone 2026-01-22 13:09:47.787288 | fa163e0d-6f45-64a1-ca76-000000004c1b | TIMING | Include container-puppet tasks (generate config) during step 1 | standalone | 0:30:41.752293 | 0.05s 2026-01-22 13:09:47.820153 | fa163e0d-6f45-64a1-ca76-000000004c1d | TASK | Manage containers for step 5 with tripleo-ansible 2026-01-22 13:09:47.850799 | fa163e0d-6f45-64a1-ca76-000000004c1d | TIMING | Manage containers for step 5 with tripleo-ansible | standalone | 0:30:41.815802 | 0.03s 2026-01-22 13:09:47.918522 | fa163e0d-6f45-64a1-ca76-000000004cb8 | TASK | Gather variables for each operating system 2026-01-22 
13:09:48.044868 | fa163e0d-6f45-64a1-ca76-000000004cb8 | TIMING | tripleo_container_manage : Gather variables for each operating system | standalone | 0:30:42.009855 | 0.12s 2026-01-22 13:09:48.076381 | fa163e0d-6f45-64a1-ca76-000000004cb9 | TASK | Create container logs path 2026-01-22 13:09:48.344406 | fa163e0d-6f45-64a1-ca76-000000004cb9 | OK | Create container logs path | standalone 2026-01-22 13:09:48.345978 | fa163e0d-6f45-64a1-ca76-000000004cb9 | TIMING | tripleo_container_manage : Create container logs path | standalone | 0:30:42.310985 | 0.27s 2026-01-22 13:09:48.378106 | fa163e0d-6f45-64a1-ca76-000000004cbb | TASK | Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_5 2026-01-22 13:09:48.652707 | fa163e0d-6f45-64a1-ca76-000000004cbb | OK | Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_5 | standalone 2026-01-22 13:09:48.654026 | fa163e0d-6f45-64a1-ca76-000000004cbb | TIMING | tripleo_container_manage : Find all matching configs configs for in /var/lib/tripleo-config/container-startup-config/step_5 | standalone | 0:30:42.619031 | 0.27s 2026-01-22 13:09:48.683477 | fa163e0d-6f45-64a1-ca76-000000004cbc | TASK | Finalise hashes for all containers 2026-01-22 13:09:48.756637 | fa163e0d-6f45-64a1-ca76-000000004cbc | OK | Finalise hashes for all containers | standalone 2026-01-22 13:09:48.757886 | fa163e0d-6f45-64a1-ca76-000000004cbc | TIMING | tripleo_container_manage : Finalise hashes for all containers | standalone | 0:30:42.722891 | 0.07s 2026-01-22 13:09:48.787493 | fa163e0d-6f45-64a1-ca76-000000004cbe | TASK | Manage systemd shutdown files 2026-01-22 13:09:48.827843 | fa163e0d-6f45-64a1-ca76-000000004cbe | TIMING | tripleo_container_manage : Manage systemd shutdown files | standalone | 0:30:42.792843 | 0.04s 2026-01-22 13:09:48.861906 | aab07b61-84c8-4afc-9bea-9b83ad4875e4 | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/shutdown.yml | standalone 2026-01-22 13:09:48.890691 | fa163e0d-6f45-64a1-ca76-000000004cf1 | TASK | Check if /etc/sysconfig/podman_drop_in exists 2026-01-22 13:09:49.158079 | fa163e0d-6f45-64a1-ca76-000000004cf1 | OK | Check if /etc/sysconfig/podman_drop_in exists | standalone 2026-01-22 13:09:49.159539 | fa163e0d-6f45-64a1-ca76-000000004cf1 | TIMING | tripleo_container_manage : Check if /etc/sysconfig/podman_drop_in exists | standalone | 0:30:43.124547 | 0.27s 2026-01-22 13:09:49.191630 | fa163e0d-6f45-64a1-ca76-000000004cf2 | TASK | Set podman_drop_in fact 2026-01-22 13:09:49.253616 | fa163e0d-6f45-64a1-ca76-000000004cf2 | OK | Set podman_drop_in fact | standalone 2026-01-22 13:09:49.254759 | fa163e0d-6f45-64a1-ca76-000000004cf2 | TIMING | tripleo_container_manage : Set podman_drop_in fact | standalone | 0:30:43.219767 | 0.06s 2026-01-22 13:09:49.284336 | fa163e0d-6f45-64a1-ca76-000000004cf4 | TASK | Deploy tripleo-container-shutdown and tripleo-start-podman-container 2026-01-22 13:09:49.832087 | fa163e0d-6f45-64a1-ca76-000000004cf4 | OK | Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | item=tripleo-container-shutdown 2026-01-22 13:09:49.835161 | fa163e0d-6f45-64a1-ca76-000000004cf4 | TIMING | tripleo_container_manage : Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | 0:30:43.800147 | 0.55s 2026-01-22 13:09:50.284695 | fa163e0d-6f45-64a1-ca76-000000004cf4 | OK | Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | item=tripleo-start-podman-container 
2026-01-22 13:09:50.285506 | fa163e0d-6f45-64a1-ca76-000000004cf4 | TIMING | tripleo_container_manage : Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | 0:30:44.250521 | 1.00s 2026-01-22 13:09:50.289399 | fa163e0d-6f45-64a1-ca76-000000004cf4 | TIMING | tripleo_container_manage : Deploy tripleo-container-shutdown and tripleo-start-podman-container | standalone | 0:30:44.254409 | 1.00s 2026-01-22 13:09:50.309331 | fa163e0d-6f45-64a1-ca76-000000004cf5 | TASK | Create /usr/lib/systemd/system/tripleo-container-shutdown.service 2026-01-22 13:09:50.766560 | fa163e0d-6f45-64a1-ca76-000000004cf5 | OK | Create /usr/lib/systemd/system/tripleo-container-shutdown.service | standalone 2026-01-22 13:09:50.768937 | fa163e0d-6f45-64a1-ca76-000000004cf5 | TIMING | tripleo_container_manage : Create /usr/lib/systemd/system/tripleo-container-shutdown.service | standalone | 0:30:44.733924 | 0.46s 2026-01-22 13:09:50.799753 | fa163e0d-6f45-64a1-ca76-000000004cf6 | TASK | Create /usr/lib/systemd/system-preset/91-tripleo-container-shutdown.preset 2026-01-22 13:09:51.349523 | fa163e0d-6f45-64a1-ca76-000000004cf6 | OK | Create /usr/lib/systemd/system-preset/91-tripleo-container-shutdown.preset | standalone 2026-01-22 13:09:51.350574 | fa163e0d-6f45-64a1-ca76-000000004cf6 | TIMING | tripleo_container_manage : Create /usr/lib/systemd/system-preset/91-tripleo-container-shutdown.preset | standalone | 0:30:45.315587 | 0.55s 2026-01-22 13:09:51.373368 | fa163e0d-6f45-64a1-ca76-000000004cf7 | TASK | Enable and start tripleo-container-shutdown 2026-01-22 13:09:52.354174 | fa163e0d-6f45-64a1-ca76-000000004cf7 | OK | Enable and start tripleo-container-shutdown | standalone 2026-01-22 13:09:52.355389 | fa163e0d-6f45-64a1-ca76-000000004cf7 | TIMING | tripleo_container_manage : Enable and start tripleo-container-shutdown | standalone | 0:30:46.320405 | 0.98s 2026-01-22 13:09:52.375158 | fa163e0d-6f45-64a1-ca76-000000004cf8 | TASK | Create /usr/lib/systemd/system/netns-placeholder.service 2026-01-22 13:09:52.915822 | fa163e0d-6f45-64a1-ca76-000000004cf8 | OK | Create /usr/lib/systemd/system/netns-placeholder.service | standalone 2026-01-22 13:09:52.917447 | fa163e0d-6f45-64a1-ca76-000000004cf8 | TIMING | tripleo_container_manage : Create /usr/lib/systemd/system/netns-placeholder.service | standalone | 0:30:46.882453 | 0.54s 2026-01-22 13:09:52.946780 | fa163e0d-6f45-64a1-ca76-000000004cf9 | TASK | Create /usr/lib/systemd/system-preset/91-netns-placeholder.preset 2026-01-22 13:09:53.462664 | fa163e0d-6f45-64a1-ca76-000000004cf9 | OK | Create /usr/lib/systemd/system-preset/91-netns-placeholder.preset | standalone 2026-01-22 13:09:53.464069 | fa163e0d-6f45-64a1-ca76-000000004cf9 | TIMING | tripleo_container_manage : Create /usr/lib/systemd/system-preset/91-netns-placeholder.preset | standalone | 0:30:47.429076 | 0.52s 2026-01-22 13:09:53.493994 | fa163e0d-6f45-64a1-ca76-000000004cfa | TASK | Enable and start netns-placeholder 2026-01-22 13:09:54.555600 | fa163e0d-6f45-64a1-ca76-000000004cfa | CHANGED | Enable and start netns-placeholder | standalone 2026-01-22 13:09:54.556820 | fa163e0d-6f45-64a1-ca76-000000004cfa | TIMING | tripleo_container_manage : Enable and start netns-placeholder | standalone | 0:30:48.521835 | 1.06s 2026-01-22 13:09:54.576884 | fa163e0d-6f45-64a1-ca76-000000004cc0 | TASK | Update container configs with new config hashes 2026-01-22 13:09:54.614701 | fa163e0d-6f45-64a1-ca76-000000004cc0 | TIMING | tripleo_container_manage : Update container configs with new config hashes | 
standalone | 0:30:48.579702 | 0.04s 2026-01-22 13:09:54.628705 | 392a68ba-7394-4c0f-b889-6e2ec00d6b22 | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/puppet_config.yml | standalone 2026-01-22 13:09:54.655338 | fa163e0d-6f45-64a1-ca76-000000004d22 | TASK | Update config hashes for container startup configs 2026-01-22 13:09:54.961014 | fa163e0d-6f45-64a1-ca76-000000004d22 | OK | Update config hashes for container startup configs | standalone 2026-01-22 13:09:54.962040 | fa163e0d-6f45-64a1-ca76-000000004d22 | TIMING | tripleo_container_manage : Update config hashes for container startup configs | standalone | 0:30:48.927055 | 0.31s 2026-01-22 13:09:54.982531 | fa163e0d-6f45-64a1-ca76-000000004cc1 | TASK | Delete orphan containers from /var/lib/tripleo-config/container-startup-config/step_5 2026-01-22 13:09:55.019471 | fa163e0d-6f45-64a1-ca76-000000004cc1 | TIMING | tripleo_container_manage : Delete orphan containers from /var/lib/tripleo-config/container-startup-config/step_5 | standalone | 0:30:48.984481 | 0.04s 2026-01-22 13:09:55.030601 | 5f521146-5e19-4d06-b3c9-bcb0ff76e795 | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/delete_orphan.yml | standalone 2026-01-22 13:09:55.057685 | fa163e0d-6f45-64a1-ca76-000000004d41 | TASK | Gather podman infos 2026-01-22 13:09:57.198262 | fa163e0d-6f45-64a1-ca76-000000004d41 | OK | Gather podman infos | standalone 2026-01-22 13:09:57.199690 | fa163e0d-6f45-64a1-ca76-000000004d41 | TIMING | tripleo_container_manage : Gather podman infos | standalone | 0:30:51.164699 | 2.14s 2026-01-22 13:09:57.348205 | fa163e0d-6f45-64a1-ca76-000000004d42 | TASK | Delete orphan containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_5 2026-01-22 13:09:57.410277 | fa163e0d-6f45-64a1-ca76-000000004d42 | TIMING | Delete orphan containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_5 | standalone | 0:30:51.375287 | 0.05s 2026-01-22 13:09:57.591765 | fa163e0d-6f45-64a1-ca76-000000004d8b | TIMING | tripleo_container_rm : include_tasks | standalone | 0:30:51.556758 | 0.14s 2026-01-22 13:09:57.653355 | fa163e0d-6f45-64a1-ca76-000000004cc2 | TASK | Create containers from /var/lib/tripleo-config/container-startup-config/step_5 2026-01-22 13:09:57.707422 | fa163e0d-6f45-64a1-ca76-000000004cc2 | TIMING | tripleo_container_manage : Create containers from /var/lib/tripleo-config/container-startup-config/step_5 | standalone | 0:30:51.672424 | 0.05s 2026-01-22 13:09:57.731508 | a5efd9f6-c61b-4231-acc6-50bb66863904 | INCLUDED | /usr/share/ansible/roles/tripleo_container_manage/tasks/create.yml | standalone 2026-01-22 13:09:57.768232 | fa163e0d-6f45-64a1-ca76-000000004db1 | TASK | Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_5 2026-01-22 13:10:23.366575 | fa163e0d-6f45-64a1-ca76-000000004db1 | CHANGED | Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_5 | standalone 2026-01-22 13:10:23.368403 | fa163e0d-6f45-64a1-ca76-000000004db1 | TIMING | tripleo_container_manage : Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_5 | standalone | 0:31:17.333419 | 25.60s 2026-01-22 13:10:23.389071 | fa163e0d-6f45-64a1-ca76-000000004db2 | TASK | Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_5 2026-01-22 13:10:48.218861 | fa163e0d-6f45-64a1-ca76-000000004db2 | CHANGED | Manage 
container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_5 | standalone 2026-01-22 13:10:48.220808 | fa163e0d-6f45-64a1-ca76-000000004db2 | TIMING | tripleo_container_manage : Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_5 | standalone | 0:31:42.185809 | 24.83s 2026-01-22 13:10:48.279349 | fa163e0d-6f45-64a1-ca76-000000004c1f | TASK | Clean container_puppet_tasks for standalone step 5 2026-01-22 13:10:48.485575 | fa163e0d-6f45-64a1-ca76-000000004c1f | OK | Clean container_puppet_tasks for standalone step 5 | standalone 2026-01-22 13:10:48.486426 | fa163e0d-6f45-64a1-ca76-000000004c1f | TIMING | Clean container_puppet_tasks for standalone step 5 | standalone | 0:31:42.451442 | 0.21s 2026-01-22 13:10:48.514978 | fa163e0d-6f45-64a1-ca76-000000004c20 | TASK | Calculate container_puppet_tasks for standalone step 5 2026-01-22 13:10:48.566136 | fa163e0d-6f45-64a1-ca76-000000004c20 | TIMING | Calculate container_puppet_tasks for standalone step 5 | standalone | 0:31:42.531147 | 0.05s 2026-01-22 13:10:48.586687 | fa163e0d-6f45-64a1-ca76-000000004c21 | TASK | Include container-puppet tasks for step 5 2026-01-22 13:10:48.613990 | fa163e0d-6f45-64a1-ca76-000000004c21 | TIMING | Include container-puppet tasks for step 5 | standalone | 0:31:42.578995 | 0.03s 2026-01-22 13:10:48.627467 | ef4eadb6-9ec3-4d90-82ea-f1db6cb4b792 | INCLUDED | /root/standalone-ansible-mz1ymllk/host-container-puppet-tasks.yaml | standalone 2026-01-22 13:10:48.655982 | fa163e0d-6f45-64a1-ca76-000000004dfd | TASK | Write container-puppet-tasks json file for standalone step 5 2026-01-22 13:10:49.104137 | fa163e0d-6f45-64a1-ca76-000000004dfd | CHANGED | Write container-puppet-tasks json file for standalone step 5 | standalone 2026-01-22 13:10:49.105442 | fa163e0d-6f45-64a1-ca76-000000004dfd | TIMING | Write container-puppet-tasks json file for standalone step 5 | standalone | 0:31:43.070446 | 0.45s 2026-01-22 13:10:49.136885 | fa163e0d-6f45-64a1-ca76-000000004dff | TASK | Generate container puppet configs for step 5 2026-01-22 13:10:49.381576 | fa163e0d-6f45-64a1-ca76-000000004dff | OK | Generate container puppet configs for step 5 | standalone 2026-01-22 13:10:49.382869 | fa163e0d-6f45-64a1-ca76-000000004dff | TIMING | Generate container puppet configs for step 5 | standalone | 0:31:43.347877 | 0.24s 2026-01-22 13:10:49.416448 | fa163e0d-6f45-64a1-ca76-000000004e00 | TASK | Manage Puppet containers (bootstrap tasks) for step 5 with tripleo-ansible 2026-01-22 13:10:49.439297 | fa163e0d-6f45-64a1-ca76-000000004e00 | TIMING | Manage Puppet containers (bootstrap tasks) for step 5 with tripleo-ansible | standalone | 0:31:43.404286 | 0.02s 2026-01-22 13:10:49.513266 | fa163e0d-6f45-64a1-ca76-000000004e2f | TASK | Gather variables for each operating system 2026-01-22 13:10:49.605411 | fa163e0d-6f45-64a1-ca76-000000004e2f | TIMING | tripleo_container_manage : Gather variables for each operating system | standalone | 0:31:43.570405 | 0.09s 2026-01-22 13:10:49.634532 | fa163e0d-6f45-64a1-ca76-000000004e30 | TASK | Create container logs path 2026-01-22 13:10:49.841992 | fa163e0d-6f45-64a1-ca76-000000004e30 | OK | Create container logs path | standalone 2026-01-22 13:10:49.842844 | fa163e0d-6f45-64a1-ca76-000000004e30 | TIMING | tripleo_container_manage : Create container logs path | standalone | 0:31:43.807860 | 0.21s 2026-01-22 13:10:49.864139 | fa163e0d-6f45-64a1-ca76-000000004e32 | TASK | Find 
all matching configs configs for in /var/lib/tripleo-config/container-puppet-config/step_5 2026-01-22 13:10:50.100183 | fa163e0d-6f45-64a1-ca76-000000004e32 | OK | Find all matching configs configs for in /var/lib/tripleo-config/container-puppet-config/step_5 | standalone 2026-01-22 13:10:50.100948 | fa163e0d-6f45-64a1-ca76-000000004e32 | TIMING | tripleo_container_manage : Find all matching configs configs for in /var/lib/tripleo-config/container-puppet-config/step_5 | standalone | 0:31:44.065964 | 0.24s 2026-01-22 13:10:50.120072 | fa163e0d-6f45-64a1-ca76-000000004e33 | TASK | Finalise hashes for all containers 2026-01-22 13:10:50.168195 | fa163e0d-6f45-64a1-ca76-000000004e33 | OK | Finalise hashes for all containers | standalone 2026-01-22 13:10:50.169124 | fa163e0d-6f45-64a1-ca76-000000004e33 | TIMING | tripleo_container_manage : Finalise hashes for all containers | standalone | 0:31:44.134137 | 0.05s 2026-01-22 13:10:50.189954 | fa163e0d-6f45-64a1-ca76-000000004e35 | TASK | Manage systemd shutdown files 2026-01-22 13:10:50.217476 | fa163e0d-6f45-64a1-ca76-000000004e35 | SKIPPED | Manage systemd shutdown files | standalone 2026-01-22 13:10:50.218367 | fa163e0d-6f45-64a1-ca76-000000004e35 | TIMING | tripleo_container_manage : Manage systemd shutdown files | standalone | 0:31:44.183382 | 0.03s 2026-01-22 13:10:50.238615 | fa163e0d-6f45-64a1-ca76-000000004e37 | TASK | Update container configs with new config hashes 2026-01-22 13:10:50.277076 | fa163e0d-6f45-64a1-ca76-000000004e37 | SKIPPED | Update container configs with new config hashes | standalone 2026-01-22 13:10:50.277995 | fa163e0d-6f45-64a1-ca76-000000004e37 | TIMING | tripleo_container_manage : Update container configs with new config hashes | standalone | 0:31:44.243007 | 0.04s 2026-01-22 13:10:50.299372 | fa163e0d-6f45-64a1-ca76-000000004e38 | TASK | Delete orphan containers from /var/lib/tripleo-config/container-puppet-config/step_5 2026-01-22 13:10:50.338419 | fa163e0d-6f45-64a1-ca76-000000004e38 | SKIPPED | Delete orphan containers from /var/lib/tripleo-config/container-puppet-config/step_5 | standalone 2026-01-22 13:10:50.340972 | fa163e0d-6f45-64a1-ca76-000000004e38 | TIMING | tripleo_container_manage : Delete orphan containers from /var/lib/tripleo-config/container-puppet-config/step_5 | standalone | 0:31:44.305970 | 0.04s 2026-01-22 13:10:50.406314 | fa163e0d-6f45-64a1-ca76-000000004e39 | TASK | Create containers from /var/lib/tripleo-config/container-puppet-config/step_5 2026-01-22 13:10:50.438189 | fa163e0d-6f45-64a1-ca76-000000004e39 | SKIPPED | Create containers from /var/lib/tripleo-config/container-puppet-config/step_5 | standalone 2026-01-22 13:10:50.439383 | fa163e0d-6f45-64a1-ca76-000000004e39 | TIMING | tripleo_container_manage : Create containers from /var/lib/tripleo-config/container-puppet-config/step_5 | standalone | 0:31:44.404390 | 0.03s PLAY [Server Post Deployments] ************************************************* 2026-01-22 13:10:50.621198 | fa163e0d-6f45-64a1-ca76-0000000000f7 | TASK | Server Post Deployments 2026-01-22 13:10:50.641758 | fa163e0d-6f45-64a1-ca76-0000000000f7 | OK | Server Post Deployments | standalone -> localhost | result={ "changed": false, "msg": "Use --start-at-task 'Server Post Deployments' to resume from this task" } [WARNING]: ('standalone -> localhost', 'fa163e0d-6f45-64a1-ca76-0000000000f7') missing from stats 2026-01-22 13:10:50.708095 | fa163e0d-6f45-64a1-ca76-0000000000f8 | TIMING | include_tasks | standalone | 0:31:44.673100 | 0.04s PLAY [External deployment Post Deploy 
tasks] *********************************** 2026-01-22 13:10:50.839052 | fa163e0d-6f45-64a1-ca76-0000000000fc | TASK | External deployment Post Deploy tasks 2026-01-22 13:10:50.869144 | fa163e0d-6f45-64a1-ca76-0000000000fc | OK | External deployment Post Deploy tasks | undercloud -> localhost | result={ "changed": false, "msg": "Use --start-at-task 'External deployment Post Deploy tasks' to resume from this task" } [WARNING]: ('undercloud -> localhost', 'fa163e0d-6f45-64a1-ca76-0000000000fc') missing from stats 2026-01-22 13:10:50.886947 | fa163e0d-6f45-64a1-ca76-000000000100 | TASK | is additonal Cell? 2026-01-22 13:10:50.925643 | fa163e0d-6f45-64a1-ca76-000000000100 | OK | is additonal Cell? | undercloud 2026-01-22 13:10:50.927966 | fa163e0d-6f45-64a1-ca76-000000000100 | TIMING | is additonal Cell? | undercloud | 0:31:44.892972 | 0.04s 2026-01-22 13:10:50.944277 | fa163e0d-6f45-64a1-ca76-000000000102 | TASK | discover via nova_manager? 2026-01-22 13:10:51.028000 | fa163e0d-6f45-64a1-ca76-000000000102 | SKIPPED | discover via nova_manager? | undercloud 2026-01-22 13:10:51.028900 | fa163e0d-6f45-64a1-ca76-000000000102 | TIMING | discover via nova_manager? | undercloud | 0:31:44.993911 | 0.08s 2026-01-22 13:10:51.043924 | fa163e0d-6f45-64a1-ca76-000000000103 | TASK | discover via nova_api? 2026-01-22 13:10:51.112330 | fa163e0d-6f45-64a1-ca76-000000000103 | OK | discover via nova_api? | undercloud 2026-01-22 13:10:51.113943 | fa163e0d-6f45-64a1-ca76-000000000103 | TIMING | discover via nova_api? | undercloud | 0:31:45.078953 | 0.07s 2026-01-22 13:10:51.132334 | fa163e0d-6f45-64a1-ca76-000000000104 | TASK | Warn if no discovery host available 2026-01-22 13:10:51.182734 | fa163e0d-6f45-64a1-ca76-000000000104 | SKIPPED | Warn if no discovery host available | undercloud 2026-01-22 13:10:51.184285 | fa163e0d-6f45-64a1-ca76-000000000104 | TIMING | Warn if no discovery host available | undercloud | 0:31:45.149293 | 0.05s 2026-01-22 13:10:51.210683 | fa163e0d-6f45-64a1-ca76-000000000105 | TASK | Discovering nova hosts 2026-01-22 13:10:53.959490 | fa163e0d-6f45-64a1-ca76-000000000105 | OK | Discovering nova hosts | undercloud -> 192.168.122.100 [WARNING]: ('undercloud -> 192.168.122.100', 'fa163e0d-6f45-64a1-ca76-000000000105') missing from stats 2026-01-22 13:10:54.052002 | fa163e0d-6f45-64a1-ca76-000000000106 | OK | set_fact | undercloud 2026-01-22 13:10:54.054702 | fa163e0d-6f45-64a1-ca76-000000000106 | TIMING | set_fact | undercloud | 0:31:48.019661 | 0.07s 2026-01-22 13:10:54.074628 | fa163e0d-6f45-64a1-ca76-000000000108 | TASK | Set up group_vars 2026-01-22 13:10:54.123860 | fa163e0d-6f45-64a1-ca76-000000000108 | OK | Set up group_vars | undercloud 2026-01-22 13:10:54.126881 | fa163e0d-6f45-64a1-ca76-000000000108 | TIMING | Set up group_vars | undercloud | 0:31:48.091883 | 0.05s 2026-01-22 13:10:54.150867 | fa163e0d-6f45-64a1-ca76-000000000109 | TASK | Make needed directories on the undercloud 2026-01-22 13:10:54.402818 | fa163e0d-6f45-64a1-ca76-000000000109 | CHANGED | Make needed directories on the undercloud | undercloud | item=/root/standalone-ansible-mz1ymllk/octavia-ansible 2026-01-22 13:10:54.404473 | fa163e0d-6f45-64a1-ca76-000000000109 | TIMING | Make needed directories on the undercloud | undercloud | 0:31:48.369472 | 0.25s 2026-01-22 13:10:54.603880 | fa163e0d-6f45-64a1-ca76-000000000109 | CHANGED | Make needed directories on the undercloud | undercloud | item=/root/standalone-ansible-mz1ymllk/octavia-ansible/local_dir 2026-01-22 13:10:54.604910 | fa163e0d-6f45-64a1-ca76-000000000109 
| TIMING | Make needed directories on the undercloud | undercloud | 0:31:48.569920 | 0.45s 2026-01-22 13:10:54.783416 | fa163e0d-6f45-64a1-ca76-000000000109 | CHANGED | Make needed directories on the undercloud | undercloud | item=/root/standalone-ansible-mz1ymllk/octavia-ansible/group_vars 2026-01-22 13:10:54.786250 | fa163e0d-6f45-64a1-ca76-000000000109 | TIMING | Make needed directories on the undercloud | undercloud | 0:31:48.751257 | 0.63s 2026-01-22 13:10:54.796466 | fa163e0d-6f45-64a1-ca76-000000000109 | TIMING | Make needed directories on the undercloud | undercloud | 0:31:48.761477 | 0.64s 2026-01-22 13:10:54.815149 | fa163e0d-6f45-64a1-ca76-00000000010a | TASK | Write group_vars file 2026-01-22 13:10:55.316314 | fa163e0d-6f45-64a1-ca76-00000000010a | CHANGED | Write group_vars file | undercloud 2026-01-22 13:10:55.317214 | fa163e0d-6f45-64a1-ca76-00000000010a | TIMING | Write group_vars file | undercloud | 0:31:49.282226 | 0.50s 2026-01-22 13:10:55.340527 | fa163e0d-6f45-64a1-ca76-00000000010b | TASK | Gather missing facts 2026-01-22 13:10:55.655662 | fa163e0d-6f45-64a1-ca76-00000000010b | OK | Gather missing facts | undercloud -> 192.168.122.100 | item=standalone [WARNING]: ('undercloud -> 192.168.122.100', 'fa163e0d-6f45-64a1-ca76-00000000010b') missing from stats 2026-01-22 13:10:55.670503 | fa163e0d-6f45-64a1-ca76-00000000010b | TIMING | Gather missing facts | undercloud | 0:31:49.635516 | 0.33s 2026-01-22 13:10:55.684717 | fa163e0d-6f45-64a1-ca76-00000000010c | TASK | Write octavia inventory 2026-01-22 13:10:56.173919 | fa163e0d-6f45-64a1-ca76-00000000010c | CHANGED | Write octavia inventory | undercloud 2026-01-22 13:10:56.175354 | fa163e0d-6f45-64a1-ca76-00000000010c | TIMING | Write octavia inventory | undercloud | 0:31:50.140360 | 0.49s 2026-01-22 13:10:56.197013 | fa163e0d-6f45-64a1-ca76-00000000010d | TASK | Check for ssh_private_key in working directory 2026-01-22 13:10:56.376583 | fa163e0d-6f45-64a1-ca76-00000000010d | OK | Check for ssh_private_key in working directory | undercloud 2026-01-22 13:10:56.379067 | fa163e0d-6f45-64a1-ca76-00000000010d | TIMING | Check for ssh_private_key in working directory | undercloud | 0:31:50.343976 | 0.18s 2026-01-22 13:10:56.398573 | fa163e0d-6f45-64a1-ca76-00000000010e | TASK | Set private key location 2026-01-22 13:10:56.451946 | fa163e0d-6f45-64a1-ca76-00000000010e | SKIPPED | Set private key location | undercloud 2026-01-22 13:10:56.454562 | fa163e0d-6f45-64a1-ca76-00000000010e | TIMING | Set private key location | undercloud | 0:31:50.419465 | 0.05s 2026-01-22 13:10:56.472491 | fa163e0d-6f45-64a1-ca76-00000000010f | TASK | Configure octavia command 2026-01-22 13:10:56.522680 | fa163e0d-6f45-64a1-ca76-00000000010f | OK | Configure octavia command | undercloud 2026-01-22 13:10:56.525329 | fa163e0d-6f45-64a1-ca76-00000000010f | TIMING | Configure octavia command | undercloud | 0:31:50.490232 | 0.05s 2026-01-22 13:10:56.585519 | fa163e0d-6f45-64a1-ca76-000000000110 | OK | set_fact | undercloud 2026-01-22 13:10:56.588105 | fa163e0d-6f45-64a1-ca76-000000000110 | TIMING | set_fact | undercloud | 0:31:50.553011 | 0.04s 2026-01-22 13:10:56.647618 | fa163e0d-6f45-64a1-ca76-000000000111 | OK | debug | undercloud | result={ "changed": false, "msg": "Configure Octavia command is: ANSIBLE_CONFIG=\"/root/standalone-ansible-mz1ymllk/ansible.cfg\" ansible-playbook -i \"/root/standalone-ansible-mz1ymllk/octavia-ansible/inventory.yaml\" --extra-vars @/root/standalone-ansible-mz1ymllk/octavia-ansible/group_vars/octavia_vars.yaml 
/usr/share/ansible/tripleo-playbooks/octavia-files.yaml" } 2026-01-22 13:10:56.650243 | fa163e0d-6f45-64a1-ca76-000000000111 | TIMING | debug | undercloud | 0:31:50.615132 | 0.04s 2026-01-22 13:10:56.669465 | fa163e0d-6f45-64a1-ca76-000000000112 | TASK | Configure octavia on overcloud 2026-01-22 13:14:12.496116 | fa163e0d-6f45-64a1-ca76-000000000112 | CHANGED | Configure octavia on overcloud | undercloud 2026-01-22 13:14:12.498703 | fa163e0d-6f45-64a1-ca76-000000000112 | TIMING | Configure octavia on overcloud | undercloud | 0:35:06.463713 | 195.83s 2026-01-22 13:14:12.515234 | fa163e0d-6f45-64a1-ca76-000000000113 | TASK | Purge temp dirs 2026-01-22 13:14:12.777367 | fa163e0d-6f45-64a1-ca76-000000000113 | CHANGED | Purge temp dirs | undercloud | item=/root/standalone-ansible-mz1ymllk/octavia-ansible/local_dir 2026-01-22 13:14:12.778248 | fa163e0d-6f45-64a1-ca76-000000000113 | TIMING | Purge temp dirs | undercloud | 0:35:06.743260 | 0.26s 2026-01-22 13:14:12.784281 | fa163e0d-6f45-64a1-ca76-000000000113 | TIMING | Purge temp dirs | undercloud | 0:35:06.749280 | 0.27s PLAY RECAP ********************************************************************* localhost : ok=1 changed=0 unreachable=0 failed=0 skipped=2 rescued=0 ignored=0 standalone : ok=797 changed=346 unreachable=0 failed=0 skipped=318 rescued=0 ignored=0 undercloud : ok=130 changed=53 unreachable=0 failed=0 skipped=10 rescued=0 ignored=2 2026-01-22 13:14:12.858981 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Summary Information ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 2026-01-22 13:14:12.859570 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Total Tasks: 1295 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 2026-01-22 13:14:12.860106 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Elapsed Time: 0:35:06.825120 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 2026-01-22 13:14:12.860581 | UUID | Info | Host | Task Name | Run Time 2026-01-22 13:14:12.861121 | fa163e0d-6f45-64a1-ca76-0000000022b6 | SUMMARY | standalone | Pre-fetch all the containers | 284.79s 2026-01-22 13:14:12.861640 | fa163e0d-6f45-64a1-ca76-000000000112 | SUMMARY | undercloud | Configure octavia on overcloud | 195.83s 2026-01-22 13:14:12.862287 | fa163e0d-6f45-64a1-ca76-000000004a1b | SUMMARY | standalone | tripleo_container_manage : Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_4 | 127.95s 2026-01-22 13:14:12.862801 | fa163e0d-6f45-64a1-ca76-00000000307f | SUMMARY | standalone | tripleo_ha_wrapper : Run init bundle puppet on the host for haproxy | 106.90s 2026-01-22 13:14:12.863324 | fa163e0d-6f45-64a1-ca76-000000002cab | SUMMARY | standalone | Wait for puppet host configuration to finish | 61.56s 2026-01-22 13:14:12.863887 | fa163e0d-6f45-64a1-ca76-0000000038a0 | SUMMARY | standalone | tripleo_container_manage : Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_3 | 51.70s 2026-01-22 13:14:12.864400 | fa163e0d-6f45-64a1-ca76-00000000487f | SUMMARY | standalone | Wait for puppet host configuration to finish | 30.92s 2026-01-22 13:14:12.865339 | fa163e0d-6f45-64a1-ca76-000000002e49 | SUMMARY | standalone | tripleo_container_manage : Create containers managed by Podman for /var/lib/tripleo-config/container-puppet-config/step_1 | 30.86s 2026-01-22 13:14:12.865912 | fa163e0d-6f45-64a1-ca76-0000000030ab | SUMMARY | standalone | tripleo_ha_wrapper : Run init bundle puppet on the host for mysql | 29.30s 2026-01-22 13:14:12.866425 | fa163e0d-6f45-64a1-ca76-000000003103 | SUMMARY | standalone | tripleo_ha_wrapper : Run init 
bundle puppet on the host for redis | 28.08s 2026-01-22 13:14:12.867033 | fa163e0d-6f45-64a1-ca76-0000000038a1 | SUMMARY | standalone | tripleo_container_manage : Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_3 | 27.37s 2026-01-22 13:14:12.867769 | fa163e0d-6f45-64a1-ca76-0000000030d7 | SUMMARY | standalone | tripleo_ha_wrapper : Run init bundle puppet on the host for oslo_messaging_rpc | 27.06s 2026-01-22 13:14:12.868330 | fa163e0d-6f45-64a1-ca76-000000004db1 | SUMMARY | standalone | tripleo_container_manage : Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_5 | 25.60s 2026-01-22 13:14:12.869074 | fa163e0d-6f45-64a1-ca76-000000004b42 | SUMMARY | standalone | tripleo_ha_wrapper : Run init bundle puppet on the host for cinder_volume | 25.07s 2026-01-22 13:14:12.869773 | fa163e0d-6f45-64a1-ca76-00000000220e | SUMMARY | standalone | tripleo_container_tag : Pull registry.redhat.io/rhosp-rhel9/openstack-cinder-backup:17.1 image | 25.04s 2026-01-22 13:14:12.870445 | fa163e0d-6f45-64a1-ca76-000000004db2 | SUMMARY | standalone | tripleo_container_manage : Manage container systemd services and cleanup old systemd healthchecks for /var/lib/tripleo-config/container-startup-config/step_5 | 24.83s 2026-01-22 13:14:12.871361 | fa163e0d-6f45-64a1-ca76-000000004b16 | SUMMARY | standalone | tripleo_ha_wrapper : Run init bundle puppet on the host for cinder_backup | 24.82s 2026-01-22 13:14:12.872171 | fa163e0d-6f45-64a1-ca76-000000004b6e | SUMMARY | standalone | tripleo_ha_wrapper : Run init bundle puppet on the host for manila_share | 24.26s 2026-01-22 13:14:12.873375 | fa163e0d-6f45-64a1-ca76-00000000333c | SUMMARY | standalone | tripleo_container_manage : Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_2 | 23.16s 2026-01-22 13:14:12.874073 | fa163e0d-6f45-64a1-ca76-000000004a1a | SUMMARY | standalone | tripleo_container_manage : Create containers managed by Podman for /var/lib/tripleo-config/container-startup-config/step_4 | 21.61s 2026-01-22 13:14:12.874717 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ End Summary Information ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Not cleaning working directory /root/tripleo-heat-installer-templates Not cleaning ansible directory /root/standalone-ansible-mz1ymllk Install artifact is located at /root/standalone-install-20260122131413.tar.bzip2 ######################################################## Deployment successful! ######################################################## ########################################################## Useful files: The clouds.yaml file is at ~/.config/openstack/clouds.yaml Use "export OS_CLOUD=standalone" before running the openstack command. ########################################################## Writing the stack virtual update mark file /var/lib/tripleo-heat-installer/update_mark_standalone + [[ libvirt == \i\r\o\n\i\c ]] + exit 0 + deploy_result=0 + ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i /home/zuul/.ssh/id_rsa root@192.168.122.100 'rm -f /tmp/standalone-deploy.sh' Warning: Permanently added '192.168.122.100' (ED25519) to the list of known hosts. 
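For reference, the "Configure Octavia command is: ..." debug record above captures the exact invocation the deploy script used for the Octavia configuration step (the task that then ran for 195.83s). Broken out onto separate lines it looks like the sketch below; the /root/standalone-ansible-mz1ymllk working directory is generated per run, so the path will differ on any other deployment.

  # Exact command from the "Configure octavia command" debug record above,
  # reformatted for readability; the working directory name is per-run.
  ANSIBLE_CONFIG="/root/standalone-ansible-mz1ymllk/ansible.cfg" \
    ansible-playbook \
      -i "/root/standalone-ansible-mz1ymllk/octavia-ansible/inventory.yaml" \
      --extra-vars @/root/standalone-ansible-mz1ymllk/octavia-ansible/group_vars/octavia_vars.yaml \
      /usr/share/ansible/tripleo-playbooks/octavia-files.yaml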
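The "Useful files" note above states that the installer wrote ~/.config/openstack/clouds.yaml with a "standalone" cloud entry and that OS_CLOUD should be exported before running the openstack client. A minimal smoke test along those lines is sketched here; the specific subcommands are illustrative rather than taken from this log, and the load balancer check assumes the Octavia client plugin is installed.

  # Point the openstack client at the cloud entry written by the installer.
  export OS_CLOUD=standalone
  openstack endpoint list        # any read-only call confirms the credentials work
  openstack loadbalancer list    # assumes python-octaviaclient is available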
+ exit 0 + rv=0 + rm -rf -- /tmp/tmp.BZ3C5ORKFo + exit 0 ~/ci-framework-data/artifacts home/zuul/zuul-output/logs/ci-framework-data/logs/ansible.log0000644000175000017500000123070515134437262023466 0ustar zuulzuul2026-01-22 12:04:27,188 p=31865 u=zuul n=ansible | Installing 'community.crypto:2.22.3' to '/home/zuul/.ansible/collections/ansible_collections/community/crypto' 2026-01-22 12:04:27,344 p=31865 u=zuul n=ansible | Created collection for community.crypto:2.22.3 at /home/zuul/.ansible/collections/ansible_collections/community/crypto 2026-01-22 12:04:27,344 p=31865 u=zuul n=ansible | community.crypto:2.22.3 was installed successfully 2026-01-22 12:04:27,344 p=31865 u=zuul n=ansible |
Installing 'kubernetes.core:5.0.0' to '/home/zuul/.ansible/collections/ansible_collections/kubernetes/core' 2026-01-22 12:04:27,470 p=31865 u=zuul n=ansible | Created collection for kubernetes.core:5.0.0 at /home/zuul/.ansible/collections/ansible_collections/kubernetes/core 2026-01-22 12:04:27,470 p=31865 u=zuul n=ansible | kubernetes.core:5.0.0 was installed successfully 2026-01-22 12:04:27,470 p=31865 u=zuul n=ansible | Installing 'ansible.netcommon:7.1.0' to '/home/zuul/.ansible/collections/ansible_collections/ansible/netcommon' 2026-01-22 12:04:27,539 p=31865 u=zuul n=ansible | Created collection for ansible.netcommon:7.1.0 at /home/zuul/.ansible/collections/ansible_collections/ansible/netcommon 2026-01-22 12:04:27,540 p=31865 u=zuul n=ansible | ansible.netcommon:7.1.0 was installed successfully 2026-01-22 12:04:27,540 p=31865 u=zuul n=ansible | Installing 'openstack.config_template:2.1.1' to '/home/zuul/.ansible/collections/ansible_collections/openstack/config_template' 2026-01-22 12:04:27,557 p=31865 u=zuul n=ansible | Created collection for openstack.config_template:2.1.1 at /home/zuul/.ansible/collections/ansible_collections/openstack/config_template 2026-01-22 12:04:27,557 p=31865 u=zuul n=ansible | openstack.config_template:2.1.1 was installed successfully 2026-01-22 12:04:27,557 p=31865 u=zuul n=ansible | Installing 'junipernetworks.junos:9.1.0' to '/home/zuul/.ansible/collections/ansible_collections/junipernetworks/junos' 2026-01-22 12:04:27,787 p=31865 u=zuul n=ansible | Created collection for junipernetworks.junos:9.1.0 at /home/zuul/.ansible/collections/ansible_collections/junipernetworks/junos 2026-01-22 12:04:27,787 p=31865 u=zuul n=ansible | junipernetworks.junos:9.1.0 was installed successfully 2026-01-22 12:04:27,787 p=31865 u=zuul n=ansible | Installing 'cisco.ios:9.0.3' to '/home/zuul/.ansible/collections/ansible_collections/cisco/ios' 2026-01-22 12:04:28,061 p=31865 u=zuul n=ansible | Created collection for cisco.ios:9.0.3 at /home/zuul/.ansible/collections/ansible_collections/cisco/ios 2026-01-22 12:04:28,061 p=31865 u=zuul n=ansible | cisco.ios:9.0.3 was installed successfully 2026-01-22 12:04:28,061 p=31865 u=zuul n=ansible | Installing 'mellanox.onyx:1.0.0' to '/home/zuul/.ansible/collections/ansible_collections/mellanox/onyx' 2026-01-22 12:04:28,093 p=31865 u=zuul n=ansible | Created collection for mellanox.onyx:1.0.0 at /home/zuul/.ansible/collections/ansible_collections/mellanox/onyx 2026-01-22 12:04:28,093 p=31865 u=zuul n=ansible | mellanox.onyx:1.0.0 was installed successfully 2026-01-22 12:04:28,093 p=31865 u=zuul n=ansible | Installing 'community.okd:4.0.0' to '/home/zuul/.ansible/collections/ansible_collections/community/okd' 2026-01-22 12:04:28,122 p=31865 u=zuul n=ansible | Created collection for community.okd:4.0.0 at /home/zuul/.ansible/collections/ansible_collections/community/okd 2026-01-22 12:04:28,122 p=31865 u=zuul n=ansible | community.okd:4.0.0 was installed successfully 2026-01-22 12:04:28,122 p=31865 u=zuul n=ansible | Installing '@NAMESPACE@.@NAME@:3.1.4' to '/home/zuul/.ansible/collections/ansible_collections/@NAMESPACE@/@NAME@' 2026-01-22 12:04:28,213 p=31865 u=zuul n=ansible | Created collection for @NAMESPACE@.@NAME@:3.1.4 at /home/zuul/.ansible/collections/ansible_collections/@NAMESPACE@/@NAME@ 2026-01-22 12:04:28,213 p=31865 u=zuul n=ansible | @NAMESPACE@.@NAME@:3.1.4 was installed successfully 2026-01-22 12:04:46,624 p=33295 u=zuul n=ansible | [WARNING]: Invalid characters were found in group names but not replaced, use -vvvv to see 
details 2026-01-22 12:04:47,311 p=33295 u=zuul n=ansible | PLAY [localhost] *************************************************************** 2026-01-22 12:04:47,333 p=33295 u=zuul n=ansible | TASK [Gathering Facts ] ******************************************************** 2026-01-22 12:04:47,333 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:47 +0000 (0:00:00.042) 0:00:00.042 ****** 2026-01-22 12:04:47,333 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:47 +0000 (0:00:00.040) 0:00:00.040 ****** 2026-01-22 12:04:48,447 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:04:48,470 p=33295 u=zuul n=ansible | TASK [Create bootstrap_common_vars file src={{ playbook_dir }}/files/common_bootstrap_params.yml.j2, dest={{ bootstrap_common_vars }}] *** 2026-01-22 12:04:48,470 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:48 +0000 (0:00:01.136) 0:00:01.179 ****** 2026-01-22 12:04:48,470 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:48 +0000 (0:00:01.137) 0:00:01.177 ****** 2026-01-22 12:04:49,175 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:04:49,182 p=33295 u=zuul n=ansible | TASK [Set vars as fact - bootstrap common vars name=cifmw_helpers, tasks_from=var_file.yml] *** 2026-01-22 12:04:49,182 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.712) 0:00:01.891 ****** 2026-01-22 12:04:49,182 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.712) 0:00:01.889 ****** 2026-01-22 12:04:49,326 p=33295 u=zuul n=ansible | TASK [cifmw_helpers : Fail if file is not yaml or yml extension msg=File needs to be yaml/yml extension] *** 2026-01-22 12:04:49,327 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.144) 0:00:02.036 ****** 2026-01-22 12:04:49,327 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.144) 0:00:02.034 ****** 2026-01-22 12:04:49,348 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:04:49,355 p=33295 u=zuul n=ansible | TASK [cifmw_helpers : Check if file is available path={{ provided_file | trim }}] *** 2026-01-22 12:04:49,355 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.028) 0:00:02.064 ****** 2026-01-22 12:04:49,355 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.028) 0:00:02.062 ****** 2026-01-22 12:04:49,547 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:04:49,558 p=33295 u=zuul n=ansible | TASK [cifmw_helpers : Read the vars] ******************************************* 2026-01-22 12:04:49,558 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.203) 0:00:02.268 ****** 2026-01-22 12:04:49,558 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.203) 0:00:02.266 ****** 2026-01-22 12:04:49,807 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:04:49,816 p=33295 u=zuul n=ansible | TASK [cifmw_helpers : Set vars as fact] **************************************** 2026-01-22 12:04:49,816 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.257) 0:00:02.525 ****** 2026-01-22 12:04:49,816 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.257) 0:00:02.523 ****** 2026-01-22 12:04:49,839 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:04:49,843 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:04:49,854 p=33295 u=zuul n=ansible | TASK [Include vars from cifmw_extras _raw_params={{ item | replace('@','') }}] *** 2026-01-22 
12:04:49,854 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.038) 0:00:02.564 ****** 2026-01-22 12:04:49,854 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.038) 0:00:02.562 ****** 2026-01-22 12:04:49,875 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:04:49,882 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Set custom cifmw PATH reusable fact cifmw_path={{ ansible_user_dir }}/.crc/bin:{{ ansible_user_dir }}/.crc/bin/oc:{{ ansible_user_dir }}/bin:{{ ansible_env.PATH }}, cacheable=True] *** 2026-01-22 12:04:49,882 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.027) 0:00:02.591 ****** 2026-01-22 12:04:49,882 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.027) 0:00:02.589 ****** 2026-01-22 12:04:49,907 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:04:49,914 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Get customized parameters ci_framework_params={{ hostvars[inventory_hostname] | dict2items | selectattr("key", "match", "^(cifmw|pre|post)_(?!install_yamls|openshift_token|openshift_login|openshift_kubeconfig).*") | list | items2dict }}] *** 2026-01-22 12:04:49,914 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.032) 0:00:02.624 ****** 2026-01-22 12:04:49,914 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.032) 0:00:02.622 ****** 2026-01-22 12:04:49,965 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:04:49,972 p=33295 u=zuul n=ansible | TASK [install_ca : Ensure target directory exists path={{ cifmw_install_ca_trust_dir }}, state=directory, mode=0755] *** 2026-01-22 12:04:49,973 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.058) 0:00:02.682 ****** 2026-01-22 12:04:49,973 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:49 +0000 (0:00:00.058) 0:00:02.680 ****** 2026-01-22 12:04:50,406 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:04:50,416 p=33295 u=zuul n=ansible | TASK [install_ca : Install internal CA from url url={{ cifmw_install_ca_url }}, dest={{ cifmw_install_ca_trust_dir }}, validate_certs={{ cifmw_install_ca_url_validate_certs | default(omit) }}, mode=0644] *** 2026-01-22 12:04:50,416 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:50 +0000 (0:00:00.443) 0:00:03.126 ****** 2026-01-22 12:04:50,416 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:50 +0000 (0:00:00.443) 0:00:03.124 ****** 2026-01-22 12:04:50,454 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:04:50,471 p=33295 u=zuul n=ansible | TASK [install_ca : Install custom CA bundle from inline dest={{ cifmw_install_ca_trust_dir }}/cifmw_inline_ca_bundle.crt, content={{ cifmw_install_ca_bundle_inline }}, mode=0644] *** 2026-01-22 12:04:50,472 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:50 +0000 (0:00:00.055) 0:00:03.181 ****** 2026-01-22 12:04:50,472 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:50 +0000 (0:00:00.055) 0:00:03.179 ****** 2026-01-22 12:04:50,510 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:04:50,519 p=33295 u=zuul n=ansible | TASK [install_ca : Install custom CA bundle from file dest={{ cifmw_install_ca_trust_dir }}/{{ cifmw_install_ca_bundle_src | basename }}, src={{ cifmw_install_ca_bundle_src }}, mode=0644] *** 2026-01-22 12:04:50,519 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:50 +0000 (0:00:00.047) 0:00:03.229 ****** 2026-01-22 12:04:50,519 p=33295 u=zuul n=ansible | 
Thursday 22 January 2026 12:04:50 +0000 (0:00:00.047) 0:00:03.227 ****** 2026-01-22 12:04:50,544 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:04:50,551 p=33295 u=zuul n=ansible | TASK [install_ca : Update ca bundle _raw_params=update-ca-trust] *************** 2026-01-22 12:04:50,552 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:50 +0000 (0:00:00.032) 0:00:03.261 ****** 2026-01-22 12:04:50,552 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:50 +0000 (0:00:00.032) 0:00:03.259 ****** 2026-01-22 12:04:52,108 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:04:52,122 p=33295 u=zuul n=ansible | TASK [repo_setup : Ensure directories are present path={{ cifmw_repo_setup_basedir }}/{{ item }}, state=directory, mode=0755] *** 2026-01-22 12:04:52,122 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:52 +0000 (0:00:01.570) 0:00:04.831 ****** 2026-01-22 12:04:52,122 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:52 +0000 (0:00:01.570) 0:00:04.829 ****** 2026-01-22 12:04:52,316 p=33295 u=zuul n=ansible | changed: [localhost] => (item=tmp) 2026-01-22 12:04:52,498 p=33295 u=zuul n=ansible | changed: [localhost] => (item=artifacts/repositories) 2026-01-22 12:04:52,702 p=33295 u=zuul n=ansible | changed: [localhost] => (item=venv/repo_setup) 2026-01-22 12:04:52,713 p=33295 u=zuul n=ansible | TASK [repo_setup : Make sure git-core package is installed name=git-core, state=present] *** 2026-01-22 12:04:52,713 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:52 +0000 (0:00:00.591) 0:00:05.423 ****** 2026-01-22 12:04:52,714 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:52 +0000 (0:00:00.591) 0:00:05.421 ****** 2026-01-22 12:04:53,725 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:04:53,735 p=33295 u=zuul n=ansible | TASK [repo_setup : Get repo-setup repository accept_hostkey=True, dest={{ cifmw_repo_setup_basedir }}/tmp/repo-setup, repo={{ cifmw_repo_setup_src }}] *** 2026-01-22 12:04:53,735 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:53 +0000 (0:00:01.021) 0:00:06.444 ****** 2026-01-22 12:04:53,735 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:53 +0000 (0:00:01.021) 0:00:06.442 ****** 2026-01-22 12:04:54,697 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:04:54,709 p=33295 u=zuul n=ansible | TASK [repo_setup : Initialize python venv and install requirements virtualenv={{ cifmw_repo_setup_venv }}, requirements={{ cifmw_repo_setup_basedir }}/tmp/repo-setup/requirements.txt, virtualenv_command=python3 -m venv --system-site-packages --upgrade-deps] *** 2026-01-22 12:04:54,709 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:54 +0000 (0:00:00.974) 0:00:07.418 ****** 2026-01-22 12:04:54,709 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:04:54 +0000 (0:00:00.974) 0:00:07.416 ****** 2026-01-22 12:05:04,192 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:04,200 p=33295 u=zuul n=ansible | TASK [repo_setup : Install repo-setup package chdir={{ cifmw_repo_setup_basedir }}/tmp/repo-setup, creates={{ cifmw_repo_setup_venv }}/bin/repo-setup, _raw_params={{ cifmw_repo_setup_venv }}/bin/python setup.py install] *** 2026-01-22 12:05:04,200 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:04 +0000 (0:00:09.490) 0:00:16.909 ****** 2026-01-22 12:05:04,200 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:04 +0000 (0:00:09.490) 0:00:16.907 ****** 2026-01-22 12:05:04,984 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:04,993 
p=33295 u=zuul n=ansible | TASK [repo_setup : Set cifmw_repo_setup_dlrn_hash_tag from content provider cifmw_repo_setup_dlrn_hash_tag={{ content_provider_dlrn_md5_hash }}] *** 2026-01-22 12:05:04,993 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:04 +0000 (0:00:00.793) 0:00:17.703 ****** 2026-01-22 12:05:04,994 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:04 +0000 (0:00:00.793) 0:00:17.701 ****** 2026-01-22 12:05:05,016 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:05,023 p=33295 u=zuul n=ansible | TASK [repo_setup : Run repo-setup _raw_params={{ cifmw_repo_setup_venv }}/bin/repo-setup {{ cifmw_repo_setup_promotion }} {{ cifmw_repo_setup_additional_repos }} -d {{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }} -b {{ cifmw_repo_setup_branch }} --rdo-mirror {{ cifmw_repo_setup_rdo_mirror }} {% if cifmw_repo_setup_dlrn_hash_tag | length > 0 %} --dlrn-hash-tag {{ cifmw_repo_setup_dlrn_hash_tag }} {% endif %} -o {{ cifmw_repo_setup_output }}] *** 2026-01-22 12:05:05,023 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:05 +0000 (0:00:00.029) 0:00:17.733 ****** 2026-01-22 12:05:05,023 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:05 +0000 (0:00:00.029) 0:00:17.731 ****** 2026-01-22 12:05:05,640 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:05,648 p=33295 u=zuul n=ansible | TASK [repo_setup : Get component repo url={{ cifmw_repo_setup_dlrn_uri }}/{{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }}-{{ cifmw_repo_setup_branch }}/component/{{ cifmw_repo_setup_component_name }}/{{ cifmw_repo_setup_component_promotion_tag }}/delorean.repo, dest={{ cifmw_repo_setup_output }}/{{ cifmw_repo_setup_component_name }}_{{ cifmw_repo_setup_component_promotion_tag }}_delorean.repo, mode=0644] *** 2026-01-22 12:05:05,649 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:05 +0000 (0:00:00.625) 0:00:18.358 ****** 2026-01-22 12:05:05,649 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:05 +0000 (0:00:00.625) 0:00:18.356 ****** 2026-01-22 12:05:05,679 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:05,688 p=33295 u=zuul n=ansible | TASK [repo_setup : Rename component repo path={{ cifmw_repo_setup_output }}/{{ cifmw_repo_setup_component_name }}_{{ cifmw_repo_setup_component_promotion_tag }}_delorean.repo, regexp=delorean-component-{{ cifmw_repo_setup_component_name }}, replace={{ cifmw_repo_setup_component_name }}-{{ cifmw_repo_setup_component_promotion_tag }}] *** 2026-01-22 12:05:05,688 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:05 +0000 (0:00:00.039) 0:00:18.397 ****** 2026-01-22 12:05:05,688 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:05 +0000 (0:00:00.039) 0:00:18.395 ****** 2026-01-22 12:05:05,719 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:05,727 p=33295 u=zuul n=ansible | TASK [repo_setup : Disable component repo in current-podified dlrn repo path={{ cifmw_repo_setup_output }}/delorean.repo, section=delorean-component-{{ cifmw_repo_setup_component_name }}, option=enabled, value=0, mode=0644] *** 2026-01-22 12:05:05,728 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:05 +0000 (0:00:00.039) 0:00:18.437 ****** 2026-01-22 12:05:05,728 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:05 +0000 (0:00:00.039) 0:00:18.435 ****** 2026-01-22 12:05:05,762 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:05,770 p=33295 u=zuul n=ansible | TASK [repo_setup : Run 
repo-setup-get-hash _raw_params={{ cifmw_repo_setup_venv }}/bin/repo-setup-get-hash --dlrn-url {{ cifmw_repo_setup_dlrn_uri[:-1] }} --os-version {{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }} --release {{ cifmw_repo_setup_branch }} {% if cifmw_repo_setup_component_name | length > 0 -%} --component {{ cifmw_repo_setup_component_name }} --tag {{ cifmw_repo_setup_component_promotion_tag }} {% else -%} --tag {{cifmw_repo_setup_promotion }} {% endif -%} {% if (cifmw_repo_setup_dlrn_hash_tag | length > 0) and (cifmw_repo_setup_component_name | length <= 0) -%} --dlrn-hash-tag {{ cifmw_repo_setup_dlrn_hash_tag }} {% endif -%} --json] *** 2026-01-22 12:05:05,770 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:05 +0000 (0:00:00.042) 0:00:18.479 ****** 2026-01-22 12:05:05,770 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:05 +0000 (0:00:00.042) 0:00:18.477 ****** 2026-01-22 12:05:06,223 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:06,231 p=33295 u=zuul n=ansible | TASK [repo_setup : Dump full hash in delorean.repo.md5 file content={{ _repo_setup_json['full_hash'] }} , dest={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5, mode=0644] *** 2026-01-22 12:05:06,231 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.460) 0:00:18.940 ****** 2026-01-22 12:05:06,231 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.460) 0:00:18.938 ****** 2026-01-22 12:05:06,643 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:06,651 p=33295 u=zuul n=ansible | TASK [repo_setup : Dump current-podified hash url={{ cifmw_repo_setup_dlrn_uri }}/{{ cifmw_repo_setup_os_release }}{{ cifmw_repo_setup_dist_major_version }}-{{ cifmw_repo_setup_branch }}/current-podified/delorean.repo.md5, dest={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5, mode=0644] *** 2026-01-22 12:05:06,651 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.419) 0:00:19.360 ****** 2026-01-22 12:05:06,651 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.420) 0:00:19.358 ****** 2026-01-22 12:05:06,666 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:06,674 p=33295 u=zuul n=ansible | TASK [repo_setup : Slurp current podified hash src={{ cifmw_repo_setup_basedir }}/artifacts/repositories/delorean.repo.md5] *** 2026-01-22 12:05:06,674 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.023) 0:00:19.384 ****** 2026-01-22 12:05:06,674 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.023) 0:00:19.382 ****** 2026-01-22 12:05:06,689 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:06,697 p=33295 u=zuul n=ansible | TASK [repo_setup : Update the value of full_hash _repo_setup_json={{ _repo_setup_json | combine({'full_hash': _hash}, recursive=true) }}] *** 2026-01-22 12:05:06,697 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.022) 0:00:19.406 ****** 2026-01-22 12:05:06,697 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.022) 0:00:19.404 ****** 2026-01-22 12:05:06,712 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:06,720 p=33295 u=zuul n=ansible | TASK [repo_setup : Export hashes facts for further use cifmw_repo_setup_full_hash={{ _repo_setup_json['full_hash'] }}, cifmw_repo_setup_commit_hash={{ _repo_setup_json['commit_hash'] }}, cifmw_repo_setup_distro_hash={{ 
_repo_setup_json['distro_hash'] }}, cifmw_repo_setup_extended_hash={{ _repo_setup_json['extended_hash'] }}, cifmw_repo_setup_dlrn_api_url={{ _repo_setup_json['dlrn_api_url'] }}, cifmw_repo_setup_dlrn_url={{ _repo_setup_json['dlrn_url'] }}, cifmw_repo_setup_release={{ _repo_setup_json['release'] }}, cacheable=True] *** 2026-01-22 12:05:06,720 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.022) 0:00:19.429 ****** 2026-01-22 12:05:06,720 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.022) 0:00:19.427 ****** 2026-01-22 12:05:06,746 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:05:06,753 p=33295 u=zuul n=ansible | TASK [repo_setup : Create download directory path={{ cifmw_repo_setup_rhos_release_path }}, state=directory, mode=0755] *** 2026-01-22 12:05:06,753 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.033) 0:00:19.462 ****** 2026-01-22 12:05:06,753 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.033) 0:00:19.460 ****** 2026-01-22 12:05:06,767 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:06,776 p=33295 u=zuul n=ansible | TASK [repo_setup : Print the URL to request msg={{ cifmw_repo_setup_rhos_release_rpm }}] *** 2026-01-22 12:05:06,776 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.022) 0:00:19.485 ****** 2026-01-22 12:05:06,776 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.022) 0:00:19.483 ****** 2026-01-22 12:05:06,790 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:06,798 p=33295 u=zuul n=ansible | TASK [Download the RPM name=krb_request] *************************************** 2026-01-22 12:05:06,798 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.022) 0:00:19.507 ****** 2026-01-22 12:05:06,798 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.022) 0:00:19.505 ****** 2026-01-22 12:05:06,812 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:06,820 p=33295 u=zuul n=ansible | TASK [repo_setup : Install RHOS Release tool name={{ cifmw_repo_setup_rhos_release_rpm if cifmw_repo_setup_rhos_release_rpm is not url else cifmw_krb_request_out.path }}, state=present, disable_gpg_check={{ cifmw_repo_setup_rhos_release_gpg_check | bool }}] *** 2026-01-22 12:05:06,820 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.021) 0:00:19.529 ****** 2026-01-22 12:05:06,820 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.022) 0:00:19.527 ****** 2026-01-22 12:05:06,833 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:06,841 p=33295 u=zuul n=ansible | TASK [repo_setup : Get rhos-release tool version _raw_params=rhos-release --version] *** 2026-01-22 12:05:06,841 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.021) 0:00:19.551 ****** 2026-01-22 12:05:06,841 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.021) 0:00:19.549 ****** 2026-01-22 12:05:06,861 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:06,871 p=33295 u=zuul n=ansible | TASK [repo_setup : Print rhos-release tool version msg={{ rr_version.stdout }}] *** 2026-01-22 12:05:06,871 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.030) 0:00:19.581 ****** 2026-01-22 12:05:06,872 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.030) 0:00:19.579 ****** 2026-01-22 12:05:06,891 
p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:06,900 p=33295 u=zuul n=ansible | TASK [repo_setup : Generate repos using rhos-release {{ cifmw_repo_setup_rhos_release_args }} _raw_params=rhos-release {{ cifmw_repo_setup_rhos_release_args }} \ -t {{ cifmw_repo_setup_output }}] *** 2026-01-22 12:05:06,900 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.028) 0:00:19.609 ****** 2026-01-22 12:05:06,900 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.028) 0:00:19.607 ****** 2026-01-22 12:05:06,914 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:06,922 p=33295 u=zuul n=ansible | TASK [repo_setup : Check for /etc/ci/mirror_info.sh path=/etc/ci/mirror_info.sh] *** 2026-01-22 12:05:06,922 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.021) 0:00:19.631 ****** 2026-01-22 12:05:06,922 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:06 +0000 (0:00:00.022) 0:00:19.629 ****** 2026-01-22 12:05:07,114 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:05:07,122 p=33295 u=zuul n=ansible | TASK [repo_setup : Use RDO proxy mirrors chdir={{ cifmw_repo_setup_output }}, _raw_params=set -o pipefail source /etc/ci/mirror_info.sh sed -i -e "s|https://trunk.rdoproject.org|$NODEPOOL_RDO_PROXY|g" *.repo ] *** 2026-01-22 12:05:07,122 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.200) 0:00:19.831 ****** 2026-01-22 12:05:07,122 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.200) 0:00:19.829 ****** 2026-01-22 12:05:07,342 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:07,349 p=33295 u=zuul n=ansible | TASK [repo_setup : Use RDO CentOS mirrors (remove CentOS 10 conditional when Nodepool mirrors exist) chdir={{ cifmw_repo_setup_output }}, _raw_params=set -o pipefail source /etc/ci/mirror_info.sh sed -i -e "s|http://mirror.stream.centos.org|$NODEPOOL_CENTOS_MIRROR|g" *.repo ] *** 2026-01-22 12:05:07,350 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.227) 0:00:20.059 ****** 2026-01-22 12:05:07,350 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.227) 0:00:20.057 ****** 2026-01-22 12:05:07,567 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:07,576 p=33295 u=zuul n=ansible | TASK [repo_setup : Check for gating.repo file on content provider url=http://{{ content_provider_registry_ip }}:8766/gating.repo] *** 2026-01-22 12:05:07,576 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.226) 0:00:20.286 ****** 2026-01-22 12:05:07,577 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.226) 0:00:20.284 ****** 2026-01-22 12:05:07,598 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:07,605 p=33295 u=zuul n=ansible | TASK [repo_setup : Populate gating repo from content provider ip content=[gating-repo] baseurl=http://{{ content_provider_registry_ip }}:8766/ enabled=1 gpgcheck=0 priority=1 , dest={{ cifmw_repo_setup_output }}/gating.repo, mode=0644] *** 2026-01-22 12:05:07,605 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.028) 0:00:20.315 ****** 2026-01-22 12:05:07,605 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.028) 0:00:20.313 ****** 2026-01-22 12:05:07,632 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:07,644 p=33295 u=zuul n=ansible | TASK [repo_setup : Check for DLRN repo at the destination path={{ 
cifmw_repo_setup_output }}/delorean.repo] *** 2026-01-22 12:05:07,644 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.038) 0:00:20.353 ****** 2026-01-22 12:05:07,644 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.038) 0:00:20.351 ****** 2026-01-22 12:05:07,673 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:07,683 p=33295 u=zuul n=ansible | TASK [repo_setup : Lower the priority of DLRN repos to allow installation from gating repo path={{ cifmw_repo_setup_output }}/delorean.repo, regexp=priority=1, replace=priority=20] *** 2026-01-22 12:05:07,683 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.039) 0:00:20.393 ****** 2026-01-22 12:05:07,683 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.039) 0:00:20.391 ****** 2026-01-22 12:05:07,708 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:07,719 p=33295 u=zuul n=ansible | TASK [repo_setup : Check for DLRN component repo path={{ cifmw_repo_setup_output }}/{{ _comp_repo }}] *** 2026-01-22 12:05:07,719 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.036) 0:00:20.429 ****** 2026-01-22 12:05:07,720 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.036) 0:00:20.427 ****** 2026-01-22 12:05:07,745 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:07,752 p=33295 u=zuul n=ansible | TASK [repo_setup : Lower the priority of componennt repos to allow installation from gating repo path={{ cifmw_repo_setup_output }}//{{ _comp_repo }}, regexp=priority=1, replace=priority=2] *** 2026-01-22 12:05:07,753 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.033) 0:00:20.462 ****** 2026-01-22 12:05:07,753 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.033) 0:00:20.460 ****** 2026-01-22 12:05:07,782 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:07,789 p=33295 u=zuul n=ansible | TASK [repo_setup : Find existing repos from /etc/yum.repos.d directory paths=/etc/yum.repos.d/, patterns=*.repo, recurse=False] *** 2026-01-22 12:05:07,789 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.036) 0:00:20.499 ****** 2026-01-22 12:05:07,789 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:07 +0000 (0:00:00.036) 0:00:20.497 ****** 2026-01-22 12:05:08,123 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:05:08,134 p=33295 u=zuul n=ansible | TASK [repo_setup : Remove existing repos from /etc/yum.repos.d directory path={{ item }}, state=absent] *** 2026-01-22 12:05:08,134 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:08 +0000 (0:00:00.344) 0:00:20.843 ****** 2026-01-22 12:05:08,134 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:08 +0000 (0:00:00.344) 0:00:20.841 ****** 2026-01-22 12:05:08,346 p=33295 u=zuul n=ansible | changed: [localhost] => (item=/etc/yum.repos.d/centos-addons.repo) 2026-01-22 12:05:08,530 p=33295 u=zuul n=ansible | changed: [localhost] => (item=/etc/yum.repos.d/centos.repo) 2026-01-22 12:05:08,539 p=33295 u=zuul n=ansible | TASK [repo_setup : Cleanup existing metadata _raw_params=dnf clean metadata] *** 2026-01-22 12:05:08,540 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:08 +0000 (0:00:00.405) 0:00:21.249 ****** 2026-01-22 12:05:08,540 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:08 +0000 (0:00:00.405) 0:00:21.247 ****** 2026-01-22 12:05:09,032 p=33295 u=zuul n=ansible | changed: [localhost] 
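The repo_setup records above swap the stock CentOS repositories for the ones generated by repo-setup: the existing /etc/yum.repos.d files are removed, the dnf metadata is cleaned, and the generated repos are copied into /etc/yum.repos.d in the record that follows; the skipped priority tasks would additionally lower DLRN repos to priority=20 so a gating repo at priority=1 wins. A hand-run equivalent is sketched below, assuming cifmw_repo_setup_output resolves to /home/zuul/ci-framework-data/artifacts/repositories (as the directory-creation records earlier suggest) and that root privileges are required to touch /etc/yum.repos.d.

  # Manual equivalent of the repo_setup repo swap shown in this log.
  sudo rm -f /etc/yum.repos.d/centos.repo /etc/yum.repos.d/centos-addons.repo
  sudo dnf clean metadata
  sudo cp /home/zuul/ci-framework-data/artifacts/repositories/*.repo /etc/yum.repos.d/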
2026-01-22 12:05:09,040 p=33295 u=zuul n=ansible | TASK [repo_setup : Copy generated repos to /etc/yum.repos.d directory mode=0755, remote_src=True, src={{ cifmw_repo_setup_output }}/, dest=/etc/yum.repos.d] *** 2026-01-22 12:05:09,040 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:09 +0000 (0:00:00.500) 0:00:21.750 ****** 2026-01-22 12:05:09,040 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:09 +0000 (0:00:00.500) 0:00:21.748 ****** 2026-01-22 12:05:09,299 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:09,313 p=33295 u=zuul n=ansible | TASK [ci_setup : Gather variables for each operating system _raw_params={{ item }}] *** 2026-01-22 12:05:09,313 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:09 +0000 (0:00:00.272) 0:00:22.022 ****** 2026-01-22 12:05:09,313 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:09 +0000 (0:00:00.272) 0:00:22.020 ****** 2026-01-22 12:05:09,349 p=33295 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/ci_setup/vars/redhat.yml) 2026-01-22 12:05:09,358 p=33295 u=zuul n=ansible | TASK [ci_setup : List packages to install var=cifmw_ci_setup_packages] ********* 2026-01-22 12:05:09,358 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:09 +0000 (0:00:00.044) 0:00:22.067 ****** 2026-01-22 12:05:09,358 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:09 +0000 (0:00:00.044) 0:00:22.065 ****** 2026-01-22 12:05:09,375 p=33295 u=zuul n=ansible | ok: [localhost] => cifmw_ci_setup_packages: - bash-completion - ca-certificates - git-core - make - tar - tmux - python3-pip 2026-01-22 12:05:09,382 p=33295 u=zuul n=ansible | TASK [ci_setup : Install needed packages name={{ cifmw_ci_setup_packages }}, state=latest] *** 2026-01-22 12:05:09,382 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:09 +0000 (0:00:00.024) 0:00:22.092 ****** 2026-01-22 12:05:09,382 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:09 +0000 (0:00:00.024) 0:00:22.090 ****** 2026-01-22 12:05:52,717 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:52,732 p=33295 u=zuul n=ansible | TASK [ci_setup : Gather version of openshift client _raw_params=oc version --client -o yaml] *** 2026-01-22 12:05:52,732 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:52 +0000 (0:00:43.349) 0:01:05.441 ****** 2026-01-22 12:05:52,732 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:52 +0000 (0:00:43.349) 0:01:05.439 ****** 2026-01-22 12:05:52,914 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:05:52,922 p=33295 u=zuul n=ansible | TASK [ci_setup : Ensure openshift client install path is present path={{ cifmw_ci_setup_oc_install_path }}, state=directory, mode=0755] *** 2026-01-22 12:05:52,922 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:52 +0000 (0:00:00.190) 0:01:05.631 ****** 2026-01-22 12:05:52,922 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:52 +0000 (0:00:00.190) 0:01:05.629 ****** 2026-01-22 12:05:53,104 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:53,112 p=33295 u=zuul n=ansible | TASK [ci_setup : Install openshift client src={{ cifmw_ci_setup_openshift_client_download_uri }}/{{ cifmw_ci_setup_openshift_client_version }}/openshift-client-linux.tar.gz, dest={{ cifmw_ci_setup_oc_install_path }}, remote_src=True, mode=0755, creates={{ cifmw_ci_setup_oc_install_path }}/oc] *** 2026-01-22 12:05:53,113 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:53 +0000 (0:00:00.190) 0:01:05.822 
****** 2026-01-22 12:05:53,113 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:53 +0000 (0:00:00.190) 0:01:05.820 ****** 2026-01-22 12:05:58,406 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:58,414 p=33295 u=zuul n=ansible | TASK [ci_setup : Add the OC path to cifmw_path if needed cifmw_path={{ cifmw_ci_setup_oc_install_path }}:{{ ansible_env.PATH }}, cacheable=True] *** 2026-01-22 12:05:58,414 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:58 +0000 (0:00:05.301) 0:01:11.123 ****** 2026-01-22 12:05:58,414 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:58 +0000 (0:00:05.301) 0:01:11.121 ****** 2026-01-22 12:05:58,440 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:58,448 p=33295 u=zuul n=ansible | TASK [ci_setup : Create completion file] *************************************** 2026-01-22 12:05:58,448 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:58 +0000 (0:00:00.033) 0:01:11.157 ****** 2026-01-22 12:05:58,448 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:58 +0000 (0:00:00.033) 0:01:11.155 ****** 2026-01-22 12:05:58,765 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:58,773 p=33295 u=zuul n=ansible | TASK [ci_setup : Source completion from within .bashrc create=True, mode=0644, path={{ ansible_user_dir }}/.bashrc, block=if [ -f ~/.oc_completion ]; then source ~/.oc_completion fi] *** 2026-01-22 12:05:58,773 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:58 +0000 (0:00:00.325) 0:01:11.482 ****** 2026-01-22 12:05:58,773 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:58 +0000 (0:00:00.325) 0:01:11.480 ****** 2026-01-22 12:05:59,083 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:05:59,091 p=33295 u=zuul n=ansible | TASK [ci_setup : Check rhsm status _raw_params=subscription-manager status] **** 2026-01-22 12:05:59,091 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.317) 0:01:11.800 ****** 2026-01-22 12:05:59,091 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.317) 0:01:11.798 ****** 2026-01-22 12:05:59,107 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:59,115 p=33295 u=zuul n=ansible | TASK [ci_setup : Gather the repos to be enabled _repos={{ cifmw_ci_setup_rhel_rhsm_default_repos + (cifmw_ci_setup_rhel_rhsm_extra_repos | default([])) }}] *** 2026-01-22 12:05:59,115 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.023) 0:01:11.824 ****** 2026-01-22 12:05:59,115 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.023) 0:01:11.822 ****** 2026-01-22 12:05:59,130 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:59,137 p=33295 u=zuul n=ansible | TASK [ci_setup : Enabling the required repositories. 
name={{ item }}, state={{ rhsm_repo_state | default('enabled') }}] *** 2026-01-22 12:05:59,137 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.022) 0:01:11.847 ****** 2026-01-22 12:05:59,137 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.022) 0:01:11.845 ****** 2026-01-22 12:05:59,153 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:59,160 p=33295 u=zuul n=ansible | TASK [ci_setup : Get current /etc/redhat-release _raw_params=cat /etc/redhat-release] *** 2026-01-22 12:05:59,160 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.023) 0:01:11.870 ****** 2026-01-22 12:05:59,160 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.023) 0:01:11.868 ****** 2026-01-22 12:05:59,176 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:59,183 p=33295 u=zuul n=ansible | TASK [ci_setup : Print current /etc/redhat-release msg={{ _current_rh_release.stdout }}] *** 2026-01-22 12:05:59,183 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.022) 0:01:11.893 ****** 2026-01-22 12:05:59,183 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.022) 0:01:11.891 ****** 2026-01-22 12:05:59,198 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:59,206 p=33295 u=zuul n=ansible | TASK [ci_setup : Ensure the repos are enabled in the system using yum name={{ item.name }}, baseurl={{ item.baseurl }}, description={{ item.description | default(item.name) }}, gpgcheck={{ item.gpgcheck | default(false) }}, enabled=True, state={{ yum_repo_state | default('present') }}] *** 2026-01-22 12:05:59,206 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.022) 0:01:11.915 ****** 2026-01-22 12:05:59,206 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.022) 0:01:11.913 ****** 2026-01-22 12:05:59,226 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:05:59,233 p=33295 u=zuul n=ansible | TASK [ci_setup : Manage directories path={{ item }}, state={{ directory_state }}, mode=0755, owner={{ ansible_user_id }}, group={{ ansible_user_id }}] *** 2026-01-22 12:05:59,233 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.027) 0:01:11.943 ****** 2026-01-22 12:05:59,234 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:05:59 +0000 (0:00:00.027) 0:01:11.941 ****** 2026-01-22 12:05:59,460 p=33295 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/manifests/openstack/cr) 2026-01-22 12:05:59,645 p=33295 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/logs) 2026-01-22 12:05:59,835 p=33295 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/tmp) 2026-01-22 12:06:00,045 p=33295 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/volumes) 2026-01-22 12:06:00,239 p=33295 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/parameters) 2026-01-22 12:06:00,256 p=33295 u=zuul n=ansible | TASK [Prepare install_yamls make targets name=install_yamls, apply={'tags': ['bootstrap']}] *** 2026-01-22 12:06:00,257 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:00 +0000 (0:00:01.023) 0:01:12.966 ****** 2026-01-22 12:06:00,257 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:00 +0000 (0:00:01.023) 0:01:12.964 ****** 2026-01-22 12:06:00,392 p=33295 u=zuul n=ansible | TASK [install_yamls : Ensure directories exist path={{ 
item }}, state=directory, mode=0755] *** 2026-01-22 12:06:00,393 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:00 +0000 (0:00:00.135) 0:01:13.102 ****** 2026-01-22 12:06:00,393 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:00 +0000 (0:00:00.135) 0:01:13.100 ****** 2026-01-22 12:06:00,592 p=33295 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts) 2026-01-22 12:06:00,772 p=33295 u=zuul n=ansible | changed: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/roles/install_yamls_makes/tasks) 2026-01-22 12:06:00,961 p=33295 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/parameters) 2026-01-22 12:06:00,970 p=33295 u=zuul n=ansible | TASK [Create variables with local repos based on Zuul items name=install_yamls, tasks_from=zuul_set_operators_repo.yml] *** 2026-01-22 12:06:00,971 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:00 +0000 (0:00:00.578) 0:01:13.680 ****** 2026-01-22 12:06:00,971 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:00 +0000 (0:00:00.578) 0:01:13.678 ****** 2026-01-22 12:06:01,009 p=33295 u=zuul n=ansible | TASK [install_yamls : Set fact with local repos based on Zuul items cifmw_install_yamls_operators_repo={{ cifmw_install_yamls_operators_repo | default({}) | combine(_repo_operator_info | items2dict) }}] *** 2026-01-22 12:06:01,010 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.038) 0:01:13.719 ****** 2026-01-22 12:06:01,010 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.038) 0:01:13.717 ****** 2026-01-22 12:06:01,035 p=33295 u=zuul n=ansible | skipping: [localhost] => (item={'branch': 'main', 'change': '1202', 'change_url': 'https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202', 'commit_id': 'a6dda39287a9d88f8d44f99969c3909ff61d8792', 'patchset': 'a6dda39287a9d88f8d44f99969c3909ff61d8792', 'project': {'canonical_hostname': 'github.com', 'canonical_name': 'github.com/openstack-k8s-operators/data-plane-adoption', 'name': 'openstack-k8s-operators/data-plane-adoption', 'short_name': 'data-plane-adoption', 'src_dir': 'src/github.com/openstack-k8s-operators/data-plane-adoption'}, 'topic': None}) 2026-01-22 12:06:01,036 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:01,043 p=33295 u=zuul n=ansible | TASK [install_yamls : Print helpful data for debugging msg=_repo_operator_name: {{ _repo_operator_name }} _repo_operator_info: {{ _repo_operator_info }} cifmw_install_yamls_operators_repo: {{ cifmw_install_yamls_operators_repo }} ] *** 2026-01-22 12:06:01,043 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.033) 0:01:13.753 ****** 2026-01-22 12:06:01,043 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.033) 0:01:13.751 ****** 2026-01-22 12:06:01,070 p=33295 u=zuul n=ansible | skipping: [localhost] => (item={'branch': 'main', 'change': '1202', 'change_url': 'https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202', 'commit_id': 'a6dda39287a9d88f8d44f99969c3909ff61d8792', 'patchset': 'a6dda39287a9d88f8d44f99969c3909ff61d8792', 'project': {'canonical_hostname': 'github.com', 'canonical_name': 'github.com/openstack-k8s-operators/data-plane-adoption', 'name': 'openstack-k8s-operators/data-plane-adoption', 'short_name': 'data-plane-adoption', 'src_dir': 'src/github.com/openstack-k8s-operators/data-plane-adoption'}, 'topic': None}) 2026-01-22 12:06:01,071 p=33295 u=zuul n=ansible | skipping: [localhost] 
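The ci_setup records further up (install openshift client, create completion file, source completion from within .bashrc) boil down to unpacking the oc binary, generating a bash-completion file, and sourcing it from ~/.bashrc. A rough equivalent is sketched below; the mirror URL, client version, and ~/bin install path are assumptions, since the log only shows the unexpanded cifmw_ci_setup_* variables.

  # Approximate equivalent of the ci_setup openshift-client tasks above.
  # OC_URL and OC_VERSION are placeholders -- the log does not expand
  # cifmw_ci_setup_openshift_client_download_uri/_version.
  OC_URL="https://mirror.openshift.com/pub/openshift-v4/clients/ocp"
  OC_VERSION="stable"
  mkdir -p ~/bin
  curl -L "${OC_URL}/${OC_VERSION}/openshift-client-linux.tar.gz" | tar -xzf - -C ~/bin oc
  ~/bin/oc completion bash > ~/.oc_completion
  printf '%s\n' 'if [ -f ~/.oc_completion ]; then' '  source ~/.oc_completion' 'fi' >> ~/.bashrc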
2026-01-22 12:06:01,085 p=33295 u=zuul n=ansible | TASK [Customize install_yamls devsetup vars if needed name=install_yamls, tasks_from=customize_devsetup_vars.yml] *** 2026-01-22 12:06:01,085 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.041) 0:01:13.794 ****** 2026-01-22 12:06:01,085 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.041) 0:01:13.792 ****** 2026-01-22 12:06:01,129 p=33295 u=zuul n=ansible | TASK [install_yamls : Update opm_version in install_yamls devsetup/vars/default.yaml path={{ cifmw_install_yamls_repo }}/devsetup/vars/default.yaml, regexp=^opm_version:, line=opm_version: {{ cifmw_install_yamls_opm_version }}, state=present] *** 2026-01-22 12:06:01,129 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.043) 0:01:13.838 ****** 2026-01-22 12:06:01,129 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.043) 0:01:13.836 ****** 2026-01-22 12:06:01,149 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:01,157 p=33295 u=zuul n=ansible | TASK [install_yamls : Update sdk_version in install_yamls devsetup/vars/default.yaml path={{ cifmw_install_yamls_repo }}/devsetup/vars/default.yaml, regexp=^sdk_version:, line=sdk_version: {{ cifmw_install_yamls_sdk_version }}, state=present] *** 2026-01-22 12:06:01,157 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.028) 0:01:13.866 ****** 2026-01-22 12:06:01,157 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.028) 0:01:13.864 ****** 2026-01-22 12:06:01,179 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:01,186 p=33295 u=zuul n=ansible | TASK [install_yamls : Update go_version in install_yamls devsetup/vars/default.yaml path={{ cifmw_install_yamls_repo }}/devsetup/vars/default.yaml, regexp=^go_version:, line=go_version: {{ cifmw_install_yamls_go_version }}, state=present] *** 2026-01-22 12:06:01,186 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.029) 0:01:13.896 ****** 2026-01-22 12:06:01,187 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.029) 0:01:13.894 ****** 2026-01-22 12:06:01,207 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:01,214 p=33295 u=zuul n=ansible | TASK [install_yamls : Update kustomize_version in install_yamls devsetup/vars/default.yaml path={{ cifmw_install_yamls_repo }}/devsetup/vars/default.yaml, regexp=^kustomize_version:, line=kustomize_version: {{ cifmw_install_yamls_kustomize_version }}, state=present] *** 2026-01-22 12:06:01,214 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.027) 0:01:13.923 ****** 2026-01-22 12:06:01,214 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.027) 0:01:13.921 ****** 2026-01-22 12:06:01,234 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:01,250 p=33295 u=zuul n=ansible | TASK [install_yamls : Compute the cifmw_install_yamls_vars final value _install_yamls_override_vars={{ _install_yamls_override_vars | default({}) | combine(item, recursive=True) }}] *** 2026-01-22 12:06:01,250 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.036) 0:01:13.960 ****** 2026-01-22 12:06:01,250 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.036) 0:01:13.958 ****** 2026-01-22 12:06:01,318 p=33295 u=zuul n=ansible | ok: [localhost] => (item={'BMO_SETUP': False}) 2026-01-22 12:06:01,327 p=33295 u=zuul n=ansible | TASK 
[install_yamls : Set environment override cifmw_install_yamls_environment fact cifmw_install_yamls_environment={{ _install_yamls_override_vars.keys() | map('upper') | zip(_install_yamls_override_vars.values()) | items2dict(key_name=0, value_name=1) | combine({ 'OUT': cifmw_install_yamls_manifests_dir, 'OUTPUT_DIR': cifmw_install_yamls_edpm_dir, 'CHECKOUT_FROM_OPENSTACK_REF': cifmw_install_yamls_checkout_openstack_ref, 'OPENSTACK_K8S_BRANCH': (zuul is defined and not zuul.branch |regex_search('master|antelope|rhos')) | ternary(zuul.branch, 'main') }) | combine(install_yamls_operators_repos) }}, cacheable=True] *** 2026-01-22 12:06:01,328 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.077) 0:01:14.037 ****** 2026-01-22 12:06:01,328 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.077) 0:01:14.035 ****** 2026-01-22 12:06:01,365 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:01,371 p=33295 u=zuul n=ansible | TASK [install_yamls : Get environment structure base_path={{ cifmw_install_yamls_repo }}] *** 2026-01-22 12:06:01,371 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.043) 0:01:14.081 ****** 2026-01-22 12:06:01,372 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.043) 0:01:14.079 ****** 2026-01-22 12:06:01,944 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:01,955 p=33295 u=zuul n=ansible | TASK [install_yamls : Ensure Output directory exists path={{ cifmw_install_yamls_out_dir }}, state=directory, mode=0755] *** 2026-01-22 12:06:01,955 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.583) 0:01:14.665 ****** 2026-01-22 12:06:01,955 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:01 +0000 (0:00:00.583) 0:01:14.663 ****** 2026-01-22 12:06:02,150 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:02,157 p=33295 u=zuul n=ansible | TASK [install_yamls : Ensure user cifmw_install_yamls_vars contains existing Makefile variables that=_cifmw_install_yamls_unmatched_vars | length == 0, msg=cifmw_install_yamls_vars contains a variable that is not defined in install_yamls Makefile nor cifmw_install_yamls_whitelisted_vars: {{ _cifmw_install_yamls_unmatched_vars | join(', ')}}, quiet=True] *** 2026-01-22 12:06:02,158 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.202) 0:01:14.867 ****** 2026-01-22 12:06:02,158 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.202) 0:01:14.865 ****** 2026-01-22 12:06:02,197 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:02,211 p=33295 u=zuul n=ansible | TASK [install_yamls : Generate /home/zuul/ci-framework-data/artifacts/install_yamls.sh dest={{ cifmw_install_yamls_out_dir }}/{{ cifmw_install_yamls_envfile }}, content={% for k,v in cifmw_install_yamls_environment.items() %} export {{ k }}={{ v }} {% endfor %}, mode=0644] *** 2026-01-22 12:06:02,211 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.053) 0:01:14.920 ****** 2026-01-22 12:06:02,211 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.053) 0:01:14.918 ****** 2026-01-22 12:06:02,636 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:02,643 p=33295 u=zuul n=ansible | TASK [install_yamls : Set install_yamls default values cifmw_install_yamls_defaults={{ get_makefiles_env_output.makefiles_values | combine(cifmw_install_yamls_environment) }}, cacheable=True] *** 2026-01-22 12:06:02,643 p=33295 u=zuul 
n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.432) 0:01:15.353 ****** 2026-01-22 12:06:02,643 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.432) 0:01:15.351 ****** 2026-01-22 12:06:02,669 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:02,678 p=33295 u=zuul n=ansible | TASK [install_yamls : Show the env structure var=cifmw_install_yamls_environment] *** 2026-01-22 12:06:02,678 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.034) 0:01:15.387 ****** 2026-01-22 12:06:02,678 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.034) 0:01:15.385 ****** 2026-01-22 12:06:02,698 p=33295 u=zuul n=ansible | ok: [localhost] => cifmw_install_yamls_environment: BMO_SETUP: false CHECKOUT_FROM_OPENSTACK_REF: 'true' OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm 2026-01-22 12:06:02,708 p=33295 u=zuul n=ansible | TASK [install_yamls : Show the env structure defaults var=cifmw_install_yamls_defaults] *** 2026-01-22 12:06:02,708 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.029) 0:01:15.417 ****** 2026-01-22 12:06:02,708 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.029) 0:01:15.415 ****** 2026-01-22 12:06:02,741 p=33295 u=zuul n=ansible | ok: [localhost] => cifmw_install_yamls_defaults: ADOPTED_EXTERNAL_NETWORK: 172.21.1.0/24 ADOPTED_INTERNALAPI_NETWORK: 172.17.1.0/24 ADOPTED_STORAGEMGMT_NETWORK: 172.20.1.0/24 ADOPTED_STORAGE_NETWORK: 172.18.1.0/24 ADOPTED_TENANT_NETWORK: 172.9.1.0/24 ANSIBLEEE: config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_BRANCH: main ANSIBLEEE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-operator-index:latest ANSIBLEEE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/kuttl-test.yaml ANSIBLEEE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/test/kuttl/tests ANSIBLEEE_KUTTL_NAMESPACE: ansibleee-kuttl-tests ANSIBLEEE_REPO: https://github.com/openstack-k8s-operators/openstack-ansibleee-operator ANSIBLEE_COMMIT_HASH: '' BARBICAN: config/samples/barbican_v1beta1_barbican.yaml BARBICAN_BRANCH: main BARBICAN_COMMIT_HASH: '' BARBICAN_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/config/samples/barbican_v1beta1_barbican.yaml BARBICAN_DEPL_IMG: unused BARBICAN_IMG: quay.io/openstack-k8s-operators/barbican-operator-index:latest BARBICAN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/kuttl-test.yaml BARBICAN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/test/kuttl/tests BARBICAN_KUTTL_NAMESPACE: barbican-kuttl-tests BARBICAN_REPO: https://github.com/openstack-k8s-operators/barbican-operator.git BARBICAN_SERVICE_ENABLED: 'true' BARBICAN_SIMPLE_CRYPTO_ENCRYPTION_KEY: sE**********U= BAREMETAL_BRANCH: main BAREMETAL_COMMIT_HASH: '' BAREMETAL_IMG: quay.io/openstack-k8s-operators/openstack-baremetal-operator-index:latest BAREMETAL_OS_CONTAINER_IMG: '' BAREMETAL_OS_IMG: '' BAREMETAL_OS_IMG_TYPE: '' BAREMETAL_REPO: https://github.com/openstack-k8s-operators/openstack-baremetal-operator.git BAREMETAL_TIMEOUT: 20m BASH_IMG: quay.io/openstack-k8s-operators/bash:latest BGP_ASN: '64999' 
BGP_LEAF_1: 100.65.4.1 BGP_LEAF_2: 100.64.4.1 BGP_OVN_ROUTING: 'false' BGP_PEER_ASN: '64999' BGP_SOURCE_IP: 172.30.4.2 BGP_SOURCE_IP6: f00d:f00d:f00d:f00d:f00d:f00d:f00d:42 BMAAS_BRIDGE_IPV4_PREFIX: 172.20.1.2/24 BMAAS_BRIDGE_IPV6_PREFIX: fd00:bbbb::2/64 BMAAS_INSTANCE_DISK_SIZE: '20' BMAAS_INSTANCE_MEMORY: '4096' BMAAS_INSTANCE_NAME_PREFIX: crc-bmaas BMAAS_INSTANCE_NET_MODEL: virtio BMAAS_INSTANCE_OS_VARIANT: centos-stream9 BMAAS_INSTANCE_VCPUS: '2' BMAAS_INSTANCE_VIRT_TYPE: kvm BMAAS_IPV4: 'true' BMAAS_IPV6: 'false' BMAAS_LIBVIRT_USER: sushyemu BMAAS_METALLB_ADDRESS_POOL: 172.20.1.64/26 BMAAS_METALLB_POOL_NAME: baremetal BMAAS_NETWORK_IPV4_PREFIX: 172.20.1.1/24 BMAAS_NETWORK_IPV6_PREFIX: fd00:bbbb::1/64 BMAAS_NETWORK_NAME: crc-bmaas BMAAS_NODE_COUNT: '1' BMAAS_OCP_INSTANCE_NAME: crc BMAAS_REDFISH_PASSWORD: password BMAAS_REDFISH_USERNAME: admin BMAAS_ROUTE_LIBVIRT_NETWORKS: crc-bmaas,crc,default BMAAS_SUSHY_EMULATOR_DRIVER: libvirt BMAAS_SUSHY_EMULATOR_IMAGE: quay.io/metal3-io/sushy-tools:latest BMAAS_SUSHY_EMULATOR_NAMESPACE: sushy-emulator BMAAS_SUSHY_EMULATOR_OS_CLIENT_CONFIG_FILE: /etc/openstack/clouds.yaml BMAAS_SUSHY_EMULATOR_OS_CLOUD: openstack BMH_NAMESPACE: openstack BMO_BRANCH: release-0.9 BMO_CLEANUP: 'true' BMO_COMMIT_HASH: '' BMO_IPA_BRANCH: stable/2024.1 BMO_IRONIC_HOST: 192.168.122.10 BMO_PROVISIONING_INTERFACE: '' BMO_REPO: https://github.com/metal3-io/baremetal-operator BMO_SETUP: false BMO_SETUP_ROUTE_REPLACE: 'true' BM_CTLPLANE_INTERFACE: enp1s0 BM_INSTANCE_MEMORY: '8192' BM_INSTANCE_NAME_PREFIX: edpm-compute-baremetal BM_INSTANCE_NAME_SUFFIX: '0' BM_NETWORK_NAME: default BM_NODE_COUNT: '1' BM_ROOT_PASSWORD: '' BM_ROOT_PASSWORD_SECRET: '' CEILOMETER_CENTRAL_DEPL_IMG: unused CEILOMETER_NOTIFICATION_DEPL_IMG: unused CEPH_BRANCH: release-1.15 CEPH_CLIENT: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/toolbox.yaml CEPH_COMMON: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/common.yaml CEPH_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/cluster-test.yaml CEPH_CRDS: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/crds.yaml CEPH_IMG: quay.io/ceph/demo:latest-squid CEPH_OP: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/operator-openshift.yaml CEPH_REPO: https://github.com/rook/rook.git CERTMANAGER_TIMEOUT: 300s CHECKOUT_FROM_OPENSTACK_REF: 'true' CINDER: config/samples/cinder_v1beta1_cinder.yaml CINDERAPI_DEPL_IMG: unused CINDERBKP_DEPL_IMG: unused CINDERSCH_DEPL_IMG: unused CINDERVOL_DEPL_IMG: unused CINDER_BRANCH: main CINDER_COMMIT_HASH: '' CINDER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/config/samples/cinder_v1beta1_cinder.yaml CINDER_IMG: quay.io/openstack-k8s-operators/cinder-operator-index:latest CINDER_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/kuttl-test.yaml CINDER_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/test/kuttl/tests CINDER_KUTTL_NAMESPACE: cinder-kuttl-tests CINDER_REPO: https://github.com/openstack-k8s-operators/cinder-operator.git CLEANUP_DIR_CMD: rm -Rf CRC_BGP_NIC_1_MAC: '52:54:00:11:11:11' CRC_BGP_NIC_2_MAC: '52:54:00:11:11:12' CRC_HTTPS_PROXY: '' CRC_HTTP_PROXY: '' CRC_STORAGE_NAMESPACE: crc-storage CRC_STORAGE_RETRIES: '3' CRC_URL: '''https://developers.redhat.com/content-gateway/rest/mirror/pub/openshift-v4/clients/crc/latest/crc-linux-amd64.tar.xz''' CRC_VERSION: 
latest DATAPLANE_ANSIBLE_SECRET: dataplane-ansible-ssh-private-key-secret DATAPLANE_ANSIBLE_USER: '' DATAPLANE_COMPUTE_IP: 192.168.122.100 DATAPLANE_CONTAINER_PREFIX: openstack DATAPLANE_CONTAINER_TAG: current-podified DATAPLANE_CUSTOM_SERVICE_RUNNER_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest DATAPLANE_DEFAULT_GW: 192.168.122.1 DATAPLANE_EXTRA_NOVA_CONFIG_FILE: /dev/null DATAPLANE_GROWVOLS_ARGS: /=8GB /tmp=1GB /home=1GB /var=100% DATAPLANE_KUSTOMIZE_SCENARIO: preprovisioned DATAPLANE_NETWORKER_IP: 192.168.122.200 DATAPLANE_NETWORK_INTERFACE_NAME: eth0 DATAPLANE_NOVA_NFS_PATH: '' DATAPLANE_NTP_SERVER: pool.ntp.org DATAPLANE_PLAYBOOK: osp.edpm.download_cache DATAPLANE_REGISTRY_URL: quay.io/podified-antelope-centos9 DATAPLANE_RUNNER_IMG: '' DATAPLANE_SERVER_ROLE: compute DATAPLANE_SSHD_ALLOWED_RANGES: '[''192.168.122.0/24'']' DATAPLANE_TIMEOUT: 30m DATAPLANE_TLS_ENABLED: 'true' DATAPLANE_TOTAL_NETWORKER_NODES: '1' DATAPLANE_TOTAL_NODES: '1' DBSERVICE: galera DESIGNATE: config/samples/designate_v1beta1_designate.yaml DESIGNATE_BRANCH: main DESIGNATE_COMMIT_HASH: '' DESIGNATE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/config/samples/designate_v1beta1_designate.yaml DESIGNATE_IMG: quay.io/openstack-k8s-operators/designate-operator-index:latest DESIGNATE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/kuttl-test.yaml DESIGNATE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/test/kuttl/tests DESIGNATE_KUTTL_NAMESPACE: designate-kuttl-tests DESIGNATE_REPO: https://github.com/openstack-k8s-operators/designate-operator.git DNSDATA: config/samples/network_v1beta1_dnsdata.yaml DNSDATA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsdata.yaml DNSMASQ: config/samples/network_v1beta1_dnsmasq.yaml DNSMASQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsmasq.yaml DNS_DEPL_IMG: unused DNS_DOMAIN: localdomain DOWNLOAD_TOOLS_SELECTION: all EDPM_ATTACH_EXTNET: 'true' EDPM_COMPUTE_ADDITIONAL_HOST_ROUTES: '''[]''' EDPM_COMPUTE_ADDITIONAL_NETWORKS: '''[]''' EDPM_COMPUTE_CELLS: '1' EDPM_COMPUTE_CEPH_ENABLED: 'true' EDPM_COMPUTE_CEPH_NOVA: 'true' EDPM_COMPUTE_DHCP_AGENT_ENABLED: 'true' EDPM_COMPUTE_SRIOV_ENABLED: 'true' EDPM_COMPUTE_SUFFIX: '0' EDPM_CONFIGURE_DEFAULT_ROUTE: 'true' EDPM_CONFIGURE_HUGEPAGES: 'false' EDPM_CONFIGURE_NETWORKING: 'true' EDPM_FIRSTBOOT_EXTRA: /tmp/edpm-firstboot-extra EDPM_NETWORKER_SUFFIX: '0' EDPM_TOTAL_NETWORKERS: '1' EDPM_TOTAL_NODES: '1' GALERA_REPLICAS: '' GENERATE_SSH_KEYS: 'true' GIT_CLONE_OPTS: '' GLANCE: config/samples/glance_v1beta1_glance.yaml GLANCEAPI_DEPL_IMG: unused GLANCE_BRANCH: main GLANCE_COMMIT_HASH: '' GLANCE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/config/samples/glance_v1beta1_glance.yaml GLANCE_IMG: quay.io/openstack-k8s-operators/glance-operator-index:latest GLANCE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/kuttl-test.yaml GLANCE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/test/kuttl/tests GLANCE_KUTTL_NAMESPACE: glance-kuttl-tests GLANCE_REPO: https://github.com/openstack-k8s-operators/glance-operator.git HEAT: config/samples/heat_v1beta1_heat.yaml HEATAPI_DEPL_IMG: unused HEATCFNAPI_DEPL_IMG: unused HEATENGINE_DEPL_IMG: unused HEAT_AUTH_ENCRYPTION_KEY: 76**********f0 
HEAT_BRANCH: main HEAT_COMMIT_HASH: '' HEAT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/config/samples/heat_v1beta1_heat.yaml HEAT_IMG: quay.io/openstack-k8s-operators/heat-operator-index:latest HEAT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/kuttl-test.yaml HEAT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/test/kuttl/tests HEAT_KUTTL_NAMESPACE: heat-kuttl-tests HEAT_REPO: https://github.com/openstack-k8s-operators/heat-operator.git HEAT_SERVICE_ENABLED: 'true' HORIZON: config/samples/horizon_v1beta1_horizon.yaml HORIZON_BRANCH: main HORIZON_COMMIT_HASH: '' HORIZON_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/config/samples/horizon_v1beta1_horizon.yaml HORIZON_DEPL_IMG: unused HORIZON_IMG: quay.io/openstack-k8s-operators/horizon-operator-index:latest HORIZON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/kuttl-test.yaml HORIZON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/test/kuttl/tests HORIZON_KUTTL_NAMESPACE: horizon-kuttl-tests HORIZON_REPO: https://github.com/openstack-k8s-operators/horizon-operator.git INFRA_BRANCH: main INFRA_COMMIT_HASH: '' INFRA_IMG: quay.io/openstack-k8s-operators/infra-operator-index:latest INFRA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/kuttl-test.yaml INFRA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/test/kuttl/tests INFRA_KUTTL_NAMESPACE: infra-kuttl-tests INFRA_REPO: https://github.com/openstack-k8s-operators/infra-operator.git INSTALL_CERT_MANAGER: 'true' INSTALL_NMSTATE: true || false INSTALL_NNCP: true || false INTERNALAPI_HOST_ROUTES: '' IPV6_LAB_IPV4_NETWORK_IPADDRESS: 172.30.0.1/24 IPV6_LAB_IPV6_NETWORK_IPADDRESS: fd00:abcd:abcd:fc00::1/64 IPV6_LAB_LIBVIRT_STORAGE_POOL: default IPV6_LAB_MANAGE_FIREWALLD: 'true' IPV6_LAB_NAT64_HOST_IPV4: 172.30.0.2/24 IPV6_LAB_NAT64_HOST_IPV6: fd00:abcd:abcd:fc00::2/64 IPV6_LAB_NAT64_INSTANCE_NAME: nat64-router IPV6_LAB_NAT64_IPV6_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_NAT64_TAYGA_DYNAMIC_POOL: 192.168.255.0/24 IPV6_LAB_NAT64_TAYGA_IPV4: 192.168.255.1 IPV6_LAB_NAT64_TAYGA_IPV6: fd00:abcd:abcd:fc00::3 IPV6_LAB_NAT64_TAYGA_IPV6_PREFIX: fd00:abcd:abcd:fcff::/96 IPV6_LAB_NAT64_UPDATE_PACKAGES: 'false' IPV6_LAB_NETWORK_NAME: nat64 IPV6_LAB_SNO_CLUSTER_NETWORK: fd00:abcd:0::/48 IPV6_LAB_SNO_HOST_IP: fd00:abcd:abcd:fc00::11 IPV6_LAB_SNO_HOST_PREFIX: '64' IPV6_LAB_SNO_INSTANCE_NAME: sno IPV6_LAB_SNO_MACHINE_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_SNO_OCP_MIRROR_URL: https://mirror.openshift.com/pub/openshift-v4/clients/ocp IPV6_LAB_SNO_OCP_VERSION: latest-4.14 IPV6_LAB_SNO_SERVICE_NETWORK: fd00:abcd:abcd:fc03::/112 IPV6_LAB_SSH_PUB_KEY: /home/zuul/.ssh/id_rsa.pub IPV6_LAB_WORK_DIR: /home/zuul/.ipv6lab IRONIC: config/samples/ironic_v1beta1_ironic.yaml IRONICAPI_DEPL_IMG: unused IRONICCON_DEPL_IMG: unused IRONICINS_DEPL_IMG: unused IRONICNAG_DEPL_IMG: unused IRONICPXE_DEPL_IMG: unused IRONIC_BRANCH: main IRONIC_COMMIT_HASH: '' IRONIC_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/config/samples/ironic_v1beta1_ironic.yaml IRONIC_IMAGE: quay.io/metal3-io/ironic IRONIC_IMAGE_TAG: release-24.1 IRONIC_IMG: quay.io/openstack-k8s-operators/ironic-operator-index:latest IRONIC_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/kuttl-test.yaml IRONIC_KUTTL_DIR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/test/kuttl/tests IRONIC_KUTTL_NAMESPACE: ironic-kuttl-tests IRONIC_REPO: https://github.com/openstack-k8s-operators/ironic-operator.git KEYSTONEAPI: config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_DEPL_IMG: unused KEYSTONE_BRANCH: main KEYSTONE_COMMIT_HASH: '' KEYSTONE_FEDERATION_CLIENT_SECRET: CO**********6f KEYSTONE_FEDERATION_CRYPTO_PASSPHRASE: openstack KEYSTONE_IMG: quay.io/openstack-k8s-operators/keystone-operator-index:latest KEYSTONE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/kuttl-test.yaml KEYSTONE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/test/kuttl/tests KEYSTONE_KUTTL_NAMESPACE: keystone-kuttl-tests KEYSTONE_REPO: https://github.com/openstack-k8s-operators/keystone-operator.git KUBEADMIN_PWD: '12345678' LIBVIRT_SECRET: libvirt-secret LOKI_DEPLOY_MODE: openshift-network LOKI_DEPLOY_NAMESPACE: netobserv LOKI_DEPLOY_SIZE: 1x.demo LOKI_NAMESPACE: openshift-operators-redhat LOKI_OPERATOR_GROUP: openshift-operators-redhat-loki LOKI_SUBSCRIPTION: loki-operator LVMS_CR: '1' MANILA: config/samples/manila_v1beta1_manila.yaml MANILAAPI_DEPL_IMG: unused MANILASCH_DEPL_IMG: unused MANILASHARE_DEPL_IMG: unused MANILA_BRANCH: main MANILA_COMMIT_HASH: '' MANILA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/config/samples/manila_v1beta1_manila.yaml MANILA_IMG: quay.io/openstack-k8s-operators/manila-operator-index:latest MANILA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/kuttl-test.yaml MANILA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/test/kuttl/tests MANILA_KUTTL_NAMESPACE: manila-kuttl-tests MANILA_REPO: https://github.com/openstack-k8s-operators/manila-operator.git MANILA_SERVICE_ENABLED: 'true' MARIADB: config/samples/mariadb_v1beta1_galera.yaml MARIADB_BRANCH: main MARIADB_CHAINSAW_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/config.yaml MARIADB_CHAINSAW_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/tests MARIADB_CHAINSAW_NAMESPACE: mariadb-chainsaw-tests MARIADB_COMMIT_HASH: '' MARIADB_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/config/samples/mariadb_v1beta1_galera.yaml MARIADB_DEPL_IMG: unused MARIADB_IMG: quay.io/openstack-k8s-operators/mariadb-operator-index:latest MARIADB_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/kuttl-test.yaml MARIADB_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/kuttl/tests MARIADB_KUTTL_NAMESPACE: mariadb-kuttl-tests MARIADB_REPO: https://github.com/openstack-k8s-operators/mariadb-operator.git MEMCACHED: config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_DEPL_IMG: unused METADATA_SHARED_SECRET: '12**********42' METALLB_IPV6_POOL: fd00:aaaa::80-fd00:aaaa::90 METALLB_POOL: 192.168.122.80-192.168.122.90 MICROSHIFT: '0' NAMESPACE: openstack NETCONFIG: config/samples/network_v1beta1_netconfig.yaml NETCONFIG_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_netconfig.yaml NETCONFIG_DEPL_IMG: unused NETOBSERV_DEPLOY_NAMESPACE: netobserv NETOBSERV_NAMESPACE: openshift-netobserv-operator NETOBSERV_OPERATOR_GROUP: openshift-netobserv-operator-net NETOBSERV_SUBSCRIPTION: netobserv-operator NETWORK_BGP: 'false' NETWORK_DESIGNATE_ADDRESS_PREFIX: 172.28.0 NETWORK_DESIGNATE_EXT_ADDRESS_PREFIX: 172.50.0 NETWORK_INTERNALAPI_ADDRESS_PREFIX: 172.17.0 NETWORK_ISOLATION: 'true' NETWORK_ISOLATION_INSTANCE_NAME: crc NETWORK_ISOLATION_IPV4: 'true' NETWORK_ISOLATION_IPV4_ADDRESS: 172.16.1.1/24 NETWORK_ISOLATION_IPV4_NAT: 'true' NETWORK_ISOLATION_IPV6: 'false' NETWORK_ISOLATION_IPV6_ADDRESS: fd00:aaaa::1/64 NETWORK_ISOLATION_IP_ADDRESS: 192.168.122.10 NETWORK_ISOLATION_MAC: '52:54:00:11:11:10' NETWORK_ISOLATION_NETWORK_NAME: net-iso NETWORK_ISOLATION_NET_NAME: default NETWORK_ISOLATION_USE_DEFAULT_NETWORK: 'true' NETWORK_MTU: '1500' NETWORK_STORAGEMGMT_ADDRESS_PREFIX: 172.20.0 NETWORK_STORAGE_ADDRESS_PREFIX: 172.18.0 NETWORK_STORAGE_MACVLAN: '' NETWORK_TENANT_ADDRESS_PREFIX: 172.19.0 NETWORK_VLAN_START: '20' NETWORK_VLAN_STEP: '1' NEUTRONAPI: config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_DEPL_IMG: unused NEUTRON_BRANCH: main NEUTRON_COMMIT_HASH: '' NEUTRON_IMG: quay.io/openstack-k8s-operators/neutron-operator-index:latest NEUTRON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/kuttl-test.yaml NEUTRON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/test/kuttl/tests NEUTRON_KUTTL_NAMESPACE: neutron-kuttl-tests NEUTRON_REPO: https://github.com/openstack-k8s-operators/neutron-operator.git NFS_HOME: /home/nfs NMSTATE_NAMESPACE: openshift-nmstate NMSTATE_OPERATOR_GROUP: openshift-nmstate-tn6k8 NMSTATE_SUBSCRIPTION: kubernetes-nmstate-operator NNCP_ADDITIONAL_HOST_ROUTES: '' NNCP_BGP_1_INTERFACE: enp7s0 NNCP_BGP_1_IP_ADDRESS: 100.65.4.2 NNCP_BGP_2_INTERFACE: enp8s0 NNCP_BGP_2_IP_ADDRESS: 100.64.4.2 NNCP_BRIDGE: ospbr NNCP_CLEANUP_TIMEOUT: 120s NNCP_CTLPLANE_IPV6_ADDRESS_PREFIX: 'fd00:aaaa::' NNCP_CTLPLANE_IPV6_ADDRESS_SUFFIX: '10' NNCP_CTLPLANE_IP_ADDRESS_PREFIX: 192.168.122 NNCP_CTLPLANE_IP_ADDRESS_SUFFIX: '10' NNCP_DNS_SERVER: 192.168.122.1 NNCP_DNS_SERVER_IPV6: fd00:aaaa::1 NNCP_GATEWAY: 192.168.122.1 NNCP_GATEWAY_IPV6: fd00:aaaa::1 NNCP_INTERFACE: enp6s0 NNCP_NODES: '' NNCP_TIMEOUT: 240s NOVA: config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_BRANCH: main NOVA_COMMIT_HASH: '' NOVA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/nova-operator/config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_IMG: quay.io/openstack-k8s-operators/nova-operator-index:latest NOVA_REPO: https://github.com/openstack-k8s-operators/nova-operator.git NUMBER_OF_INSTANCES: '1' OCP_NETWORK_NAME: crc OCTAVIA: config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_BRANCH: main OCTAVIA_COMMIT_HASH: '' OCTAVIA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_IMG: quay.io/openstack-k8s-operators/octavia-operator-index:latest OCTAVIA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/kuttl-test.yaml OCTAVIA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/test/kuttl/tests OCTAVIA_KUTTL_NAMESPACE: 
octavia-kuttl-tests OCTAVIA_REPO: https://github.com/openstack-k8s-operators/octavia-operator.git OKD: 'false' OPENSTACK_BRANCH: main OPENSTACK_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-bundle:latest OPENSTACK_COMMIT_HASH: '' OPENSTACK_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_CRDS_DIR: openstack_crds OPENSTACK_CTLPLANE: config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_IMG: quay.io/openstack-k8s-operators/openstack-operator-index:latest OPENSTACK_K8S_BRANCH: main OPENSTACK_K8S_TAG: latest OPENSTACK_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/kuttl-test.yaml OPENSTACK_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/test/kuttl/tests OPENSTACK_KUTTL_NAMESPACE: openstack-kuttl-tests OPENSTACK_NEUTRON_CUSTOM_CONF: '' OPENSTACK_REPO: https://github.com/openstack-k8s-operators/openstack-operator.git OPENSTACK_STORAGE_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-storage-bundle:latest OPERATOR_BASE_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator OPERATOR_CHANNEL: '' OPERATOR_NAMESPACE: openstack-operators OPERATOR_SOURCE: '' OPERATOR_SOURCE_NAMESPACE: '' OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm OVNCONTROLLER: config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_NMAP: 'true' OVNDBS: config/samples/ovn_v1beta1_ovndbcluster.yaml OVNDBS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovndbcluster.yaml OVNNORTHD: config/samples/ovn_v1beta1_ovnnorthd.yaml OVNNORTHD_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovnnorthd.yaml OVN_BRANCH: main OVN_COMMIT_HASH: '' OVN_IMG: quay.io/openstack-k8s-operators/ovn-operator-index:latest OVN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/kuttl-test.yaml OVN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/test/kuttl/tests OVN_KUTTL_NAMESPACE: ovn-kuttl-tests OVN_REPO: https://github.com/openstack-k8s-operators/ovn-operator.git PASSWORD: '12**********78' PLACEMENTAPI: config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_DEPL_IMG: unused PLACEMENT_BRANCH: main PLACEMENT_COMMIT_HASH: '' PLACEMENT_IMG: quay.io/openstack-k8s-operators/placement-operator-index:latest PLACEMENT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/kuttl-test.yaml PLACEMENT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/test/kuttl/tests PLACEMENT_KUTTL_NAMESPACE: placement-kuttl-tests PLACEMENT_REPO: https://github.com/openstack-k8s-operators/placement-operator.git PULL_SECRET: /home/zuul/src/review.rdoproject.org/rdo-jobs/playbooks/data_plane_adoption/pull-secret.txt RABBITMQ: docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_BRANCH: patches RABBITMQ_COMMIT_HASH: '' RABBITMQ_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/rabbitmq-operator/docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_DEPL_IMG: unused RABBITMQ_IMG: quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest RABBITMQ_REPO: https://github.com/openstack-k8s-operators/rabbitmq-cluster-operator.git REDHAT_OPERATORS: 'false' REDIS: config/samples/redis_v1beta1_redis.yaml REDIS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator-redis/config/samples/redis_v1beta1_redis.yaml REDIS_DEPL_IMG: unused RH_REGISTRY_PWD: '' RH_REGISTRY_USER: '' SECRET: os**********et SG_CORE_DEPL_IMG: unused STANDALONE_COMPUTE_DRIVER: libvirt STANDALONE_EXTERNAL_NET_PREFFIX: 172.21.0 STANDALONE_INTERNALAPI_NET_PREFIX: 172.17.0 STANDALONE_STORAGEMGMT_NET_PREFIX: 172.20.0 STANDALONE_STORAGE_NET_PREFIX: 172.18.0 STANDALONE_TENANT_NET_PREFIX: 172.19.0 STORAGEMGMT_HOST_ROUTES: '' STORAGE_CLASS: local-storage STORAGE_HOST_ROUTES: '' SWIFT: config/samples/swift_v1beta1_swift.yaml SWIFT_BRANCH: main SWIFT_COMMIT_HASH: '' SWIFT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/config/samples/swift_v1beta1_swift.yaml SWIFT_IMG: quay.io/openstack-k8s-operators/swift-operator-index:latest SWIFT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/kuttl-test.yaml SWIFT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/test/kuttl/tests SWIFT_KUTTL_NAMESPACE: swift-kuttl-tests SWIFT_REPO: https://github.com/openstack-k8s-operators/swift-operator.git TELEMETRY: config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_BRANCH: main TELEMETRY_COMMIT_HASH: '' TELEMETRY_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_IMG: quay.io/openstack-k8s-operators/telemetry-operator-index:latest TELEMETRY_KUTTL_BASEDIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator TELEMETRY_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/kuttl-test.yaml TELEMETRY_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/test/kuttl/suites TELEMETRY_KUTTL_NAMESPACE: telemetry-kuttl-tests TELEMETRY_KUTTL_RELPATH: test/kuttl/suites TELEMETRY_REPO: https://github.com/openstack-k8s-operators/telemetry-operator.git TENANT_HOST_ROUTES: '' TIMEOUT: 300s TLS_ENABLED: 'false' tripleo_deploy: 'export REGISTRY_USER:' 2026-01-22 12:06:02,752 p=33295 u=zuul n=ansible | TASK [install_yamls : Generate make targets install_yamls_path={{ cifmw_install_yamls_repo }}, output_directory={{ cifmw_install_yamls_tasks_out }}] *** 2026-01-22 12:06:02,753 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.044) 0:01:15.462 ****** 2026-01-22 12:06:02,753 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:02 +0000 (0:00:00.044) 0:01:15.460 ****** 2026-01-22 12:06:03,097 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:03,107 p=33295 u=zuul n=ansible | TASK [install_yamls : Debug generate_make module var=cifmw_generate_makes] ***** 2026-01-22 12:06:03,107 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:03 +0000 (0:00:00.354) 0:01:15.816 ****** 2026-01-22 12:06:03,107 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:03 +0000 (0:00:00.354) 0:01:15.814 ****** 2026-01-22 12:06:03,130 p=33295 u=zuul n=ansible | ok: [localhost] => cifmw_generate_makes: changed: false debug: 
/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/Makefile: - all - help - cleanup - deploy_cleanup - wait - crc_storage - crc_storage_cleanup - crc_storage_release - crc_storage_with_retries - crc_storage_cleanup_with_retries - operator_namespace - namespace - namespace_cleanup - input - input_cleanup - crc_bmo_setup - crc_bmo_cleanup - openstack_prep - openstack - openstack_wait - openstack_init - openstack_cleanup - openstack_repo - openstack_deploy_prep - openstack_deploy - openstack_wait_deploy - openstack_deploy_cleanup - openstack_update_run - update_services - update_system - openstack_patch_version - edpm_deploy_generate_keys - edpm_patch_ansible_runner_image - edpm_deploy_prep - edpm_deploy_cleanup - edpm_deploy - edpm_deploy_baremetal_prep - edpm_deploy_baremetal - edpm_wait_deploy_baremetal - edpm_wait_deploy - edpm_register_dns - edpm_nova_discover_hosts - openstack_crds - openstack_crds_cleanup - edpm_deploy_networker_prep - edpm_deploy_networker_cleanup - edpm_deploy_networker - infra_prep - infra - infra_cleanup - dns_deploy_prep - dns_deploy - dns_deploy_cleanup - netconfig_deploy_prep - netconfig_deploy - netconfig_deploy_cleanup - memcached_deploy_prep - memcached_deploy - memcached_deploy_cleanup - keystone_prep - keystone - keystone_cleanup - keystone_deploy_prep - keystone_deploy - keystone_deploy_cleanup - barbican_prep - barbican - barbican_cleanup - barbican_deploy_prep - barbican_deploy - barbican_deploy_validate - barbican_deploy_cleanup - mariadb - mariadb_cleanup - mariadb_deploy_prep - mariadb_deploy - mariadb_deploy_cleanup - placement_prep - placement - placement_cleanup - placement_deploy_prep - placement_deploy - placement_deploy_cleanup - glance_prep - glance - glance_cleanup - glance_deploy_prep - glance_deploy - glance_deploy_cleanup - ovn_prep - ovn - ovn_cleanup - ovn_deploy_prep - ovn_deploy - ovn_deploy_cleanup - neutron_prep - neutron - neutron_cleanup - neutron_deploy_prep - neutron_deploy - neutron_deploy_cleanup - cinder_prep - cinder - cinder_cleanup - cinder_deploy_prep - cinder_deploy - cinder_deploy_cleanup - rabbitmq_prep - rabbitmq - rabbitmq_cleanup - rabbitmq_deploy_prep - rabbitmq_deploy - rabbitmq_deploy_cleanup - ironic_prep - ironic - ironic_cleanup - ironic_deploy_prep - ironic_deploy - ironic_deploy_cleanup - octavia_prep - octavia - octavia_cleanup - octavia_deploy_prep - octavia_deploy - octavia_deploy_cleanup - designate_prep - designate - designate_cleanup - designate_deploy_prep - designate_deploy - designate_deploy_cleanup - nova_prep - nova - nova_cleanup - nova_deploy_prep - nova_deploy - nova_deploy_cleanup - mariadb_kuttl_run - mariadb_kuttl - kuttl_db_prep - kuttl_db_cleanup - kuttl_common_prep - kuttl_common_cleanup - keystone_kuttl_run - keystone_kuttl - barbican_kuttl_run - barbican_kuttl - placement_kuttl_run - placement_kuttl - cinder_kuttl_run - cinder_kuttl - neutron_kuttl_run - neutron_kuttl - octavia_kuttl_run - octavia_kuttl - designate_kuttl - designate_kuttl_run - ovn_kuttl_run - ovn_kuttl - infra_kuttl_run - infra_kuttl - ironic_kuttl_run - ironic_kuttl - ironic_kuttl_crc - heat_kuttl_run - heat_kuttl - heat_kuttl_crc - ansibleee_kuttl_run - ansibleee_kuttl_cleanup - ansibleee_kuttl_prep - ansibleee_kuttl - glance_kuttl_run - glance_kuttl - manila_kuttl_run - manila_kuttl - swift_kuttl_run - swift_kuttl - horizon_kuttl_run - horizon_kuttl - openstack_kuttl_run - openstack_kuttl - mariadb_chainsaw_run - mariadb_chainsaw - horizon_prep - horizon - horizon_cleanup - horizon_deploy_prep - 
horizon_deploy - horizon_deploy_cleanup - heat_prep - heat - heat_cleanup - heat_deploy_prep - heat_deploy - heat_deploy_cleanup - ansibleee_prep - ansibleee - ansibleee_cleanup - baremetal_prep - baremetal - baremetal_cleanup - ceph_help - ceph - ceph_cleanup - rook_prep - rook - rook_deploy_prep - rook_deploy - rook_crc_disk - rook_cleanup - lvms - nmstate - nncp - nncp_cleanup - netattach - netattach_cleanup - metallb - metallb_config - metallb_config_cleanup - metallb_cleanup - loki - loki_cleanup - loki_deploy - loki_deploy_cleanup - netobserv - netobserv_cleanup - netobserv_deploy - netobserv_deploy_cleanup - manila_prep - manila - manila_cleanup - manila_deploy_prep - manila_deploy - manila_deploy_cleanup - telemetry_prep - telemetry - telemetry_cleanup - telemetry_deploy_prep - telemetry_deploy - telemetry_deploy_cleanup - telemetry_kuttl_run - telemetry_kuttl - swift_prep - swift - swift_cleanup - swift_deploy_prep - swift_deploy - swift_deploy_cleanup - certmanager - certmanager_cleanup - validate_marketplace - redis_deploy_prep - redis_deploy - redis_deploy_cleanup - set_slower_etcd_profile /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/Makefile: - help - download_tools - nfs - nfs_cleanup - crc - crc_cleanup - crc_scrub - crc_attach_default_interface - crc_attach_default_interface_cleanup - ipv6_lab_network - ipv6_lab_network_cleanup - ipv6_lab_nat64_router - ipv6_lab_nat64_router_cleanup - ipv6_lab_sno - ipv6_lab_sno_cleanup - ipv6_lab - ipv6_lab_cleanup - attach_default_interface - attach_default_interface_cleanup - network_isolation_bridge - network_isolation_bridge_cleanup - edpm_baremetal_compute - edpm_compute - edpm_compute_bootc - edpm_ansible_runner - edpm_computes_bgp - edpm_compute_repos - edpm_compute_cleanup - edpm_networker - edpm_networker_cleanup - edpm_deploy_instance - tripleo_deploy - standalone_deploy - standalone_sync - standalone - standalone_cleanup - standalone_snapshot - standalone_revert - cifmw_prepare - cifmw_cleanup - bmaas_network - bmaas_network_cleanup - bmaas_route_crc_and_crc_bmaas_networks - bmaas_route_crc_and_crc_bmaas_networks_cleanup - bmaas_crc_attach_network - bmaas_crc_attach_network_cleanup - bmaas_crc_baremetal_bridge - bmaas_crc_baremetal_bridge_cleanup - bmaas_baremetal_net_nad - bmaas_baremetal_net_nad_cleanup - bmaas_metallb - bmaas_metallb_cleanup - bmaas_virtual_bms - bmaas_virtual_bms_cleanup - bmaas_sushy_emulator - bmaas_sushy_emulator_cleanup - bmaas_sushy_emulator_wait - bmaas_generate_nodes_yaml - bmaas - bmaas_cleanup failed: false success: true 2026-01-22 12:06:03,142 p=33295 u=zuul n=ansible | TASK [install_yamls : Create the install_yamls parameters file dest={{ cifmw_basedir }}/artifacts/parameters/install-yamls-params.yml, content={{ { 'cifmw_install_yamls_environment': cifmw_install_yamls_environment, 'cifmw_install_yamls_defaults': cifmw_install_yamls_defaults } | to_nice_yaml }}, mode=0644] *** 2026-01-22 12:06:03,142 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:03 +0000 (0:00:00.035) 0:01:15.852 ****** 2026-01-22 12:06:03,143 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:03 +0000 (0:00:00.035) 0:01:15.850 ****** 2026-01-22 12:06:03,529 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:03,538 p=33295 u=zuul n=ansible | TASK [install_yamls : Create empty cifmw_install_yamls_environment if needed cifmw_install_yamls_environment={}] *** 2026-01-22 12:06:03,538 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:03 +0000 (0:00:00.395) 0:01:16.247 
****** 2026-01-22 12:06:03,538 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:03 +0000 (0:00:00.395) 0:01:16.245 ****** 2026-01-22 12:06:03,556 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:03,572 p=33295 u=zuul n=ansible | TASK [discover_latest_image : Get latest image url={{ cifmw_discover_latest_image_base_url }}, image_prefix={{ cifmw_discover_latest_image_qcow_prefix }}, images_file={{ cifmw_discover_latest_image_images_file }}] *** 2026-01-22 12:06:03,572 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:03 +0000 (0:00:00.034) 0:01:16.282 ****** 2026-01-22 12:06:03,573 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:03 +0000 (0:00:00.034) 0:01:16.280 ****** 2026-01-22 12:06:04,201 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:04,209 p=33295 u=zuul n=ansible | TASK [discover_latest_image : Export facts accordingly cifmw_discovered_image_name={{ discovered_image['data']['image_name'] }}, cifmw_discovered_image_url={{ discovered_image['data']['image_url'] }}, cifmw_discovered_hash={{ discovered_image['data']['hash'] }}, cifmw_discovered_hash_algorithm={{ discovered_image['data']['hash_algorithm'] }}, cacheable=True] *** 2026-01-22 12:06:04,210 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:04 +0000 (0:00:00.637) 0:01:16.919 ****** 2026-01-22 12:06:04,210 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:04 +0000 (0:00:00.637) 0:01:16.917 ****** 2026-01-22 12:06:04,234 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:04,249 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Create artifacts with custom params mode=0644, dest={{ cifmw_basedir }}/artifacts/parameters/custom-params.yml, content={{ ci_framework_params | to_nice_yaml }}] *** 2026-01-22 12:06:04,249 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:04 +0000 (0:00:00.039) 0:01:16.959 ****** 2026-01-22 12:06:04,250 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:04 +0000 (0:00:00.039) 0:01:16.957 ****** 2026-01-22 12:06:04,656 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:04,672 p=33295 u=zuul n=ansible | TASK [Dump parameters to a file to pass them easily to cifmw playbooks src={{ playbook_dir }}/files/ci_framework_params.yaml.j2, dest={{ cifmw_parameters_file }}] *** 2026-01-22 12:06:04,672 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:04 +0000 (0:00:00.422) 0:01:17.381 ****** 2026-01-22 12:06:04,672 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:04 +0000 (0:00:00.422) 0:01:17.379 ****** 2026-01-22 12:06:05,124 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:05,143 p=33295 u=zuul n=ansible | TASK [Set vars as fact - cifmw parameters name=cifmw_helpers, tasks_from=var_file.yml] *** 2026-01-22 12:06:05,143 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.471) 0:01:17.852 ****** 2026-01-22 12:06:05,143 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.471) 0:01:17.850 ****** 2026-01-22 12:06:05,301 p=33295 u=zuul n=ansible | TASK [cifmw_helpers : Fail if file is not yaml or yml extension msg=File needs to be yaml/yml extension] *** 2026-01-22 12:06:05,302 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.158) 0:01:18.011 ****** 2026-01-22 12:06:05,302 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.158) 0:01:18.009 ****** 2026-01-22 12:06:05,328 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:05,340 p=33295 u=zuul n=ansible | TASK 
[cifmw_helpers : Check if file is available path={{ provided_file | trim }}] *** 2026-01-22 12:06:05,340 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.038) 0:01:18.049 ****** 2026-01-22 12:06:05,340 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.038) 0:01:18.047 ****** 2026-01-22 12:06:05,530 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:05,538 p=33295 u=zuul n=ansible | TASK [cifmw_helpers : Read the vars] ******************************************* 2026-01-22 12:06:05,538 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.198) 0:01:18.248 ****** 2026-01-22 12:06:05,538 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.198) 0:01:18.246 ****** 2026-01-22 12:06:05,703 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:05,715 p=33295 u=zuul n=ansible | TASK [cifmw_helpers : Set vars as fact] **************************************** 2026-01-22 12:06:05,716 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.177) 0:01:18.425 ****** 2026-01-22 12:06:05,716 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.177) 0:01:18.423 ****** 2026-01-22 12:06:05,751 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,757 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,763 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,775 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,781 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,788 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,795 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,801 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,806 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,812 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,818 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,820 p=33295 u=zuul n=ansible | ok: [localhost] => (item=None) 2026-01-22 12:06:05,853 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:05,874 p=33295 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2026-01-22 12:06:05,874 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.158) 0:01:18.583 ****** 2026-01-22 12:06:05,874 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.158) 0:01:18.581 ****** 2026-01-22 12:06:05,931 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:05,940 p=33295 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] 
*** 2026-01-22 12:06:05,940 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.066) 0:01:18.649 ****** 2026-01-22 12:06:05,940 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:05 +0000 (0:00:00.065) 0:01:18.647 ****** 2026-01-22 12:06:06,010 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:06,022 p=33295 u=zuul n=ansible | TASK [run_hook : Loop on hooks for pre_infra _raw_params={{ hook.type }}.yml] *** 2026-01-22 12:06:06,022 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.081) 0:01:18.731 ****** 2026-01-22 12:06:06,022 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.082) 0:01:18.729 ****** 2026-01-22 12:06:06,168 p=33295 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/run_hook/tasks/playbook.yml for localhost => (item={'name': 'Download needed tools', 'inventory': 'localhost,', 'connection': 'local', 'type': 'playbook', 'source': '/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/download_tools.yaml'}) 2026-01-22 12:06:06,180 p=33295 u=zuul n=ansible | TASK [run_hook : Set playbook path for Download needed tools cifmw_basedir={{ _bdir }}, hook_name={{ _hook_name }}, playbook_path={{ _play | realpath }}, log_path={{ _bdir }}/logs/{{ step }}_{{ _hook_name }}.log, extra_vars=-e namespace={{ cifmw_openstack_namespace }} {%- if hook.extra_vars is defined and hook.extra_vars|length > 0 -%} {% for key,value in hook.extra_vars.items() -%} {%- if key == 'file' %} -e "@{{ value }}" {%- else %} -e "{{ key }}={{ value }}" {%- endif %} {%- endfor %} {%- endif %}] *** 2026-01-22 12:06:06,180 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.158) 0:01:18.890 ****** 2026-01-22 12:06:06,181 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.158) 0:01:18.888 ****** 2026-01-22 12:06:06,224 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:06,232 p=33295 u=zuul n=ansible | TASK [run_hook : Get file stat path={{ playbook_path }}] *********************** 2026-01-22 12:06:06,232 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.051) 0:01:18.942 ****** 2026-01-22 12:06:06,232 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.051) 0:01:18.940 ****** 2026-01-22 12:06:06,433 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:06,452 p=33295 u=zuul n=ansible | TASK [run_hook : Fail if playbook doesn't exist msg=Playbook {{ playbook_path }} doesn't seem to exist.] 
*** 2026-01-22 12:06:06,452 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.219) 0:01:19.162 ****** 2026-01-22 12:06:06,452 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.220) 0:01:19.160 ****** 2026-01-22 12:06:06,475 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:06,485 p=33295 u=zuul n=ansible | TASK [run_hook : Get parameters files paths={{ (cifmw_basedir, 'artifacts/parameters') | path_join }}, file_type=file, patterns=*.yml] *** 2026-01-22 12:06:06,485 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.032) 0:01:19.194 ****** 2026-01-22 12:06:06,485 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.032) 0:01:19.192 ****** 2026-01-22 12:06:06,678 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:06,693 p=33295 u=zuul n=ansible | TASK [run_hook : Add parameters artifacts as extra variables extra_vars={{ extra_vars }} {% for file in cifmw_run_hook_parameters_files.files %} -e "@{{ file.path }}" {%- endfor %}] *** 2026-01-22 12:06:06,693 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.207) 0:01:19.402 ****** 2026-01-22 12:06:06,693 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.208) 0:01:19.400 ****** 2026-01-22 12:06:06,720 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:06,730 p=33295 u=zuul n=ansible | TASK [run_hook : Ensure log directory exists path={{ log_path | dirname }}, state=directory, mode=0755] *** 2026-01-22 12:06:06,730 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.036) 0:01:19.439 ****** 2026-01-22 12:06:06,730 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.036) 0:01:19.437 ****** 2026-01-22 12:06:06,918 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:06,927 p=33295 u=zuul n=ansible | TASK [run_hook : Ensure artifacts directory exists path={{ cifmw_basedir }}/artifacts, state=directory, mode=0755] *** 2026-01-22 12:06:06,927 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.197) 0:01:19.637 ****** 2026-01-22 12:06:06,927 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:06 +0000 (0:00:00.197) 0:01:19.635 ****** 2026-01-22 12:06:07,106 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:07,118 p=33295 u=zuul n=ansible | TASK [run_hook : Run hook without retry - Download needed tools] *************** 2026-01-22 12:06:07,118 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:07 +0000 (0:00:00.190) 0:01:19.827 ****** 2026-01-22 12:06:07,118 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:07 +0000 (0:00:00.190) 0:01:19.825 ****** 2026-01-22 12:06:07,168 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_000_run_hook_without_retry.log 2026-01-22 12:06:41,355 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:41,366 p=33295 u=zuul n=ansible | TASK [run_hook : Run hook with retry - Download needed tools] ****************** 2026-01-22 12:06:41,367 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:34.248) 0:01:54.076 ****** 2026-01-22 12:06:41,367 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:34.248) 0:01:54.074 ****** 2026-01-22 12:06:41,388 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:41,397 p=33295 u=zuul n=ansible | TASK [run_hook : Check if we have a file path={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name 
}}.yml] *** 2026-01-22 12:06:41,397 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.030) 0:01:54.106 ****** 2026-01-22 12:06:41,397 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.030) 0:01:54.105 ****** 2026-01-22 12:06:41,572 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:41,582 p=33295 u=zuul n=ansible | TASK [run_hook : Load generated content in main playbook file={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml] *** 2026-01-22 12:06:41,582 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.184) 0:01:54.291 ****** 2026-01-22 12:06:41,582 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.184) 0:01:54.289 ****** 2026-01-22 12:06:41,602 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:41,698 p=33295 u=zuul n=ansible | PLAY [Prepare host virtualization] ********************************************* 2026-01-22 12:06:41,717 p=33295 u=zuul n=ansible | TASK [Include vars from cifmw_extras _raw_params={{ item | replace('@','') }}] *** 2026-01-22 12:06:41,717 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.135) 0:01:54.426 ****** 2026-01-22 12:06:41,717 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.135) 0:01:54.424 ****** 2026-01-22 12:06:41,740 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:41,748 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] *** 2026-01-22 12:06:41,749 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.031) 0:01:54.458 ****** 2026-01-22 12:06:41,749 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.031) 0:01:54.456 ****** 2026-01-22 12:06:41,792 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:41,801 p=33295 u=zuul n=ansible | TASK [Ensure libvirt is present/configured name=libvirt_manager] *************** 2026-01-22 12:06:41,801 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.052) 0:01:54.510 ****** 2026-01-22 12:06:41,801 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.052) 0:01:54.509 ****** 2026-01-22 12:06:41,824 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:41,832 p=33295 u=zuul n=ansible | TASK [Prepare OpenShift provisioner node name=openshift_provisioner_node] ****** 2026-01-22 12:06:41,832 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.030) 0:01:54.541 ****** 2026-01-22 12:06:41,832 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.030) 0:01:54.539 ****** 2026-01-22 12:06:41,853 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:41,904 p=33295 u=zuul n=ansible | PLAY [Run cifmw_setup infra, build package, container and operators, deploy EDPM] *** 2026-01-22 12:06:41,939 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] *** 2026-01-22 12:06:41,939 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.106) 0:01:54.648 ****** 2026-01-22 12:06:41,939 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.107) 0:01:54.646 ****** 2026-01-22 12:06:41,983 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:41,994 p=33295 u=zuul n=ansible | TASK [networking_mapper : Check for Networking Environment Definition file existence path={{ 
cifmw_networking_mapper_networking_env_def_path }}] *** 2026-01-22 12:06:41,994 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.054) 0:01:54.703 ****** 2026-01-22 12:06:41,994 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:41 +0000 (0:00:00.054) 0:01:54.701 ****** 2026-01-22 12:06:42,178 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:42,193 p=33295 u=zuul n=ansible | TASK [networking_mapper : Check for Networking Definition file existance that=['_net_env_def_stat.stat.exists'], msg=Ensure that the Networking Environment Definition file exists in {{ cifmw_networking_mapper_networking_env_def_path }}, quiet=True] *** 2026-01-22 12:06:42,193 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.199) 0:01:54.902 ****** 2026-01-22 12:06:42,193 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.199) 0:01:54.900 ****** 2026-01-22 12:06:42,216 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:42,231 p=33295 u=zuul n=ansible | TASK [networking_mapper : Load the Networking Definition from file path={{ cifmw_networking_mapper_networking_env_def_path }}] *** 2026-01-22 12:06:42,232 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.038) 0:01:54.941 ****** 2026-01-22 12:06:42,232 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.038) 0:01:54.939 ****** 2026-01-22 12:06:42,256 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:42,269 p=33295 u=zuul n=ansible | TASK [networking_mapper : Set cifmw_networking_env_definition is present cifmw_networking_env_definition={{ _net_env_def_slurp['content'] | b64decode | from_yaml }}, cacheable=True] *** 2026-01-22 12:06:42,269 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.037) 0:01:54.978 ****** 2026-01-22 12:06:42,269 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.037) 0:01:54.976 ****** 2026-01-22 12:06:42,292 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:42,315 p=33295 u=zuul n=ansible | TASK [Deploy OCP using Hive name=hive] ***************************************** 2026-01-22 12:06:42,315 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.045) 0:01:55.024 ****** 2026-01-22 12:06:42,315 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.045) 0:01:55.022 ****** 2026-01-22 12:06:42,340 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:42,349 p=33295 u=zuul n=ansible | TASK [Prepare CRC name=rhol_crc] *********************************************** 2026-01-22 12:06:42,349 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.034) 0:01:55.059 ****** 2026-01-22 12:06:42,349 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.034) 0:01:55.057 ****** 2026-01-22 12:06:42,372 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:42,382 p=33295 u=zuul n=ansible | TASK [Deploy OpenShift cluster using dev-scripts name=devscripts] ************** 2026-01-22 12:06:42,382 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.033) 0:01:55.092 ****** 2026-01-22 12:06:42,382 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.033) 0:01:55.090 ****** 2026-01-22 12:06:42,406 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:42,414 p=33295 u=zuul n=ansible | TASK [openshift_login : Ensure output directory exists path={{ 
cifmw_openshift_login_basedir }}/artifacts, state=directory, mode=0755] *** 2026-01-22 12:06:42,415 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.032) 0:01:55.124 ****** 2026-01-22 12:06:42,415 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.032) 0:01:55.122 ****** 2026-01-22 12:06:42,597 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:42,610 p=33295 u=zuul n=ansible | TASK [openshift_login : OpenShift login _raw_params=login.yml] ***************** 2026-01-22 12:06:42,610 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.195) 0:01:55.319 ****** 2026-01-22 12:06:42,610 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.195) 0:01:55.317 ****** 2026-01-22 12:06:42,649 p=33295 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/openshift_login/tasks/login.yml for localhost 2026-01-22 12:06:42,669 p=33295 u=zuul n=ansible | TASK [openshift_login : Check if the password file is present path={{ cifmw_openshift_login_password_file | default(cifmw_openshift_password_file) }}] *** 2026-01-22 12:06:42,669 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.059) 0:01:55.378 ****** 2026-01-22 12:06:42,669 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.059) 0:01:55.377 ****** 2026-01-22 12:06:42,693 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:42,702 p=33295 u=zuul n=ansible | TASK [openshift_login : Fetch user password content src={{ cifmw_openshift_login_password_file | default(cifmw_openshift_password_file) }}] *** 2026-01-22 12:06:42,702 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.033) 0:01:55.412 ****** 2026-01-22 12:06:42,702 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.033) 0:01:55.410 ****** 2026-01-22 12:06:42,725 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:42,734 p=33295 u=zuul n=ansible | TASK [openshift_login : Set user password as a fact cifmw_openshift_login_password={{ cifmw_openshift_login_password_file_slurp.content | b64decode }}, cacheable=True] *** 2026-01-22 12:06:42,734 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.031) 0:01:55.443 ****** 2026-01-22 12:06:42,734 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.031) 0:01:55.441 ****** 2026-01-22 12:06:42,762 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:42,772 p=33295 u=zuul n=ansible | TASK [openshift_login : Set role variables cifmw_openshift_login_kubeconfig={{ cifmw_openshift_login_kubeconfig | default(cifmw_openshift_kubeconfig) | default( ansible_env.KUBECONFIG if 'KUBECONFIG' in ansible_env else cifmw_openshift_login_kubeconfig_default_path ) | trim }}, cifmw_openshift_login_user={{ cifmw_openshift_login_user | default(cifmw_openshift_user) | default(omit) }}, cifmw_openshift_login_password={{********** cifmw_openshift_login_password | default(cifmw_openshift_password) | default(omit) }}, cifmw_openshift_login_api={{ cifmw_openshift_login_api | default(cifmw_openshift_api) | default(omit) }}, cifmw_openshift_login_cert_login={{ cifmw_openshift_login_cert_login | default(false)}}, cifmw_openshift_login_provided_token={{ cifmw_openshift_provided_token | default(omit) }}, cacheable=True] *** 2026-01-22 12:06:42,772 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.038) 0:01:55.481 ****** 2026-01-22 
12:06:42,772 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.038) 0:01:55.479 ****** 2026-01-22 12:06:42,805 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:42,813 p=33295 u=zuul n=ansible | TASK [openshift_login : Check if kubeconfig exists path={{ cifmw_openshift_login_kubeconfig }}] *** 2026-01-22 12:06:42,813 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.040) 0:01:55.522 ****** 2026-01-22 12:06:42,813 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:42 +0000 (0:00:00.040) 0:01:55.520 ****** 2026-01-22 12:06:42,988 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:43,001 p=33295 u=zuul n=ansible | TASK [openshift_login : Assert that enough data is provided to log in to OpenShift that=cifmw_openshift_login_kubeconfig_stat.stat.exists or (cifmw_openshift_login_provided_token is defined and cifmw_openshift_login_provided_token != '') or ( (cifmw_openshift_login_user is defined) and (cifmw_openshift_login_password is defined) and (cifmw_openshift_login_api is defined) ), msg=If an existing kubeconfig is not provided user/pwd or provided/initial token and API URL must be given] *** 2026-01-22 12:06:43,001 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.188) 0:01:55.710 ****** 2026-01-22 12:06:43,001 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.188) 0:01:55.709 ****** 2026-01-22 12:06:43,034 p=33295 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2026-01-22 12:06:43,050 p=33295 u=zuul n=ansible | TASK [openshift_login : Fetch kubeconfig content src={{ cifmw_openshift_login_kubeconfig }}] *** 2026-01-22 12:06:43,051 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.049) 0:01:55.760 ****** 2026-01-22 12:06:43,051 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.049) 0:01:55.758 ****** 2026-01-22 12:06:43,076 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:43,091 p=33295 u=zuul n=ansible | TASK [openshift_login : Fetch x509 key based users cifmw_openshift_login_key_based_users={{ ( cifmw_openshift_login_kubeconfig_content_b64.content | b64decode | from_yaml ). 
users | default([]) | selectattr('user.client-certificate-data', 'defined') | map(attribute="name") | map("split", "/") | map("first") }}, cacheable=True] *** 2026-01-22 12:06:43,091 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.040) 0:01:55.800 ****** 2026-01-22 12:06:43,091 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.040) 0:01:55.798 ****** 2026-01-22 12:06:43,113 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:43,123 p=33295 u=zuul n=ansible | TASK [openshift_login : Assign key based user if not provided and available cifmw_openshift_login_user={{ (cifmw_openshift_login_assume_cert_system_user | ternary('system:', '')) + (cifmw_openshift_login_key_based_users | map('replace', 'system:', '') | unique | first) }}, cifmw_openshift_login_cert_login=True, cacheable=True] *** 2026-01-22 12:06:43,123 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.032) 0:01:55.832 ****** 2026-01-22 12:06:43,123 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.032) 0:01:55.830 ****** 2026-01-22 12:06:43,145 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:43,156 p=33295 u=zuul n=ansible | TASK [openshift_login : Set the retry count cifmw_openshift_login_retries_cnt={{ 0 if cifmw_openshift_login_retries_cnt is undefined else cifmw_openshift_login_retries_cnt|int + 1 }}] *** 2026-01-22 12:06:43,156 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.033) 0:01:55.865 ****** 2026-01-22 12:06:43,156 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.033) 0:01:55.863 ****** 2026-01-22 12:06:43,183 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:43,194 p=33295 u=zuul n=ansible | TASK [openshift_login : Fetch token _raw_params=try_login.yml] ***************** 2026-01-22 12:06:43,194 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.037) 0:01:55.903 ****** 2026-01-22 12:06:43,194 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.037) 0:01:55.901 ****** 2026-01-22 12:06:43,220 p=33295 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/openshift_login/tasks/try_login.yml for localhost 2026-01-22 12:06:43,236 p=33295 u=zuul n=ansible | TASK [openshift_login : Try get OpenShift access token _raw_params=oc whoami -t] *** 2026-01-22 12:06:43,236 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.042) 0:01:55.946 ****** 2026-01-22 12:06:43,236 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.042) 0:01:55.944 ****** 2026-01-22 12:06:43,260 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:43,272 p=33295 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift token output_dir={{ cifmw_openshift_login_basedir }}/artifacts, script=oc login {%- if cifmw_openshift_login_provided_token is not defined %} {%- if cifmw_openshift_login_user is defined %} -u {{ cifmw_openshift_login_user }} {%- endif %} {%- if cifmw_openshift_login_password is defined %} -p {{ cifmw_openshift_login_password }} {%- endif %} {% else %} --token={{ cifmw_openshift_login_provided_token }} {%- endif %} {%- if cifmw_openshift_login_skip_tls_verify|bool %} --insecure-skip-tls-verify=true {%- endif %} {%- if cifmw_openshift_login_api is defined %} {{ cifmw_openshift_login_api }} {%- endif %}] *** 2026-01-22 12:06:43,272 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 
(0:00:00.035) 0:01:55.981 ****** 2026-01-22 12:06:43,272 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.035) 0:01:55.979 ****** 2026-01-22 12:06:43,336 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_001_fetch_openshift.log 2026-01-22 12:06:43,659 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:43,677 p=33295 u=zuul n=ansible | TASK [openshift_login : Ensure kubeconfig is provided that=cifmw_openshift_login_kubeconfig != ""] *** 2026-01-22 12:06:43,677 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.404) 0:01:56.386 ****** 2026-01-22 12:06:43,677 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.405) 0:01:56.385 ****** 2026-01-22 12:06:43,704 p=33295 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2026-01-22 12:06:43,721 p=33295 u=zuul n=ansible | TASK [openshift_login : Fetch new OpenShift access token _raw_params=oc whoami -t] *** 2026-01-22 12:06:43,722 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.044) 0:01:56.431 ****** 2026-01-22 12:06:43,722 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:43 +0000 (0:00:00.044) 0:01:56.429 ****** 2026-01-22 12:06:44,181 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:44,197 p=33295 u=zuul n=ansible | TASK [openshift_login : Set new OpenShift token cifmw_openshift_login_token={{ (not cifmw_openshift_login_new_token_out.skipped | default(false)) | ternary(cifmw_openshift_login_new_token_out.stdout, cifmw_openshift_login_whoami_out.stdout) }}, cacheable=True] *** 2026-01-22 12:06:44,198 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:44 +0000 (0:00:00.475) 0:01:56.907 ****** 2026-01-22 12:06:44,198 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:44 +0000 (0:00:00.475) 0:01:56.905 ****** 2026-01-22 12:06:44,234 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:44,245 p=33295 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift API URL _raw_params=oc whoami --show-server=true] *** 2026-01-22 12:06:44,245 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:44 +0000 (0:00:00.047) 0:01:56.955 ****** 2026-01-22 12:06:44,246 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:44 +0000 (0:00:00.047) 0:01:56.953 ****** 2026-01-22 12:06:44,592 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:44,601 p=33295 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift kubeconfig context _raw_params=oc whoami -c] *** 2026-01-22 12:06:44,601 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:44 +0000 (0:00:00.355) 0:01:57.310 ****** 2026-01-22 12:06:44,601 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:44 +0000 (0:00:00.355) 0:01:57.308 ****** 2026-01-22 12:06:44,874 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:44,888 p=33295 u=zuul n=ansible | TASK [openshift_login : Fetch OpenShift current user _raw_params=oc whoami] **** 2026-01-22 12:06:44,888 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:44 +0000 (0:00:00.287) 0:01:57.597 ****** 2026-01-22 12:06:44,888 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:44 +0000 (0:00:00.287) 0:01:57.595 ****** 2026-01-22 12:06:45,181 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:45,191 p=33295 u=zuul n=ansible | TASK [openshift_login : Set OpenShift user, context and API facts cifmw_openshift_login_api={{ cifmw_openshift_login_api_out.stdout }}, 
cifmw_openshift_login_context={{ cifmw_openshift_login_context_out.stdout }}, cifmw_openshift_login_user={{ _oauth_user }}, cifmw_openshift_kubeconfig={{ cifmw_openshift_login_kubeconfig }}, cifmw_openshift_api={{ cifmw_openshift_login_api_out.stdout }}, cifmw_openshift_context={{ cifmw_openshift_login_context_out.stdout }}, cifmw_openshift_user={{ _oauth_user }}, cifmw_openshift_token={{ cifmw_openshift_login_token | default(omit) }}, cifmw_install_yamls_environment={{ ( cifmw_install_yamls_environment | combine({'KUBECONFIG': cifmw_openshift_login_kubeconfig}) ) if cifmw_install_yamls_environment is defined else omit }}, cacheable=True] *** 2026-01-22 12:06:45,191 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:45 +0000 (0:00:00.303) 0:01:57.901 ****** 2026-01-22 12:06:45,191 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:45 +0000 (0:00:00.303) 0:01:57.899 ****** 2026-01-22 12:06:45,240 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:45,248 p=33295 u=zuul n=ansible | TASK [openshift_login : Create the openshift_login parameters file dest={{ cifmw_basedir }}/artifacts/parameters/openshift-login-params.yml, content={{ cifmw_openshift_login_params_content | from_yaml | to_nice_yaml }}, mode=0600] *** 2026-01-22 12:06:45,248 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:45 +0000 (0:00:00.056) 0:01:57.958 ****** 2026-01-22 12:06:45,248 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:45 +0000 (0:00:00.056) 0:01:57.956 ****** 2026-01-22 12:06:45,702 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:45,715 p=33295 u=zuul n=ansible | TASK [openshift_login : Read the install yamls parameters file path={{ cifmw_basedir }}/artifacts/parameters/install-yamls-params.yml] *** 2026-01-22 12:06:45,715 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:45 +0000 (0:00:00.466) 0:01:58.424 ****** 2026-01-22 12:06:45,715 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:45 +0000 (0:00:00.467) 0:01:58.423 ****** 2026-01-22 12:06:45,932 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:45,942 p=33295 u=zuul n=ansible | TASK [openshift_login : Append the KUBECONFIG to the install yamls parameters content={{ cifmw_openshift_login_install_yamls_artifacts_slurp['content'] | b64decode | from_yaml | combine( { 'cifmw_install_yamls_environment': { 'KUBECONFIG': cifmw_openshift_login_kubeconfig } }, recursive=true) | to_nice_yaml }}, dest={{ cifmw_basedir }}/artifacts/parameters/install-yamls-params.yml, mode=0600] *** 2026-01-22 12:06:45,943 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:45 +0000 (0:00:00.227) 0:01:58.652 ****** 2026-01-22 12:06:45,943 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:45 +0000 (0:00:00.227) 0:01:58.650 ****** 2026-01-22 12:06:46,404 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:46,419 p=33295 u=zuul n=ansible | TASK [openshift_setup : Ensure output directory exists path={{ cifmw_openshift_setup_basedir }}/artifacts, state=directory, mode=0755] *** 2026-01-22 12:06:46,419 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:46 +0000 (0:00:00.476) 0:01:59.129 ****** 2026-01-22 12:06:46,419 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:46 +0000 (0:00:00.476) 0:01:59.127 ****** 2026-01-22 12:06:46,658 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:46,670 p=33295 u=zuul n=ansible | TASK [openshift_setup : Fetch namespaces to create cifmw_openshift_setup_namespaces={{ (( ([cifmw_install_yamls_defaults['NAMESPACE']] + 
([cifmw_install_yamls_defaults['OPERATOR_NAMESPACE']] if 'OPERATOR_NAMESPACE' is in cifmw_install_yamls_defaults else []) ) if cifmw_install_yamls_defaults is defined else [] ) + cifmw_openshift_setup_create_namespaces) | unique }}] *** 2026-01-22 12:06:46,670 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:46 +0000 (0:00:00.250) 0:01:59.379 ****** 2026-01-22 12:06:46,670 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:46 +0000 (0:00:00.250) 0:01:59.377 ****** 2026-01-22 12:06:46,696 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:46,711 p=33295 u=zuul n=ansible | TASK [openshift_setup : Create required namespaces kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit) }}, name={{ item }}, kind=Namespace, state=present] *** 2026-01-22 12:06:46,711 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:46 +0000 (0:00:00.041) 0:01:59.421 ****** 2026-01-22 12:06:46,711 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:46 +0000 (0:00:00.041) 0:01:59.419 ****** 2026-01-22 12:06:47,772 p=33295 u=zuul n=ansible | changed: [localhost] => (item=openstack) 2026-01-22 12:06:48,485 p=33295 u=zuul n=ansible | changed: [localhost] => (item=openstack-operators) 2026-01-22 12:06:48,513 p=33295 u=zuul n=ansible | TASK [openshift_setup : Get internal OpenShift registry route kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, kind=Route, name=default-route, namespace=openshift-image-registry] *** 2026-01-22 12:06:48,514 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:48 +0000 (0:00:01.802) 0:02:01.223 ****** 2026-01-22 12:06:48,514 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:48 +0000 (0:00:01.802) 0:02:01.221 ****** 2026-01-22 12:06:49,677 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:49,688 p=33295 u=zuul n=ansible | TASK [openshift_setup : Allow anonymous image-pulls in CRC registry for targeted namespaces state=present, kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'kind': 'RoleBinding', 'apiVersion': 'rbac.authorization.k8s.io/v1', 'metadata': {'name': 'system:image-puller', 'namespace': '{{ item }}'}, 'subjects': [{'kind': 'User', 'name': 'system:anonymous'}, {'kind': 'User', 'name': 'system:unauthenticated'}], 'roleRef': {'kind': 'ClusterRole', 'name': 'system:image-puller'}}] *** 2026-01-22 12:06:49,688 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:49 +0000 (0:00:01.174) 0:02:02.397 ****** 2026-01-22 12:06:49,688 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:49 +0000 (0:00:01.174) 0:02:02.395 ****** 2026-01-22 12:06:50,502 p=33295 u=zuul n=ansible | changed: [localhost] => (item=openstack) 2026-01-22 12:06:51,288 p=33295 u=zuul n=ansible | changed: [localhost] => (item=openstack-operators) 2026-01-22 12:06:51,302 p=33295 u=zuul n=ansible | TASK [openshift_setup : Wait for the image registry to be ready kind=Deployment, name=image-registry, namespace=openshift-image-registry, kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, wait=True, wait_sleep=10, wait_timeout=600, wait_condition={'type': 'Available', 'status': 'True'}] *** 2026-01-22 12:06:51,303 p=33295 u=zuul n=ansible | Thursday 22 January 
2026 12:06:51 +0000 (0:00:01.614) 0:02:04.012 ****** 2026-01-22 12:06:51,303 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:51 +0000 (0:00:01.614) 0:02:04.010 ****** 2026-01-22 12:06:52,247 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:52,262 p=33295 u=zuul n=ansible | TASK [openshift_setup : Login into OpenShift internal registry output_dir={{ cifmw_openshift_setup_basedir }}/artifacts, script=podman login -u {{ cifmw_openshift_user }} -p {{ cifmw_openshift_token }} {%- if cifmw_openshift_setup_skip_internal_registry_tls_verify|bool %} --tls-verify=false {%- endif %} {{ cifmw_openshift_setup_registry_default_route.resources[0].spec.host }}] *** 2026-01-22 12:06:52,262 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.959) 0:02:04.971 ****** 2026-01-22 12:06:52,262 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.959) 0:02:04.969 ****** 2026-01-22 12:06:52,312 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_002_login_into_openshift_internal.log 2026-01-22 12:06:52,516 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:52,533 p=33295 u=zuul n=ansible | TASK [Ensure we have custom CA installed on host role=install_ca] ************** 2026-01-22 12:06:52,533 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.271) 0:02:05.242 ****** 2026-01-22 12:06:52,533 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.271) 0:02:05.240 ****** 2026-01-22 12:06:52,556 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:52,568 p=33295 u=zuul n=ansible | TASK [openshift_setup : Update ca bundle _raw_params=update-ca-trust extract] *** 2026-01-22 12:06:52,569 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.035) 0:02:05.278 ****** 2026-01-22 12:06:52,569 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.035) 0:02:05.276 ****** 2026-01-22 12:06:52,588 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:52,600 p=33295 u=zuul n=ansible | TASK [openshift_setup : Slurp CAs file src={{ cifmw_openshift_setup_ca_bundle_path }}] *** 2026-01-22 12:06:52,601 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.031) 0:02:05.310 ****** 2026-01-22 12:06:52,601 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.031) 0:02:05.308 ****** 2026-01-22 12:06:52,631 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:52,641 p=33295 u=zuul n=ansible | TASK [openshift_setup : Create config map with registry CAs kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'apiVersion': 'v1', 'kind': 'ConfigMap', 'metadata': {'namespace': 'openshift-config', 'name': 'registry-cas'}, 'data': '{{ _config_map_data | items2dict }}'}] *** 2026-01-22 12:06:52,641 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.040) 0:02:05.351 ****** 2026-01-22 12:06:52,641 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.040) 0:02:05.349 ****** 2026-01-22 12:06:52,669 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:52,678 p=33295 u=zuul n=ansible | TASK [openshift_setup : Install Red Hat CA for pulling images from internal registry kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ 
cifmw_openshift_context | default(omit)}}, merge_type=merge, definition={'apiVersion': 'config.openshift.io/v1', 'kind': 'Image', 'metadata': {'name': 'cluster'}, 'spec': {'additionalTrustedCA': {'name': 'registry-cas'}}}] *** 2026-01-22 12:06:52,678 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.036) 0:02:05.387 ****** 2026-01-22 12:06:52,678 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.036) 0:02:05.385 ****** 2026-01-22 12:06:52,706 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:52,718 p=33295 u=zuul n=ansible | TASK [openshift_setup : Add insecure registry kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, merge_type=merge, definition={'apiVersion': 'config.openshift.io/v1', 'kind': 'Image', 'metadata': {'name': 'cluster'}, 'spec': {'registrySources': {'insecureRegistries': ['{{ cifmw_update_containers_registry }}'], 'allowedRegistries': '{{ all_registries }}'}}}] *** 2026-01-22 12:06:52,718 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.040) 0:02:05.428 ****** 2026-01-22 12:06:52,719 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.040) 0:02:05.426 ****** 2026-01-22 12:06:52,742 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:52,750 p=33295 u=zuul n=ansible | TASK [openshift_setup : Create a ICSP with repository digest mirrors kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, definition={'apiVersion': 'operator.openshift.io/v1alpha1', 'kind': 'ImageContentSourcePolicy', 'metadata': {'name': 'registry-digest-mirrors'}, 'spec': {'repositoryDigestMirrors': '{{ cifmw_openshift_setup_digest_mirrors }}'}}] *** 2026-01-22 12:06:52,750 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.031) 0:02:05.459 ****** 2026-01-22 12:06:52,750 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.031) 0:02:05.457 ****** 2026-01-22 12:06:52,784 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:52,793 p=33295 u=zuul n=ansible | TASK [openshift_setup : Gather network.operator info kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, api_version=operator.openshift.io/v1, kind=Network, name=cluster] *** 2026-01-22 12:06:52,793 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.043) 0:02:05.503 ****** 2026-01-22 12:06:52,793 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:52 +0000 (0:00:00.043) 0:02:05.501 ****** 2026-01-22 12:06:53,567 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:53,596 p=33295 u=zuul n=ansible | TASK [openshift_setup : Patch network operator api_version=operator.openshift.io/v1, kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Network, name=cluster, persist_config=True, patch=[{'path': '/spec/defaultNetwork/ovnKubernetesConfig/gatewayConfig/routingViaHost', 'value': True, 'op': 'replace'}, {'path': '/spec/defaultNetwork/ovnKubernetesConfig/gatewayConfig/ipForwarding', 'value': 'Global', 'op': 'replace'}]] *** 2026-01-22 12:06:53,596 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:53 +0000 (0:00:00.802) 0:02:06.305 ****** 2026-01-22 12:06:53,596 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:53 +0000 (0:00:00.802) 
0:02:06.303 ****** 2026-01-22 12:06:54,542 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:54,556 p=33295 u=zuul n=ansible | TASK [openshift_setup : Patch samples registry configuration kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, api_version=samples.operator.openshift.io/v1, kind=Config, name=cluster, patch=[{'op': 'replace', 'path': '/spec/samplesRegistry', 'value': 'registry.redhat.io'}]] *** 2026-01-22 12:06:54,556 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:54 +0000 (0:00:00.959) 0:02:07.265 ****** 2026-01-22 12:06:54,556 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:54 +0000 (0:00:00.959) 0:02:07.263 ****** 2026-01-22 12:06:55,340 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:55,352 p=33295 u=zuul n=ansible | TASK [openshift_setup : Delete the pods from openshift-marketplace namespace kind=Pod, state=absent, delete_all=True, kubeconfig={{ cifmw_openshift_kubeconfig }}, namespace=openshift-marketplace] *** 2026-01-22 12:06:55,352 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.796) 0:02:08.062 ****** 2026-01-22 12:06:55,352 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.796) 0:02:08.060 ****** 2026-01-22 12:06:55,372 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,385 p=33295 u=zuul n=ansible | TASK [openshift_setup : Wait for openshift-marketplace pods to be running _raw_params=oc wait pod --all --for=condition=Ready -n openshift-marketplace --timeout=1m] *** 2026-01-22 12:06:55,386 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.033) 0:02:08.095 ****** 2026-01-22 12:06:55,386 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.033) 0:02:08.093 ****** 2026-01-22 12:06:55,406 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,429 p=33295 u=zuul n=ansible | TASK [Deploy Observability operator. 
name=openshift_obs] *********************** 2026-01-22 12:06:55,429 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.043) 0:02:08.138 ****** 2026-01-22 12:06:55,429 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.043) 0:02:08.136 ****** 2026-01-22 12:06:55,454 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,463 p=33295 u=zuul n=ansible | TASK [Deploy Metal3 BMHs name=deploy_bmh] ************************************** 2026-01-22 12:06:55,463 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.033) 0:02:08.172 ****** 2026-01-22 12:06:55,463 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.033) 0:02:08.170 ****** 2026-01-22 12:06:55,482 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,491 p=33295 u=zuul n=ansible | TASK [Install certmanager operator role name=cert_manager] ********************* 2026-01-22 12:06:55,492 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.028) 0:02:08.201 ****** 2026-01-22 12:06:55,492 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.028) 0:02:08.199 ****** 2026-01-22 12:06:55,512 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,523 p=33295 u=zuul n=ansible | TASK [Configure hosts networking using nmstate name=ci_nmstate] **************** 2026-01-22 12:06:55,523 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.031) 0:02:08.233 ****** 2026-01-22 12:06:55,523 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.031) 0:02:08.231 ****** 2026-01-22 12:06:55,547 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,555 p=33295 u=zuul n=ansible | TASK [Configure multus networks name=ci_multus] ******************************** 2026-01-22 12:06:55,555 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.032) 0:02:08.265 ****** 2026-01-22 12:06:55,555 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.032) 0:02:08.263 ****** 2026-01-22 12:06:55,578 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,586 p=33295 u=zuul n=ansible | TASK [Deploy Sushy Emulator service pod name=sushy_emulator] ******************* 2026-01-22 12:06:55,586 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.030) 0:02:08.295 ****** 2026-01-22 12:06:55,586 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.030) 0:02:08.293 ****** 2026-01-22 12:06:55,606 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,615 p=33295 u=zuul n=ansible | TASK [Setup Libvirt on controller name=libvirt_manager] ************************ 2026-01-22 12:06:55,615 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.029) 0:02:08.324 ****** 2026-01-22 12:06:55,615 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.029) 0:02:08.322 ****** 2026-01-22 12:06:55,633 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,643 p=33295 u=zuul n=ansible | TASK [Prepare container package builder name=pkg_build] ************************ 2026-01-22 12:06:55,643 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.027) 0:02:08.352 ****** 2026-01-22 12:06:55,643 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.027) 0:02:08.350 ****** 2026-01-22 12:06:55,665 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 
12:06:55,673 p=33295 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2026-01-22 12:06:55,673 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.030) 0:02:08.383 ****** 2026-01-22 12:06:55,674 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.030) 0:02:08.381 ****** 2026-01-22 12:06:55,733 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:55,742 p=33295 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2026-01-22 12:06:55,742 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.068) 0:02:08.451 ****** 2026-01-22 12:06:55,742 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.068) 0:02:08.449 ****** 2026-01-22 12:06:55,825 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:55,834 p=33295 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_infra _raw_params={{ hook.type }}.yml] *** 2026-01-22 12:06:55,835 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.092) 0:02:08.544 ****** 2026-01-22 12:06:55,835 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.092) 0:02:08.542 ****** 2026-01-22 12:06:55,914 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,935 p=33295 u=zuul n=ansible | TASK [Set content-provider variables dictionary cifmw_content_provider_params={'cifmw_operator_build_output': '{{ cifmw_operator_build_output }}', 'cifmw_operator_build_meta_name': 'openstack-operator'}, cacheable=True] *** 2026-01-22 12:06:55,936 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.101) 0:02:08.645 ****** 2026-01-22 12:06:55,936 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.101) 0:02:08.643 ****** 2026-01-22 12:06:55,973 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:55,983 p=33295 u=zuul n=ansible | TASK [Write variables to cifmw custom params file path={{ cifwm_data_folder }}/custom-params.yml, line={{ cifmw_content_provider_params | to_nice_yaml }}] *** 2026-01-22 12:06:55,983 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.047) 0:02:08.692 ****** 2026-01-22 12:06:55,983 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:55 +0000 (0:00:00.047) 0:02:08.690 ****** 2026-01-22 12:06:56,006 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:56,019 p=33295 u=zuul n=ansible | TASK [Set content-provider variables dictionary cifmw_content_provider_params={'cifmw_operator_build_output': {'operators': {'openstack-operator': {'image_catalog': "{{ operators_catalog_img['openstack-operator'] }}"}}}, 'cifmw_operator_build_meta_name': 'openstack-operator'}, cacheable=True] *** 2026-01-22 12:06:56,020 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.036) 0:02:08.729 ****** 2026-01-22 12:06:56,020 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.036) 0:02:08.727 ****** 2026-01-22 12:06:56,044 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:56,053 p=33295 u=zuul n=ansible | TASK [Write variables to cifmw custom params file path={{ cifwm_data_folder 
}}/custom-params.yml, line={{ cifmw_content_provider_params | to_nice_yaml }}] *** 2026-01-22 12:06:56,053 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.033) 0:02:08.762 ****** 2026-01-22 12:06:56,053 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.033) 0:02:08.760 ****** 2026-01-22 12:06:56,075 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:56,085 p=33295 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2026-01-22 12:06:56,085 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.032) 0:02:08.795 ****** 2026-01-22 12:06:56,086 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.032) 0:02:08.793 ****** 2026-01-22 12:06:56,145 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:56,154 p=33295 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2026-01-22 12:06:56,154 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.068) 0:02:08.863 ****** 2026-01-22 12:06:56,154 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.068) 0:02:08.862 ****** 2026-01-22 12:06:56,232 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:56,246 p=33295 u=zuul n=ansible | TASK [run_hook : Loop on hooks for pre_deploy _raw_params={{ hook.type }}.yml] *** 2026-01-22 12:06:56,246 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.091) 0:02:08.955 ****** 2026-01-22 12:06:56,246 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.091) 0:02:08.953 ****** 2026-01-22 12:06:56,347 p=33295 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/run_hook/tasks/playbook.yml for localhost => (item={'name': 'Fetch compute facts', 'type': 'playbook', 'inventory': '/home/zuul/ci-framework-data/artifacts/zuul_inventory.yml', 'source': 'fetch_compute_facts.yml'}) 2026-01-22 12:06:56,359 p=33295 u=zuul n=ansible | TASK [run_hook : Set playbook path for Fetch compute facts cifmw_basedir={{ _bdir }}, hook_name={{ _hook_name }}, playbook_path={{ _play | realpath }}, log_path={{ _bdir }}/logs/{{ step }}_{{ _hook_name }}.log, extra_vars=-e namespace={{ cifmw_openstack_namespace }} {%- if hook.extra_vars is defined and hook.extra_vars|length > 0 -%} {% for key,value in hook.extra_vars.items() -%} {%- if key == 'file' %} -e "@{{ value }}" {%- else %} -e "{{ key }}={{ value }}" {%- endif %} {%- endfor %} {%- endif %}] *** 2026-01-22 12:06:56,359 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.113) 0:02:09.068 ****** 2026-01-22 12:06:56,359 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.112) 0:02:09.066 ****** 2026-01-22 12:06:56,403 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:56,420 p=33295 u=zuul n=ansible | TASK [run_hook : Get file stat path={{ playbook_path }}] *********************** 2026-01-22 12:06:56,420 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.060) 0:02:09.129 ****** 2026-01-22 12:06:56,420 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.061) 
0:02:09.127 ****** 2026-01-22 12:06:56,687 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:56,696 p=33295 u=zuul n=ansible | TASK [run_hook : Fail if playbook doesn't exist msg=Playbook {{ playbook_path }} doesn't seem to exist.] *** 2026-01-22 12:06:56,697 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.276) 0:02:09.406 ****** 2026-01-22 12:06:56,697 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.276) 0:02:09.404 ****** 2026-01-22 12:06:56,722 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:56,733 p=33295 u=zuul n=ansible | TASK [run_hook : Get parameters files paths={{ (cifmw_basedir, 'artifacts/parameters') | path_join }}, file_type=file, patterns=*.yml] *** 2026-01-22 12:06:56,733 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.036) 0:02:09.442 ****** 2026-01-22 12:06:56,733 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.036) 0:02:09.440 ****** 2026-01-22 12:06:56,926 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:56,944 p=33295 u=zuul n=ansible | TASK [run_hook : Add parameters artifacts as extra variables extra_vars={{ extra_vars }} {% for file in cifmw_run_hook_parameters_files.files %} -e "@{{ file.path }}" {%- endfor %}] *** 2026-01-22 12:06:56,944 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.211) 0:02:09.654 ****** 2026-01-22 12:06:56,945 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.211) 0:02:09.652 ****** 2026-01-22 12:06:56,972 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:56,985 p=33295 u=zuul n=ansible | TASK [run_hook : Ensure log directory exists path={{ log_path | dirname }}, state=directory, mode=0755] *** 2026-01-22 12:06:56,986 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.041) 0:02:09.695 ****** 2026-01-22 12:06:56,986 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:56 +0000 (0:00:00.041) 0:02:09.693 ****** 2026-01-22 12:06:57,204 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:57,219 p=33295 u=zuul n=ansible | TASK [run_hook : Ensure artifacts directory exists path={{ cifmw_basedir }}/artifacts, state=directory, mode=0755] *** 2026-01-22 12:06:57,219 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:57 +0000 (0:00:00.233) 0:02:09.928 ****** 2026-01-22 12:06:57,219 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:57 +0000 (0:00:00.233) 0:02:09.926 ****** 2026-01-22 12:06:57,418 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:57,434 p=33295 u=zuul n=ansible | TASK [run_hook : Run hook without retry - Fetch compute facts] ***************** 2026-01-22 12:06:57,434 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:57 +0000 (0:00:00.215) 0:02:10.144 ****** 2026-01-22 12:06:57,435 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:06:57 +0000 (0:00:00.215) 0:02:10.142 ****** 2026-01-22 12:06:57,503 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_003_run_hook_without_retry_fetch.log 2026-01-22 12:07:00,446 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:07:00,463 p=33295 u=zuul n=ansible | TASK [run_hook : Run hook with retry - Fetch compute facts] ******************** 2026-01-22 12:07:00,463 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:00 +0000 (0:00:03.028) 0:02:13.172 ****** 2026-01-22 12:07:00,463 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:00 +0000 
(0:00:03.028) 0:02:13.170 ****** 2026-01-22 12:07:00,490 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:07:00,510 p=33295 u=zuul n=ansible | TASK [run_hook : Check if we have a file path={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml] *** 2026-01-22 12:07:00,510 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:00 +0000 (0:00:00.047) 0:02:13.219 ****** 2026-01-22 12:07:00,510 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:00 +0000 (0:00:00.047) 0:02:13.218 ****** 2026-01-22 12:07:00,822 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:07:00,835 p=33295 u=zuul n=ansible | TASK [run_hook : Load generated content in main playbook file={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml] *** 2026-01-22 12:07:00,835 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:00 +0000 (0:00:00.324) 0:02:13.544 ****** 2026-01-22 12:07:00,835 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:00 +0000 (0:00:00.324) 0:02:13.542 ****** 2026-01-22 12:07:00,869 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:07:00,892 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] *** 2026-01-22 12:07:00,892 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:00 +0000 (0:00:00.056) 0:02:13.601 ****** 2026-01-22 12:07:00,892 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:00 +0000 (0:00:00.057) 0:02:13.599 ****** 2026-01-22 12:07:01,027 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:07:01,042 p=33295 u=zuul n=ansible | TASK [Configure Storage Class name=ci_local_storage] *************************** 2026-01-22 12:07:01,042 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:01 +0000 (0:00:00.150) 0:02:13.752 ****** 2026-01-22 12:07:01,042 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:01 +0000 (0:00:00.150) 0:02:13.750 ****** 2026-01-22 12:07:01,168 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Create role needed directories path={{ cifmw_cls_manifests_dir }}, state=directory, mode=0755] *** 2026-01-22 12:07:01,169 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:01 +0000 (0:00:00.126) 0:02:13.878 ****** 2026-01-22 12:07:01,169 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:01 +0000 (0:00:00.126) 0:02:13.876 ****** 2026-01-22 12:07:01,360 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:07:01,368 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Create the cifmw_cls_namespace namespace" kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit) }}, name={{ cifmw_cls_namespace }}, kind=Namespace, state=present] *** 2026-01-22 12:07:01,369 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:01 +0000 (0:00:00.199) 0:02:14.078 ****** 2026-01-22 12:07:01,369 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:01 +0000 (0:00:00.199) 0:02:14.076 ****** 2026-01-22 12:07:02,139 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:07:02,152 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Save storage manifests as artifacts dest={{ cifmw_cls_manifests_dir }}/storage-class.yaml, content={{ cifmw_cls_storage_manifest | to_nice_yaml }}, mode=0644] *** 2026-01-22 12:07:02,152 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:02 +0000 (0:00:00.783) 0:02:14.862 ****** 2026-01-22 12:07:02,152 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:02 +0000 (0:00:00.783) 0:02:14.860 ****** 
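Note on the ci_local_storage and openshift_setup tasks recorded above: they follow one repeating pattern, write a manifest under the artifacts directory, then apply it to the cluster using the kubeconfig, token and context facts gathered earlier by openshift_login. A minimal illustrative sketch of that pattern follows, assuming the kubernetes.core collection is installed (the logged parameters suggest a k8s-style module such as kubernetes.core.k8s); the variable names below (manifests_dir, storage_class_manifest, kubeconfig_path, openshift_token, openshift_context) are placeholders, not the roles' actual variables:

    # Sketch only: mirrors the copy + apply pattern seen in the log above
    - name: Save a storage manifest as an artifact (sketch)
      ansible.builtin.copy:
        dest: "{{ manifests_dir }}/storage-class.yaml"      # placeholder path
        content: "{{ storage_class_manifest | to_nice_yaml }}"
        mode: "0644"

    - name: Apply the manifest against the cluster (sketch)
      kubernetes.core.k8s:
        kubeconfig: "{{ kubeconfig_path }}"                  # placeholder variable
        api_key: "{{ openshift_token | default(omit) }}"
        context: "{{ openshift_context | default(omit) }}"
        state: present
        src: "{{ manifests_dir }}/storage-class.yaml"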
2026-01-22 12:07:02,577 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:07:02,589 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Get k8s nodes kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, kind=Node] *** 2026-01-22 12:07:02,589 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:02 +0000 (0:00:00.436) 0:02:15.298 ****** 2026-01-22 12:07:02,589 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:02 +0000 (0:00:00.436) 0:02:15.296 ****** 2026-01-22 12:07:03,437 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:07:03,454 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Fetch hostnames for all hosts _raw_params=hostname] *** 2026-01-22 12:07:03,454 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:03 +0000 (0:00:00.864) 0:02:16.163 ****** 2026-01-22 12:07:03,454 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:03 +0000 (0:00:00.864) 0:02:16.161 ****** 2026-01-22 12:07:04,400 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=crc) 2026-01-22 12:07:04,953 p=33295 u=zuul n=ansible | changed: [localhost -> standalone(38.102.83.154)] => (item=standalone) 2026-01-22 12:07:05,510 p=33295 u=zuul n=ansible | changed: [localhost -> controller(38.102.83.83)] => (item=controller) 2026-01-22 12:07:05,988 p=33295 u=zuul n=ansible | changed: [localhost] => (item=localhost) 2026-01-22 12:07:05,990 p=33295 u=zuul n=ansible | [WARNING]: Platform linux on host localhost is using the discovered Python interpreter at /usr/bin/python3.9, but future installation of another Python interpreter could change the meaning of that path. See https://docs.ansible.com/ansible-core/2.15/reference_appendices/interpreter_discovery.html for more information.
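The [WARNING] above is Ansible's interpreter-discovery notice: it auto-detected /usr/bin/python3.9 on localhost and is only pointing out that the discovered path could change if another Python interpreter is installed later; the delegated hostname tasks above still completed as "changed". If one wanted to silence it, the usual approach is to pin the interpreter in the inventory. A minimal sketch (hypothetical inventory snippet, not part of this job's configuration):

    # Sketch only: pinning the interpreter skips discovery and the warning above
    all:
      hosts:
        localhost:
          ansible_connection: local
          ansible_python_interpreter: /usr/bin/python3.9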
2026-01-22 12:07:06,001 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Set the hosts k8s ansible hosts cifmw_ci_local_storage_k8s_hosts={{ _host_map | selectattr("key", "in", k8s_nodes_hostnames) | map(attribute="value") | list }}, cifmw_ci_local_storage_k8s_hostnames={{ k8s_nodes_hostnames }}] *** 2026-01-22 12:07:06,001 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:05 +0000 (0:00:02.547) 0:02:18.710 ****** 2026-01-22 12:07:06,001 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:05 +0000 (0:00:02.546) 0:02:18.708 ****** 2026-01-22 12:07:06,039 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:07:06,049 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Apply the storage class manifests kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, state=present, src={{ cifmw_cls_manifests_dir }}/storage-class.yaml] *** 2026-01-22 12:07:06,049 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:06 +0000 (0:00:00.048) 0:02:18.758 ****** 2026-01-22 12:07:06,049 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:06 +0000 (0:00:00.048) 0:02:18.756 ****** 2026-01-22 12:07:06,800 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:07:06,812 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Create directories on worker node _raw_params=worker_node_dirs.yml] *** 2026-01-22 12:07:06,813 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:06 +0000 (0:00:00.763) 0:02:19.522 ****** 2026-01-22 12:07:06,813 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:06 +0000 (0:00:00.763) 0:02:19.520 ****** 2026-01-22 12:07:06,849 p=33295 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/ci_local_storage/tasks/worker_node_dirs.yml for localhost => (item=crc) 2026-01-22 12:07:06,861 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Perform action in the PV directory path={{ [ cifmw_cls_local_storage_name, 'pv'+ ("%02d" | format(item | int)) ] | path_join }}, state={{ 'directory' if cifmw_cls_action == 'create' else 'absent' }}, mode=0775] *** 2026-01-22 12:07:06,861 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:06 +0000 (0:00:00.048) 0:02:19.571 ****** 2026-01-22 12:07:06,861 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:06 +0000 (0:00:00.048) 0:02:19.569 ****** 2026-01-22 12:07:07,363 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=1) 2026-01-22 12:07:07,814 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=2) 2026-01-22 12:07:08,227 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=3) 2026-01-22 12:07:08,679 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=4) 2026-01-22 12:07:09,121 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=5) 2026-01-22 12:07:09,600 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=6) 2026-01-22 12:07:10,043 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=7) 2026-01-22 12:07:10,526 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=8) 2026-01-22 12:07:10,968 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=9) 2026-01-22 12:07:11,443 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=10) 2026-01-22 12:07:11,917 p=33295 u=zuul n=ansible | changed: [localhost -> 
crc(38.102.83.97)] => (item=11) 2026-01-22 12:07:12,354 p=33295 u=zuul n=ansible | changed: [localhost -> crc(38.102.83.97)] => (item=12) 2026-01-22 12:07:12,369 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Generate pv related storage manifest file src=storage.yaml.j2, dest={{ cifmw_cls_manifests_dir }}/storage.yaml, mode=0644] *** 2026-01-22 12:07:12,369 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:12 +0000 (0:00:05.508) 0:02:25.079 ****** 2026-01-22 12:07:12,369 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:12 +0000 (0:00:05.508) 0:02:25.077 ****** 2026-01-22 12:07:12,809 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:07:12,819 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Apply pv related storage manifest file kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, state=present, src={{ cifmw_cls_manifests_dir }}/storage.yaml] *** 2026-01-22 12:07:12,819 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:12 +0000 (0:00:00.449) 0:02:25.528 ****** 2026-01-22 12:07:12,819 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:12 +0000 (0:00:00.449) 0:02:25.526 ****** 2026-01-22 12:07:13,721 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:07:13,764 p=33295 u=zuul n=ansible | TASK [Configure LVMS Storage Class name=ci_lvms_storage] *********************** 2026-01-22 12:07:13,764 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:13 +0000 (0:00:00.945) 0:02:26.474 ****** 2026-01-22 12:07:13,764 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:13 +0000 (0:00:00.945) 0:02:26.472 ****** 2026-01-22 12:07:13,799 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:07:13,811 p=33295 u=zuul n=ansible | TASK [Run edpm_prepare name=edpm_prepare] ************************************** 2026-01-22 12:07:13,811 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:13 +0000 (0:00:00.046) 0:02:26.520 ****** 2026-01-22 12:07:13,811 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:13 +0000 (0:00:00.046) 0:02:26.518 ****** 2026-01-22 12:07:13,952 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Define minimal set of repo variables when not running on Zuul _install_yamls_repos={'OPENSTACK_BRANCH': '', "GIT_CLONE_OPTS'": '-l', "OPENSTACK_REPO'": '{{ operators_build_output[cifmw_operator_build_meta_name].git_src_dir }}'}] *** 2026-01-22 12:07:13,952 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:13 +0000 (0:00:00.141) 0:02:26.662 ****** 2026-01-22 12:07:13,952 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:13 +0000 (0:00:00.141) 0:02:26.660 ****** 2026-01-22 12:07:13,977 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:07:13,987 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Set install_yamls Makefile environment variables cifmw_edpm_prepare_common_env={{ cifmw_install_yamls_environment | combine({'PATH': cifmw_path}) | combine(_install_yamls_repos | default({})) | combine(cifmw_edpm_prepare_extra_vars | default({})) }}, cifmw_edpm_prepare_make_openstack_env={% if cifmw_operator_build_meta_name is defined and cifmw_operator_build_meta_name in operators_build_output %} OPENSTACK_IMG: {{ operators_build_output[cifmw_operator_build_meta_name].image_catalog }} {% endif %} , cifmw_edpm_prepare_make_openstack_deploy_prep_env=CLEANUP_DIR_CMD: "true" , cifmw_edpm_prepare_operators_build_output={{ operators_build_output }}] *** 2026-01-22 12:07:13,987 p=33295 u=zuul n=ansible | 
Thursday 22 January 2026 12:07:13 +0000 (0:00:00.034) 0:02:26.696 ****** 2026-01-22 12:07:13,987 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:13 +0000 (0:00:00.034) 0:02:26.694 ****** 2026-01-22 12:07:14,017 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:07:14,027 p=33295 u=zuul n=ansible | TASK [Prepare storage in CRC name=install_yamls_makes, tasks_from=make_crc_storage] *** 2026-01-22 12:07:14,027 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:14 +0000 (0:00:00.039) 0:02:26.736 ****** 2026-01-22 12:07:14,027 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:14 +0000 (0:00:00.039) 0:02:26.734 ****** 2026-01-22 12:07:14,083 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_crc_storage_env var=make_crc_storage_env] *** 2026-01-22 12:07:14,083 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:14 +0000 (0:00:00.056) 0:02:26.792 ****** 2026-01-22 12:07:14,083 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:14 +0000 (0:00:00.056) 0:02:26.790 ****** 2026-01-22 12:07:14,127 p=33295 u=zuul n=ansible | ok: [localhost] => make_crc_storage_env: BMO_SETUP: false CHECKOUT_FROM_OPENSTACK_REF: 'true' KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig NETWORK_MTU: 1500 NNCP_DNS_SERVER: 192.168.122.10 NNCP_INTERFACE: ens7 OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm PATH: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin 2026-01-22 12:07:14,135 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_crc_storage_params var=make_crc_storage_params] *** 2026-01-22 12:07:14,135 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:14 +0000 (0:00:00.052) 0:02:26.844 ****** 2026-01-22 12:07:14,135 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:14 +0000 (0:00:00.052) 0:02:26.842 ****** 2026-01-22 12:07:14,165 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:07:14,174 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Run crc_storage output_dir={{ cifmw_basedir }}/artifacts, chdir=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls, script=make crc_storage, dry_run={{ make_crc_storage_dryrun|default(false)|bool }}, extra_args={{ dict((make_crc_storage_env|default({})), **(make_crc_storage_params|default({}))) }}] *** 2026-01-22 12:07:14,174 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:14 +0000 (0:00:00.038) 0:02:26.883 ****** 2026-01-22 12:07:14,174 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:14 +0000 (0:00:00.038) 0:02:26.881 ****** 2026-01-22 12:07:14,245 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_004_run_crc.log 2026-01-22 12:07:35,989 p=33295 u=zuul n=ansible | [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. 
Found: {{ make_crc_storage_until | default(true) }} 2026-01-22 12:07:35,992 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:07:36,008 p=33295 u=zuul n=ansible | TASK [Prepare inputs name=install_yamls_makes, tasks_from=make_input] ********** 2026-01-22 12:07:36,008 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:36 +0000 (0:00:21.834) 0:02:48.718 ****** 2026-01-22 12:07:36,008 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:36 +0000 (0:00:21.834) 0:02:48.716 ****** 2026-01-22 12:07:36,064 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_input_env var=make_input_env] *********** 2026-01-22 12:07:36,064 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:36 +0000 (0:00:00.055) 0:02:48.773 ****** 2026-01-22 12:07:36,064 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:36 +0000 (0:00:00.055) 0:02:48.771 ****** 2026-01-22 12:07:36,090 p=33295 u=zuul n=ansible | ok: [localhost] => make_input_env: BMO_SETUP: false CHECKOUT_FROM_OPENSTACK_REF: 'true' KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig NETWORK_MTU: 1500 NNCP_DNS_SERVER: 192.168.122.10 NNCP_INTERFACE: ens7 OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm PATH: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin 2026-01-22 12:07:36,098 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_input_params var=make_input_params] ***** 2026-01-22 12:07:36,099 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:36 +0000 (0:00:00.034) 0:02:48.808 ****** 2026-01-22 12:07:36,099 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:36 +0000 (0:00:00.034) 0:02:48.806 ****** 2026-01-22 12:07:36,117 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:07:36,126 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Run input output_dir={{ cifmw_basedir }}/artifacts, chdir=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls, script=make input, dry_run={{ make_input_dryrun|default(false)|bool }}, extra_args={{ dict((make_input_env|default({})), **(make_input_params|default({}))) }}] *** 2026-01-22 12:07:36,126 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:36 +0000 (0:00:00.027) 0:02:48.836 ****** 2026-01-22 12:07:36,126 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:36 +0000 (0:00:00.027) 0:02:48.834 ****** 2026-01-22 12:07:36,171 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_005_run.log 2026-01-22 12:07:37,257 p=33295 u=zuul n=ansible | [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. 
Found: {{ make_input_until | default(true) }} 2026-01-22 12:07:37,259 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:07:37,273 p=33295 u=zuul n=ansible | TASK [OpenStack meta-operator installation name=install_yamls_makes, tasks_from=make_openstack] *** 2026-01-22 12:07:37,273 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:37 +0000 (0:00:01.146) 0:02:49.983 ****** 2026-01-22 12:07:37,273 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:37 +0000 (0:00:01.146) 0:02:49.981 ****** 2026-01-22 12:07:37,331 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_openstack_env var=make_openstack_env] *** 2026-01-22 12:07:37,331 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:37 +0000 (0:00:00.057) 0:02:50.040 ****** 2026-01-22 12:07:37,331 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:37 +0000 (0:00:00.057) 0:02:50.038 ****** 2026-01-22 12:07:37,359 p=33295 u=zuul n=ansible | ok: [localhost] => make_openstack_env: BMO_SETUP: false CHECKOUT_FROM_OPENSTACK_REF: 'true' KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig NETWORK_MTU: 1500 NNCP_DNS_SERVER: 192.168.122.10 NNCP_INTERFACE: ens7 OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm PATH: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin 2026-01-22 12:07:37,367 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_openstack_params var=make_openstack_params] *** 2026-01-22 12:07:37,368 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:37 +0000 (0:00:00.036) 0:02:50.077 ****** 2026-01-22 12:07:37,368 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:37 +0000 (0:00:00.036) 0:02:50.075 ****** 2026-01-22 12:07:37,388 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:07:37,397 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Run openstack output_dir={{ cifmw_basedir }}/artifacts, chdir=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls, script=make openstack, dry_run={{ make_openstack_dryrun|default(false)|bool }}, extra_args={{ dict((make_openstack_env|default({})), **(make_openstack_params|default({}))) }}] *** 2026-01-22 12:07:37,397 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:37 +0000 (0:00:00.029) 0:02:50.106 ****** 2026-01-22 12:07:37,397 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:07:37 +0000 (0:00:00.029) 0:02:50.104 ****** 2026-01-22 12:07:37,444 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_006_run.log 2026-01-22 12:10:42,739 p=33295 u=zuul n=ansible | [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. 
Found: {{ make_openstack_until | default(true) }} 2026-01-22 12:10:42,743 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:10:42,758 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Wait for OpenStack subscription creation _raw_params=oc get sub openstack-operator --namespace={{ cifmw_install_yamls_defaults['OPERATOR_NAMESPACE'] }} -o=jsonpath='{.status.installplan.name}'] *** 2026-01-22 12:10:42,758 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:10:42 +0000 (0:03:05.361) 0:05:55.468 ****** 2026-01-22 12:10:42,758 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:10:42 +0000 (0:03:05.361) 0:05:55.466 ****** 2026-01-22 12:11:43,707 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:11:43,716 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Wait for OpenStack operator to get installed _raw_params=oc wait InstallPlan {{ cifmw_edpm_prepare_wait_installplan_out.stdout }} --namespace={{ cifmw_install_yamls_defaults['OPERATOR_NAMESPACE'] }} --for=jsonpath='{.status.phase}'=Complete --timeout=20m] *** 2026-01-22 12:11:43,716 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:43 +0000 (0:01:00.958) 0:06:56.426 ****** 2026-01-22 12:11:43,716 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:43 +0000 (0:01:00.958) 0:06:56.424 ****** 2026-01-22 12:11:44,125 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:11:44,135 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Check if the OpenStack initialization CRD exists kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, kind=CustomResourceDefinition, name=openstacks.operator.openstack.org] *** 2026-01-22 12:11:44,135 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:44 +0000 (0:00:00.419) 0:06:56.845 ****** 2026-01-22 12:11:44,136 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:44 +0000 (0:00:00.419) 0:06:56.843 ****** 2026-01-22 12:11:45,037 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:11:45,048 p=33295 u=zuul n=ansible | TASK [OpenStack meta-operator initialization, if necessary name=install_yamls_makes, tasks_from=make_openstack_init] *** 2026-01-22 12:11:45,048 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:45 +0000 (0:00:00.912) 0:06:57.757 ****** 2026-01-22 12:11:45,048 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:45 +0000 (0:00:00.912) 0:06:57.755 ****** 2026-01-22 12:11:45,113 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_openstack_init_env var=make_openstack_init_env] *** 2026-01-22 12:11:45,114 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:45 +0000 (0:00:00.065) 0:06:57.823 ****** 2026-01-22 12:11:45,114 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:45 +0000 (0:00:00.065) 0:06:57.821 ****** 2026-01-22 12:11:45,150 p=33295 u=zuul n=ansible | ok: [localhost] => make_openstack_init_env: BMO_SETUP: false CHECKOUT_FROM_OPENSTACK_REF: 'true' KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig NETWORK_MTU: 1500 NNCP_DNS_SERVER: 192.168.122.10 NNCP_INTERFACE: ens7 OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm PATH: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin 2026-01-22 12:11:45,159 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_openstack_init_params var=make_openstack_init_params] *** 
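The two edpm_prepare tasks logged above boil down to a pair of oc calls: read the InstallPlan name off the openstack-operator Subscription, then block until that InstallPlan reports phase Complete. A minimal standalone sketch of the same sequence, with the namespace defaulted to "openstack-operators" as an assumption (the role templates it from OPERATOR_NAMESPACE) and a simple retry loop standing in for whatever retry settings the role itself uses:

```bash
#!/usr/bin/env bash
# Sketch of "Wait for OpenStack subscription creation" /
# "Wait for OpenStack operator to get installed" as seen in the log.
set -euo pipefail
NS="${OPERATOR_NAMESPACE:-openstack-operators}"   # assumption; role uses cifmw_install_yamls_defaults['OPERATOR_NAMESPACE']

# Poll until the Subscription exposes the name of its InstallPlan.
plan=""
for _ in $(seq 1 60); do
  plan="$(oc get sub openstack-operator --namespace="${NS}" \
          -o=jsonpath='{.status.installplan.name}' 2>/dev/null || true)"
  [ -n "${plan}" ] && break
  sleep 10
done
[ -n "${plan}" ] || { echo "InstallPlan for openstack-operator never appeared" >&2; exit 1; }

# Block until the InstallPlan completes (same 20m timeout as in the log).
oc wait InstallPlan "${plan}" --namespace="${NS}" \
  --for=jsonpath='{.status.phase}'=Complete --timeout=20m
```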
2026-01-22 12:11:45,159 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:45 +0000 (0:00:00.045) 0:06:57.869 ****** 2026-01-22 12:11:45,159 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:45 +0000 (0:00:00.045) 0:06:57.867 ****** 2026-01-22 12:11:45,187 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:11:45,197 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Run openstack_init output_dir={{ cifmw_basedir }}/artifacts, chdir=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls, script=make openstack_init, dry_run={{ make_openstack_init_dryrun|default(false)|bool }}, extra_args={{ dict((make_openstack_init_env|default({})), **(make_openstack_init_params|default({}))) }}] *** 2026-01-22 12:11:45,197 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:45 +0000 (0:00:00.038) 0:06:57.907 ****** 2026-01-22 12:11:45,197 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:11:45 +0000 (0:00:00.038) 0:06:57.905 ****** 2026-01-22 12:11:45,255 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_007_run_openstack.log 2026-01-22 12:13:29,514 p=33295 u=zuul n=ansible | [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. Found: {{ make_openstack_init_until | default(true) }} 2026-01-22 12:13:29,515 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:13:29,534 p=33295 u=zuul n=ansible | TASK [Update OpenStack Services containers Env name=set_openstack_containers] *** 2026-01-22 12:13:29,534 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:01:44.336) 0:08:42.243 ****** 2026-01-22 12:13:29,534 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:01:44.336) 0:08:42.241 ****** 2026-01-22 12:13:29,561 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:13:29,569 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Set facts for baremetal UEFI image url cifmw_update_containers_edpm_image_url={{ cifmw_build_images_output['images']['edpm-hardened-uefi']['image'] }}, cacheable=True] *** 2026-01-22 12:13:29,570 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:00:00.035) 0:08:42.279 ****** 2026-01-22 12:13:29,570 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:00:00.035) 0:08:42.277 ****** 2026-01-22 12:13:29,594 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:13:29,606 p=33295 u=zuul n=ansible | TASK [Prepare OpenStack control plane CR name=install_yamls_makes, tasks_from=make_openstack_deploy_prep] *** 2026-01-22 12:13:29,607 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:00:00.036) 0:08:42.316 ****** 2026-01-22 12:13:29,607 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:00:00.036) 0:08:42.314 ****** 2026-01-22 12:13:29,672 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_openstack_deploy_prep_env var=make_openstack_deploy_prep_env] *** 2026-01-22 12:13:29,672 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:00:00.065) 0:08:42.382 ****** 2026-01-22 12:13:29,672 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:00:00.065) 0:08:42.380 ****** 2026-01-22 12:13:29,707 p=33295 u=zuul n=ansible | ok: [localhost] => make_openstack_deploy_prep_env: BMO_SETUP: false CHECKOUT_FROM_OPENSTACK_REF: 'true' CLEANUP_DIR_CMD: 'true' KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig NETWORK_MTU: 1500 NNCP_DNS_SERVER: 192.168.122.10 NNCP_INTERFACE: ens7 
OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm PATH: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin 2026-01-22 12:13:29,717 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_openstack_deploy_prep_params var=make_openstack_deploy_prep_params] *** 2026-01-22 12:13:29,717 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:00:00.044) 0:08:42.426 ****** 2026-01-22 12:13:29,717 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:00:00.044) 0:08:42.424 ****** 2026-01-22 12:13:29,744 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:13:29,753 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Run openstack_deploy_prep output_dir={{ cifmw_basedir }}/artifacts, chdir=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls, script=make openstack_deploy_prep, dry_run={{ make_openstack_deploy_prep_dryrun|default(false)|bool }}, extra_args={{ dict((make_openstack_deploy_prep_env|default({})), **(make_openstack_deploy_prep_params|default({}))) }}] *** 2026-01-22 12:13:29,753 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:00:00.036) 0:08:42.462 ****** 2026-01-22 12:13:29,753 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:29 +0000 (0:00:00.036) 0:08:42.461 ****** 2026-01-22 12:13:29,810 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_008_run_openstack_deploy.log 2026-01-22 12:13:31,038 p=33295 u=zuul n=ansible | [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. Found: {{ make_openstack_deploy_prep_until | default(true) }} 2026-01-22 12:13:31,040 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:13:31,058 p=33295 u=zuul n=ansible | TASK [Deploy NetConfig name=install_yamls_makes, tasks_from=make_netconfig_deploy] *** 2026-01-22 12:13:31,059 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:31 +0000 (0:00:01.305) 0:08:43.768 ****** 2026-01-22 12:13:31,059 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:31 +0000 (0:00:01.305) 0:08:43.766 ****** 2026-01-22 12:13:31,133 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_netconfig_deploy_env var=make_netconfig_deploy_env] *** 2026-01-22 12:13:31,133 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:31 +0000 (0:00:00.074) 0:08:43.843 ****** 2026-01-22 12:13:31,133 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:31 +0000 (0:00:00.074) 0:08:43.841 ****** 2026-01-22 12:13:31,161 p=33295 u=zuul n=ansible | ok: [localhost] => make_netconfig_deploy_env: BMO_SETUP: false CHECKOUT_FROM_OPENSTACK_REF: 'true' KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig NETWORK_MTU: 1500 NNCP_DNS_SERVER: 192.168.122.10 NNCP_INTERFACE: ens7 OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm PATH: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin 2026-01-22 12:13:31,169 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_netconfig_deploy_params var=make_netconfig_deploy_params] *** 2026-01-22 12:13:31,169 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:31 +0000 (0:00:00.036) 0:08:43.879 ****** 2026-01-22 12:13:31,170 
p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:31 +0000 (0:00:00.036) 0:08:43.877 ****** 2026-01-22 12:13:31,191 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:13:31,201 p=33295 u=zuul n=ansible | TASK [install_yamls_makes : Run netconfig_deploy output_dir={{ cifmw_basedir }}/artifacts, chdir=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls, script=make netconfig_deploy, dry_run={{ make_netconfig_deploy_dryrun|default(false)|bool }}, extra_args={{ dict((make_netconfig_deploy_env|default({})), **(make_netconfig_deploy_params|default({}))) }}] *** 2026-01-22 12:13:31,201 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:31 +0000 (0:00:00.031) 0:08:43.911 ****** 2026-01-22 12:13:31,201 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:31 +0000 (0:00:00.031) 0:08:43.909 ****** 2026-01-22 12:13:31,261 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_009_run_netconfig.log 2026-01-22 12:13:35,810 p=33295 u=zuul n=ansible | [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. Found: {{ make_netconfig_deploy_until | default(true) }} 2026-01-22 12:13:35,813 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:13:35,831 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Kustomize and deploy OpenStackControlPlane _raw_params=kustomize_and_deploy.yml] *** 2026-01-22 12:13:35,831 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:35 +0000 (0:00:04.629) 0:08:48.540 ****** 2026-01-22 12:13:35,831 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:35 +0000 (0:00:04.629) 0:08:48.538 ****** 2026-01-22 12:13:35,873 p=33295 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/edpm_prepare/tasks/kustomize_and_deploy.yml for localhost 2026-01-22 12:13:35,904 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Controlplane name _ctlplane_name=controlplane] ************ 2026-01-22 12:13:35,904 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:35 +0000 (0:00:00.073) 0:08:48.614 ****** 2026-01-22 12:13:35,905 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:35 +0000 (0:00:00.073) 0:08:48.612 ****** 2026-01-22 12:13:35,928 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:13:35,938 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Set vars related to update_containers content provider cifmw_update_containers_registry={{ content_provider_os_registry_url | split('/') | first }}, cifmw_update_containers_org={{ content_provider_os_registry_url | split('/') | last }}, cifmw_update_containers_tag={{ content_provider_dlrn_md5_hash }}, cifmw_update_containers_openstack=True] *** 2026-01-22 12:13:35,938 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:35 +0000 (0:00:00.033) 0:08:48.647 ****** 2026-01-22 12:13:35,938 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:35 +0000 (0:00:00.033) 0:08:48.645 ****** 2026-01-22 12:13:35,959 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:13:35,968 p=33295 u=zuul n=ansible | TASK [Prepare OpenStackVersion CR name=update_containers] ********************** 2026-01-22 12:13:35,968 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:35 +0000 (0:00:00.030) 0:08:48.678 ****** 2026-01-22 12:13:35,968 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:35 +0000 (0:00:00.030) 0:08:48.676 ****** 2026-01-22 12:13:35,992 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:13:36,001 p=33295 u=zuul 
n=ansible | TASK [edpm_prepare : Controlplane name kustomization _ctlplane_name_kustomizations=[{'apiVersion': 'kustomize.config.k8s.io/v1beta1', 'kind': 'Kustomization', 'patches': [{'target': {'kind': 'OpenStackControlPlane'}, 'patch': '- op: replace\n path: /metadata/name\n value: {{ _ctlplane_name }}'}]}]] *** 2026-01-22 12:13:36,001 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:36 +0000 (0:00:00.032) 0:08:48.710 ****** 2026-01-22 12:13:36,001 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:36 +0000 (0:00:00.032) 0:08:48.709 ****** 2026-01-22 12:13:36,027 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:13:36,046 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Perform kustomizations to the OpenStackControlPlane CR target_path={{ cifmw_edpm_prepare_openstack_crs_path }}, sort_ascending=False, kustomizations={{ cifmw_edpm_prepare_kustomizations + _ctlplane_name_kustomizations + (cifmw_edpm_prepare_extra_kustomizations | default([])) }}, kustomizations_paths={{ [ ( [ cifmw_edpm_prepare_manifests_dir, 'kustomizations', 'controlplane' ] | ansible.builtin.path_join ) ] }}] *** 2026-01-22 12:13:36,046 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:36 +0000 (0:00:00.044) 0:08:48.755 ****** 2026-01-22 12:13:36,046 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:36 +0000 (0:00:00.044) 0:08:48.753 ****** 2026-01-22 12:13:36,766 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:13:36,781 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Log the CR that is about to be applied var=cifmw_edpm_prepare_crs_kustomize_result] *** 2026-01-22 12:13:36,781 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:36 +0000 (0:00:00.735) 0:08:49.490 ****** 2026-01-22 12:13:36,781 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:36 +0000 (0:00:00.735) 0:08:49.488 ****** 2026-01-22 12:13:36,821 p=33295 u=zuul n=ansible | ok: [localhost] => cifmw_edpm_prepare_crs_kustomize_result: changed: true count: 3 failed: false kustomizations_paths: - /home/zuul/ci-framework-data/artifacts/manifests/openstack/openstack/cr/kustomization.yaml - /home/zuul/ci-framework-data/artifacts/manifests/kustomizations/controlplane/99-kustomization.yaml output_path: /home/zuul/ci-framework-data/artifacts/manifests/openstack/openstack/cr/cifmw-kustomization-result.yaml result: - apiVersion: core.openstack.org/v1beta1 kind: OpenStackControlPlane metadata: labels: created-by: install_yamls name: controlplane namespace: openstack spec: barbican: apiOverride: route: {} template: barbicanAPI: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 barbicanKeystoneListener: replicas: 1 barbicanWorker: replicas: 1 databaseInstance: openstack secret: os**********et cinder: apiOverride: route: {} template: cinderAPI: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer cinderBackup: networkAttachments: - storage replicas: 0 cinderScheduler: replicas: 1 cinderVolumes: volume1: networkAttachments: - storage replicas: 0 databaseInstance: openstack secret: os**********et designate: apiOverride: route: {} enabled: false template: databaseInstance: openstack designateAPI: override: service: internal: metadata: annotations: 
metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer designateBackendbind9: networkAttachments: - designate replicas: 1 storageClass: local-storage storageRequest: 10G designateCentral: replicas: 1 designateMdns: networkAttachments: - designate replicas: 1 designateProducer: replicas: 1 designateWorker: networkAttachments: - designate replicas: 1 secret: os**********et dns: template: options: - key: server values: - 192.168.122.10 - key: no-negcache values: [] override: service: metadata: annotations: metallb.universe.tf/address-pool: ctlplane metallb.universe.tf/allow-shared-ip: ctlplane metallb.universe.tf/loadBalancerIPs: 192.168.122.80 spec: type: LoadBalancer replicas: 1 galera: templates: openstack: replicas: 1 secret: os**********et storageRequest: 10G openstack-cell1: replicas: 1 secret: os**********et storageRequest: 10G glance: apiOverrides: default: route: {} template: customServiceConfig: | [DEFAULT] enabled_backends = default_backend:swift [glance_store] default_backend = default_backend [default_backend] swift_store_create_container_on_put = True swift_store_auth_version = 3 swift_store_auth_address = {{ .KeystoneInternalURL }} swift_store_endpoint_type = internalURL swift_store_user = service:glance swift_store_key = {{ .ServicePassword }} databaseInstance: openstack glanceAPIs: default: networkAttachments: - storage override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 keystoneEndpoint: default secret: os**********et storage: storageClass: '' storageRequest: 10G heat: apiOverride: route: {} cnfAPIOverride: route: {} enabled: false template: databaseInstance: openstack heatAPI: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 heatEngine: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 secret: os**********et horizon: apiOverride: route: {} template: replicas: 1 secret: os**********et ironic: enabled: false template: databaseInstance: openstack ironicAPI: replicas: 1 ironicConductors: - replicas: 1 storageRequest: 10G ironicInspector: replicas: 1 ironicNeutronAgent: replicas: 1 secret: os**********et keystone: apiOverride: route: {} template: databaseInstance: openstack override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer secret: os**********et manila: apiOverride: route: {} template: databaseInstance: openstack manilaAPI: networkAttachments: - internalapi override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 manilaScheduler: replicas: 1 manilaShares: share1: networkAttachments: - storage replicas: 1 memcached: templates: memcached: replicas: 1 neutron: apiOverride: route: {} template: 
databaseInstance: openstack networkAttachments: - internalapi override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer secret: os**********et nova: apiOverride: route: {} template: apiServiceTemplate: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer cellTemplates: cell0: cellDatabaseAccount: nova-cell0 cellDatabaseInstance: openstack cellMessageBusInstance: rabbitmq conductorServiceTemplate: replicas: 1 hasAPIAccess: true cell1: cellDatabaseAccount: nova-cell1 cellDatabaseInstance: openstack-cell1 cellMessageBusInstance: rabbitmq-cell1 conductorServiceTemplate: replicas: 1 hasAPIAccess: true metadataServiceTemplate: override: service: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer secret: os**********et octavia: enabled: false template: databaseInstance: openstack octaviaAPI: replicas: 1 secret: os**********et ovn: template: ovnController: networkAttachment: tenant nicMappings: datacentre: ospbr ovnDBCluster: ovndbcluster-nb: dbType: NB networkAttachment: internalapi storageRequest: 10G ovndbcluster-sb: dbType: SB networkAttachment: internalapi storageRequest: 10G placement: apiOverride: route: {} template: databaseInstance: openstack override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer secret: os**********et rabbitmq: templates: rabbitmq: override: service: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.85 spec: type: LoadBalancer rabbitmq-cell1: override: service: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.86 spec: type: LoadBalancer redis: enabled: false secret: os**********et storageClass: local-storage swift: enabled: true proxyOverride: route: {} template: swiftProxy: networkAttachments: - storage override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 swiftRing: ringReplicas: 1 swiftStorage: networkAttachments: - storage replicas: 1 telemetry: enabled: true template: autoscaling: aodh: databaseAccount: aodh databaseInstance: openstack passwordSelectors: null secret: os**********et enabled: false heatInstance: heat ceilometer: enabled: true secret: os**********et cloudkitty: apiTimeout: 0 cloudKittyAPI: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 resources: {} tls: api: internal: {} public: {} caBundleSecretName: combined-ca-bundle cloudKittyProc: replicas: 1 resources: {} tls: caBundleSecretName: combined-ca-bundle databaseAccount: cloudkitty databaseInstance: openstack enabled: false memcachedInstance: memcached passwordSelector: aodhService: 
AodhPassword ceilometerService: CeilometerPassword cloudKittyService: CloudKittyPassword preserveJobs: false rabbitMqClusterName: rabbitmq s3StorageConfig: schemas: - effectiveDate: '2024-11-18' version: v13 secret: name: cloudkitty-loki-s3 type: s3 secret: os**********et serviceUser: cloudkitty storageClass: local-storage logging: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 cloNamespace: openshift-logging enabled: false ipaddr: 172.17.0.80 port: 10514 metricStorage: enabled: false monitoringStack: alertingEnabled: true scrapeInterval: 30s storage: persistent: pvcStorageRequest: 10G retention: 24h strategy: persistent 2026-01-22 12:13:36,832 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Apply the OpenStackControlPlane CR output_dir={{ cifmw_edpm_prepare_basedir }}/artifacts, script=oc apply -f {{ cifmw_edpm_prepare_crs_kustomize_result.output_path }}] *** 2026-01-22 12:13:36,833 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:36 +0000 (0:00:00.051) 0:08:49.542 ****** 2026-01-22 12:13:36,833 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:36 +0000 (0:00:00.051) 0:08:49.540 ****** 2026-01-22 12:13:36,888 p=33295 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_010_apply_the.log 2026-01-22 12:13:37,248 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:13:37,261 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Wait for control plane to change its status seconds={{ cifmw_edpm_prepare_wait_controplane_status_change_sec }}] *** 2026-01-22 12:13:37,261 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:37 +0000 (0:00:00.428) 0:08:49.970 ****** 2026-01-22 12:13:37,261 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:13:37 +0000 (0:00:00.428) 0:08:49.969 ****** 2026-01-22 12:13:37,290 p=33295 u=zuul n=ansible | Pausing for 30 seconds 2026-01-22 12:14:07,324 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:14:07,335 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Wait for OpenStack controlplane to be deployed _raw_params=oc wait OpenStackControlPlane {{ _ctlplane_name }} --namespace={{ cifmw_install_yamls_defaults['NAMESPACE'] }} --for=condition=ready --timeout={{ cifmw_edpm_prepare_timeout }}m] *** 2026-01-22 12:14:07,335 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:14:07 +0000 (0:00:30.074) 0:09:20.044 ****** 2026-01-22 12:14:07,335 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:14:07 +0000 (0:00:30.074) 0:09:20.043 ****** 2026-01-22 12:19:30,435 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:30,445 p=33295 u=zuul n=ansible | TASK [Extract and install OpenStackControlplane CA role=install_openstack_ca] *** 2026-01-22 12:19:30,445 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:30 +0000 (0:05:23.110) 0:14:43.155 ****** 2026-01-22 12:19:30,445 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:30 +0000 (0:05:23.110) 0:14:43.153 ****** 2026-01-22 12:19:30,549 p=33295 u=zuul n=ansible | TASK [install_openstack_ca : Get CA bundle data with retries] ****************** 2026-01-22 12:19:30,549 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:30 +0000 (0:00:00.103) 0:14:43.258 ****** 2026-01-22 12:19:30,549 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:30 +0000 (0:00:00.103) 0:14:43.256 ****** 2026-01-22 12:19:30,948 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:30,958 p=33295 u=zuul n=ansible | TASK 
[install_openstack_ca : Set _ca_bundle fact if CA returned from OCP] ****** 2026-01-22 12:19:30,958 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:30 +0000 (0:00:00.408) 0:14:43.667 ****** 2026-01-22 12:19:30,958 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:30 +0000 (0:00:00.408) 0:14:43.665 ****** 2026-01-22 12:19:30,995 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:31,005 p=33295 u=zuul n=ansible | TASK [install_openstack_ca : Creating tls-ca-bundle.pem from CA bundle dest={{ cifmw_install_openstack_ca_file_full_path }}, content={{ _ca_bundle }}, mode=0644] *** 2026-01-22 12:19:31,006 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.047) 0:14:43.715 ****** 2026-01-22 12:19:31,006 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.047) 0:14:43.713 ****** 2026-01-22 12:19:31,423 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:31,433 p=33295 u=zuul n=ansible | TASK [install_openstack_ca : Check if OpenStackControlplane CA file is present path={{ cifmw_install_openstack_ca_file_full_path }}, get_attributes=False, get_checksum=False, get_mime=False] *** 2026-01-22 12:19:31,434 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.428) 0:14:44.143 ****** 2026-01-22 12:19:31,434 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.428) 0:14:44.141 ****** 2026-01-22 12:19:31,620 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:31,628 p=33295 u=zuul n=ansible | TASK [Call install_ca role to inject OpenStackControlplane CA file if present role=install_ca] *** 2026-01-22 12:19:31,628 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.194) 0:14:44.337 ****** 2026-01-22 12:19:31,628 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.194) 0:14:44.335 ****** 2026-01-22 12:19:31,683 p=33295 u=zuul n=ansible | TASK [install_ca : Ensure target directory exists path={{ cifmw_install_ca_trust_dir }}, state=directory, mode=0755] *** 2026-01-22 12:19:31,683 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.055) 0:14:44.393 ****** 2026-01-22 12:19:31,684 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.055) 0:14:44.391 ****** 2026-01-22 12:19:31,887 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:31,895 p=33295 u=zuul n=ansible | TASK [install_ca : Install internal CA from url url={{ cifmw_install_ca_url }}, dest={{ cifmw_install_ca_trust_dir }}, validate_certs={{ cifmw_install_ca_url_validate_certs | default(omit) }}, mode=0644] *** 2026-01-22 12:19:31,895 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.211) 0:14:44.605 ****** 2026-01-22 12:19:31,896 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.211) 0:14:44.603 ****** 2026-01-22 12:19:31,918 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:31,927 p=33295 u=zuul n=ansible | TASK [install_ca : Install custom CA bundle from inline dest={{ cifmw_install_ca_trust_dir }}/cifmw_inline_ca_bundle.crt, content={{ cifmw_install_ca_bundle_inline }}, mode=0644] *** 2026-01-22 12:19:31,927 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.031) 0:14:44.636 ****** 2026-01-22 12:19:31,927 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.031) 0:14:44.634 ****** 2026-01-22 12:19:31,949 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:31,959 p=33295 u=zuul 
n=ansible | TASK [install_ca : Install custom CA bundle from file dest={{ cifmw_install_ca_trust_dir }}/{{ cifmw_install_ca_bundle_src | basename }}, src={{ cifmw_install_ca_bundle_src }}, mode=0644] *** 2026-01-22 12:19:31,959 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.032) 0:14:44.668 ****** 2026-01-22 12:19:31,959 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:31 +0000 (0:00:00.032) 0:14:44.666 ****** 2026-01-22 12:19:32,405 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:32,413 p=33295 u=zuul n=ansible | TASK [install_ca : Update ca bundle _raw_params=update-ca-trust] *************** 2026-01-22 12:19:32,413 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:32 +0000 (0:00:00.454) 0:14:45.123 ****** 2026-01-22 12:19:32,413 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:32 +0000 (0:00:00.454) 0:14:45.121 ****** 2026-01-22 12:19:34,008 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:34,039 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Extract keystone endpoint host _raw_params=oc get keystoneapi keystone --namespace={{ cifmw_install_yamls_defaults['NAMESPACE'] }} -o jsonpath='{ .status.apiEndpoints.public }'] *** 2026-01-22 12:19:34,040 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:34 +0000 (0:00:01.626) 0:14:46.749 ****** 2026-01-22 12:19:34,040 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:34 +0000 (0:00:01.626) 0:14:46.747 ****** 2026-01-22 12:19:34,400 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:34,408 p=33295 u=zuul n=ansible | TASK [edpm_prepare : Wait for keystone endpoint to exist in DNS url={{ _cifmw_edpm_prepare_keystone_endpoint_out.stdout | trim }}, status_code={{ _keystone_response_codes }}, validate_certs={{ cifmw_edpm_prepare_verify_tls }}] *** 2026-01-22 12:19:34,408 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:34 +0000 (0:00:00.368) 0:14:47.118 ****** 2026-01-22 12:19:34,408 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:34 +0000 (0:00:00.368) 0:14:47.116 ****** 2026-01-22 12:19:34,850 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:34,882 p=33295 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2026-01-22 12:19:34,883 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:34 +0000 (0:00:00.474) 0:14:47.592 ****** 2026-01-22 12:19:34,883 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:34 +0000 (0:00:00.474) 0:14:47.590 ****** 2026-01-22 12:19:35,023 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:35,032 p=33295 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] 
*** 2026-01-22 12:19:35,032 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.149) 0:14:47.741 ****** 2026-01-22 12:19:35,032 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.149) 0:14:47.739 ****** 2026-01-22 12:19:35,182 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:35,192 p=33295 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_ctlplane_deploy _raw_params={{ hook.type }}.yml] *** 2026-01-22 12:19:35,192 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.160) 0:14:47.902 ****** 2026-01-22 12:19:35,192 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.160) 0:14:47.900 ****** 2026-01-22 12:19:35,327 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,358 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] *** 2026-01-22 12:19:35,359 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.166) 0:14:48.068 ****** 2026-01-22 12:19:35,359 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.166) 0:14:48.066 ****** 2026-01-22 12:19:35,557 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:35,574 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Define minimal set of repo variables when not running on Zuul _install_yamls_repos={{ ( { 'OPENSTACK_REPO': operators_build_output[cifmw_operator_build_meta_name].git_src_dir, 'OPENSTACK_BRANCH': '', 'GIT_CLONE_OPTS': '-l', } if (cifmw_operator_build_meta_name is defined and cifmw_operator_build_meta_name in operators_build_output) else {} ) }}] *** 2026-01-22 12:19:35,575 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.216) 0:14:48.284 ****** 2026-01-22 12:19:35,575 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.216) 0:14:48.282 ****** 2026-01-22 12:19:35,602 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,611 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Set install_yamls Makefile environment variables cifmw_edpm_deploy_baremetal_common_env={{ cifmw_install_yamls_environment | combine({'PATH': cifmw_path}) | combine(_install_yamls_repos | default({})) }}, cifmw_edpm_deploy_baremetal_make_openstack_env={{ cifmw_edpm_deploy_baremetal_make_openstack_env | default({}) | combine( { 'OPENSTACK_IMG': operators_build_output[cifmw_operator_build_meta_name].image_catalog, } if (cifmw_operator_build_meta_name is defined and cifmw_operator_build_meta_name in operators_build_output) else {} ) }}, cifmw_edpm_deploy_baremetal_operators_build_output={{ operators_build_output }}] *** 2026-01-22 12:19:35,611 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.036) 0:14:48.321 ****** 2026-01-22 12:19:35,612 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.036) 0:14:48.319 ****** 2026-01-22 12:19:35,650 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,660 p=33295 u=zuul n=ansible | TASK [Create virtual baremetal name=install_yamls_makes, tasks_from=make_edpm_baremetal_compute] *** 2026-01-22 12:19:35,660 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.048) 0:14:48.369 ****** 2026-01-22 12:19:35,660 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.048) 0:14:48.368 ****** 2026-01-22 12:19:35,686 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,695 p=33295 u=zuul n=ansible | TASK 
[edpm_deploy_baremetal : Create the config file mode=0644, content={{ cifmw_edpm_deploy_baremetal_nova_compute_extra_config }}, dest={{ _cifmw_edpm_deploy_baremetal_nova_extra_config_file }}] *** 2026-01-22 12:19:35,695 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.034) 0:14:48.404 ****** 2026-01-22 12:19:35,695 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.034) 0:14:48.402 ****** 2026-01-22 12:19:35,721 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,730 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Define DATAPLANE_EXTRA_NOVA_CONFIG_FILE cifmw_edpm_deploy_baremetal_common_env={{ cifmw_edpm_deploy_baremetal_common_env | default({}) | combine({'DATAPLANE_EXTRA_NOVA_CONFIG_FILE': _cifmw_edpm_deploy_baremetal_nova_extra_config_file }) }}, cacheable=True] *** 2026-01-22 12:19:35,730 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.034) 0:14:48.439 ****** 2026-01-22 12:19:35,730 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.034) 0:14:48.437 ****** 2026-01-22 12:19:35,755 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,764 p=33295 u=zuul n=ansible | TASK [Prepare OpenStack Dataplane NodeSet CR name=install_yamls_makes, tasks_from=make_edpm_deploy_baremetal_prep] *** 2026-01-22 12:19:35,764 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.034) 0:14:48.473 ****** 2026-01-22 12:19:35,764 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.034) 0:14:48.471 ****** 2026-01-22 12:19:35,793 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,804 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Perform kustomizations to the OpenStackDataPlaneNodeSet CR target_path={{ cifmw_edpm_deploy_openstack_crs_path }}, sort_ascending=False, kustomizations={% if content_provider_registry_ip is defined or not cifmw_edpm_deploy_baremetal_bootc %} apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization patches: - target: kind: OpenStackDataPlaneNodeSet patch: |- {% if content_provider_registry_ip is defined %} - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_container_registry_insecure_registries value: ["{{ content_provider_registry_ip }}:5001"] {% endif %} {% if not cifmw_edpm_deploy_baremetal_bootc %} - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_bootstrap_command value: sudo dnf -y update {% endif %} {% endif %}, kustomizations_paths={{ [ ( [ cifmw_edpm_deploy_baremetal_manifests_dir, 'kustomizations', 'dataplane' ] | ansible.builtin.path_join ) ] }}] *** 2026-01-22 12:19:35,804 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.040) 0:14:48.514 ****** 2026-01-22 12:19:35,804 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.040) 0:14:48.512 ****** 2026-01-22 12:19:35,829 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,838 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Log the CR that is about to be applied var=cifmw_edpm_deploy_baremetal_crs_kustomize_result] *** 2026-01-22 12:19:35,839 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.034) 0:14:48.548 ****** 2026-01-22 12:19:35,839 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.034) 0:14:48.546 ****** 2026-01-22 12:19:35,867 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,878 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : 
Create repo-setup-downstream OpenStackDataPlaneService _raw_params=oc apply -n {{ cifmw_install_yamls_defaults['NAMESPACE'] }} -f "{{ cifmw_installyamls_repos }}/devsetup/edpm/services/dataplane_v1beta1_openstackdataplaneservice_reposetup_downstream.yaml"] *** 2026-01-22 12:19:35,878 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.038) 0:14:48.587 ****** 2026-01-22 12:19:35,878 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.038) 0:14:48.585 ****** 2026-01-22 12:19:35,900 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,910 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Get list of services defined under OpenStackDataPlaneNodeSet resource _raw_params=yq '.spec.services[]' {{ cifmw_edpm_deploy_baremetal_crs_kustomize_result.output_path }}] *** 2026-01-22 12:19:35,910 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.032) 0:14:48.619 ****** 2026-01-22 12:19:35,910 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.032) 0:14:48.617 ****** 2026-01-22 12:19:35,934 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,943 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Patch OpenStackDataPlaneNodeSet resource to add "repo-setup-downstream" service _raw_params=yq -i '.spec.services = ["repo-setup-downstream"] + .spec.services' {{ cifmw_edpm_deploy_baremetal_crs_kustomize_result.output_path }}] *** 2026-01-22 12:19:35,943 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.033) 0:14:48.653 ****** 2026-01-22 12:19:35,943 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.033) 0:14:48.651 ****** 2026-01-22 12:19:35,967 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:35,976 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Patch OpenStackDataPlaneNodeSet resource to replace "repo-setup" with "repo-setup-downstream" service _raw_params=yq -i '(.spec.services[] | select(. 
== "repo-setup")) |= "repo-setup-downstream"' {{ cifmw_edpm_deploy_baremetal_crs_kustomize_result.output_path }}] *** 2026-01-22 12:19:35,976 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.032) 0:14:48.685 ****** 2026-01-22 12:19:35,976 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:35 +0000 (0:00:00.032) 0:14:48.683 ****** 2026-01-22 12:19:36,001 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,010 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Apply the OpenStackDataPlaneNodeSet CR output_dir={{ cifmw_edpm_deploy_baremetal_basedir }}/artifacts, script=oc apply -f {{ cifmw_edpm_deploy_baremetal_crs_kustomize_result.output_path }}] *** 2026-01-22 12:19:36,010 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.033) 0:14:48.719 ****** 2026-01-22 12:19:36,010 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.033) 0:14:48.717 ****** 2026-01-22 12:19:36,034 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,043 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Wait for Ironic to be ready _raw_params=oc wait pod -l name=ironic -n baremetal-operator-system --for=condition=Ready --timeout={{ cifmw_edpm_deploy_baremetal_wait_ironic_timeout_mins }}m] *** 2026-01-22 12:19:36,043 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.033) 0:14:48.752 ****** 2026-01-22 12:19:36,043 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.033) 0:14:48.750 ****** 2026-01-22 12:19:36,070 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,081 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Wait for OpenStack Provision Server pod to be created _raw_params=oc get po -l osp-provisionserver/name=openstack-edpm-ipam-provisionserver -n {{ cifmw_install_yamls_defaults['NAMESPACE'] }} -o name] *** 2026-01-22 12:19:36,082 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.038) 0:14:48.791 ****** 2026-01-22 12:19:36,082 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.038) 0:14:48.789 ****** 2026-01-22 12:19:36,108 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,120 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Wait for OpenStack Provision Server deployment to be available _raw_params=oc wait deployment openstack-edpm-ipam-provisionserver-openstackprovisionserver -n {{ cifmw_install_yamls_defaults['NAMESPACE'] }} --for condition=Available --timeout={{ cifmw_edpm_deploy_baremetal_wait_provisionserver_timeout_mins }}m] *** 2026-01-22 12:19:36,120 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.038) 0:14:48.829 ****** 2026-01-22 12:19:36,120 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.038) 0:14:48.827 ****** 2026-01-22 12:19:36,145 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,155 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Wait for baremetal nodes to reach 'provisioned' state _raw_params=oc wait bmh --all -n {{ cifmw_install_yamls_defaults['NAMESPACE'] }} --for=jsonpath='{.status.provisioning.state}'=provisioned --timeout={{ cifmw_edpm_deploy_baremetal_wait_bmh_timeout_mins }}m] *** 2026-01-22 12:19:36,155 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.035) 0:14:48.865 ****** 2026-01-22 12:19:36,155 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.035) 0:14:48.863 ****** 
2026-01-22 12:19:36,182 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,191 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Register the list of compute nodes _raw_params=oc get bmh -n {{ cifmw_install_yamls_defaults['NAMESPACE'] }}] *** 2026-01-22 12:19:36,191 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.035) 0:14:48.900 ****** 2026-01-22 12:19:36,191 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.035) 0:14:48.898 ****** 2026-01-22 12:19:36,214 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,224 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Print the list of compute nodes var=compute_nodes_output.stdout_lines] *** 2026-01-22 12:19:36,224 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.032) 0:14:48.933 ****** 2026-01-22 12:19:36,224 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.032) 0:14:48.931 ****** 2026-01-22 12:19:36,248 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,257 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Wait for OpenStackDataPlaneNodeSet to be deployed _raw_params=oc wait OpenStackDataPlaneNodeSet {{ cr_name }} --namespace={{ cifmw_install_yamls_defaults['NAMESPACE'] }} --for=condition=ready --timeout={{ cifmw_edpm_deploy_baremetal_wait_dataplane_timeout_mins }}m] *** 2026-01-22 12:19:36,257 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.032) 0:14:48.966 ****** 2026-01-22 12:19:36,257 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.032) 0:14:48.964 ****** 2026-01-22 12:19:36,279 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,288 p=33295 u=zuul n=ansible | TASK [edpm_deploy_baremetal : Run nova-manage discover_hosts to ensure compute nodes are mapped _raw_params=oc rsh -n {{ cifmw_install_yamls_defaults['NAMESPACE'] }} nova-cell0-conductor-0 nova-manage cell_v2 discover_hosts --verbose] *** 2026-01-22 12:19:36,288 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.031) 0:14:48.998 ****** 2026-01-22 12:19:36,288 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.031) 0:14:48.996 ****** 2026-01-22 12:19:36,312 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,330 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Load parameters files dir={{ cifmw_basedir }}/artifacts/parameters] *** 2026-01-22 12:19:36,330 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.041) 0:14:49.040 ****** 2026-01-22 12:19:36,330 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.041) 0:14:49.038 ****** 2026-01-22 12:19:36,393 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:36,404 p=33295 u=zuul n=ansible | TASK [libvirt_manager : Set compute config and common environment facts compute_config={{ cifmw_libvirt_manager_configuration['vms']['compute'] }}, cifmw_libvirt_manager_common_env={{ cifmw_install_yamls_environment | combine({'PATH': cifmw_path }) }}, cacheable=True] *** 2026-01-22 12:19:36,404 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.073) 0:14:49.113 ****** 2026-01-22 12:19:36,404 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.073) 0:14:49.111 ****** 2026-01-22 12:19:36,432 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,444 p=33295 u=zuul n=ansible | TASK [libvirt_manager : Ensure needed directories 
exist path={{ item }}, state=directory, mode=0755] *** 2026-01-22 12:19:36,444 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.039) 0:14:49.153 ****** 2026-01-22 12:19:36,444 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.039) 0:14:49.151 ****** 2026-01-22 12:19:36,479 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=/home/zuul/ci-framework-data/workload) 2026-01-22 12:19:36,490 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/edpm_compute) 2026-01-22 12:19:36,496 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/openstack/cr/) 2026-01-22 12:19:36,497 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,508 p=33295 u=zuul n=ansible | TASK [libvirt_manager : Ensure image is available _raw_params=get_image.yml] *** 2026-01-22 12:19:36,509 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.064) 0:14:49.218 ****** 2026-01-22 12:19:36,509 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.064) 0:14:49.216 ****** 2026-01-22 12:19:36,535 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,544 p=33295 u=zuul n=ansible | TASK [Create EDPM compute VMs name=install_yamls_makes, tasks_from=make_edpm_compute.yml] *** 2026-01-22 12:19:36,544 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.035) 0:14:49.253 ****** 2026-01-22 12:19:36,544 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.035) 0:14:49.251 ****** 2026-01-22 12:19:36,571 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,581 p=33295 u=zuul n=ansible | TASK [libvirt_manager : Catch compute IPs _raw_params=virsh -c qemu:///system -q domifaddr --source arp --domain edpm-compute-{{ item }}] *** 2026-01-22 12:19:36,581 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.037) 0:14:49.291 ****** 2026-01-22 12:19:36,581 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.037) 0:14:49.289 ****** 2026-01-22 12:19:36,611 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,621 p=33295 u=zuul n=ansible | TASK [libvirt_manager : Ensure we get SSH host={{ item.stdout.split()[-1].split('/')[0] }}, port=22, timeout=60] *** 2026-01-22 12:19:36,621 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.039) 0:14:49.330 ****** 2026-01-22 12:19:36,621 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.039) 0:14:49.328 ****** 2026-01-22 12:19:36,647 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,657 p=33295 u=zuul n=ansible | TASK [libvirt_manager : Output CR for extra computes dest={{ cifmw_libvirt_manager_basedir }}/artifacts/{{ cifmw_install_yamls_defaults['NAMESPACE'] }}/cr/99-cifmw-computes-{{ item }}.yaml, src=kustomize_compute.yml.j2, mode=0644] *** 2026-01-22 12:19:36,657 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.036) 0:14:49.366 ****** 2026-01-22 12:19:36,657 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.036) 0:14:49.364 ****** 2026-01-22 12:19:36,689 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,705 p=33295 u=zuul n=ansible | TASK [Prepare for HCI deploy phase 1 name=hci_prepare, tasks_from=phase1.yml] *** 2026-01-22 12:19:36,706 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.048) 0:14:49.415 ****** 
2026-01-22 12:19:36,706 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.048) 0:14:49.413 ****** 2026-01-22 12:19:36,732 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,742 p=33295 u=zuul n=ansible | TASK [edpm_deploy : Set EDPM related vars cifmw_edpm_deploy_env={{ cifmw_install_yamls_environment | combine({'PATH': cifmw_path}) | combine({'DATAPLANE_REGISTRY_URL': cifmw_edpm_deploy_registry_url }) | combine({'DATAPLANE_CONTAINER_TAG': cifmw_repo_setup_full_hash | default(cifmw_install_yamls_defaults['DATAPLANE_CONTAINER_TAG']) }) | combine(cifmw_edpm_deploy_extra_vars | default({})) | combine(_install_yamls_repos | default({})) }}, cacheable=True] *** 2026-01-22 12:19:36,742 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.036) 0:14:49.451 ****** 2026-01-22 12:19:36,742 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.036) 0:14:49.449 ****** 2026-01-22 12:19:36,770 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,781 p=33295 u=zuul n=ansible | TASK [edpm_deploy : Create the config file mode=0644, content={{ cifmw_edpm_deploy_nova_compute_extra_config }}, dest={{ _cifmw_edpm_deploy_nova_extra_config_file }}] *** 2026-01-22 12:19:36,781 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.039) 0:14:49.491 ****** 2026-01-22 12:19:36,781 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.039) 0:14:49.489 ****** 2026-01-22 12:19:36,812 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,820 p=33295 u=zuul n=ansible | TASK [edpm_deploy : Define DATAPLANE_EXTRA_NOVA_CONFIG_FILE cifmw_edpm_deploy_env={{ cifmw_edpm_deploy_env | default({}) | combine({'DATAPLANE_EXTRA_NOVA_CONFIG_FILE': _cifmw_edpm_deploy_nova_extra_config_file }) }}, cacheable=True] *** 2026-01-22 12:19:36,820 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.038) 0:14:49.529 ****** 2026-01-22 12:19:36,820 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.038) 0:14:49.527 ****** 2026-01-22 12:19:36,854 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,865 p=33295 u=zuul n=ansible | TASK [Prepare OpenStack Dataplane NodeSet CR name=install_yamls_makes, tasks_from=make_edpm_deploy_prep] *** 2026-01-22 12:19:36,866 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.045) 0:14:49.575 ****** 2026-01-22 12:19:36,866 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.045) 0:14:49.573 ****** 2026-01-22 12:19:36,893 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,905 p=33295 u=zuul n=ansible | TASK [edpm_deploy : Perform kustomizations to the OpenStackDataPlaneNodeSet CR target_path={{ cifmw_edpm_deploy_openstack_crs_path }}, sort_ascending=False, kustomizations_paths={{ [ ( [ cifmw_edpm_deploy_manifests_dir, 'kustomizations', 'dataplane' ] | ansible.builtin.path_join ) ] }}] *** 2026-01-22 12:19:36,905 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.039) 0:14:49.615 ****** 2026-01-22 12:19:36,906 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.039) 0:14:49.613 ****** 2026-01-22 12:19:36,931 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,941 p=33295 u=zuul n=ansible | TASK [edpm_deploy : Log the CR that is about to be applied var=cifmw_edpm_deploy_crs_kustomize_result] *** 2026-01-22 12:19:36,941 p=33295 u=zuul n=ansible | Thursday 22 
January 2026 12:19:36 +0000 (0:00:00.035) 0:14:49.650 ****** 2026-01-22 12:19:36,941 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.035) 0:14:49.648 ****** 2026-01-22 12:19:36,967 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:36,979 p=33295 u=zuul n=ansible | TASK [edpm_deploy : Apply dataplane resources but ignore DataPlaneDeployment kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, state=present, definition={{ lookup('file', cifmw_edpm_deploy_crs_kustomize_result.output_path) | from_yaml_all | rejectattr('kind', 'search', cifmw_edpm_deploy_step2_kind) }}] *** 2026-01-22 12:19:36,979 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.037) 0:14:49.688 ****** 2026-01-22 12:19:36,979 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:36 +0000 (0:00:00.037) 0:14:49.686 ****** 2026-01-22 12:19:37,006 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:37,019 p=33295 u=zuul n=ansible | TASK [edpm_deploy : Wait for OpenStackDataPlaneNodeSet become SetupReady _raw_params=oc wait OpenStackDataPlaneNodeSet {{ cr_name }} --namespace={{ cifmw_install_yamls_defaults['NAMESPACE'] }} --for=condition=SetupReady --timeout={{ cifmw_edpm_deploy_timeout }}m] *** 2026-01-22 12:19:37,019 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.040) 0:14:49.729 ****** 2026-01-22 12:19:37,019 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.040) 0:14:49.727 ****** 2026-01-22 12:19:37,051 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:37,062 p=33295 u=zuul n=ansible | TASK [edpm_deploy : Apply DataPlaneDeployment resource kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, state=present, definition={{ lookup('file', cifmw_edpm_deploy_crs_kustomize_result.output_path) | from_yaml_all | selectattr('kind', 'search', cifmw_edpm_deploy_step2_kind) }}] *** 2026-01-22 12:19:37,062 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.042) 0:14:49.771 ****** 2026-01-22 12:19:37,062 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.042) 0:14:49.770 ****** 2026-01-22 12:19:37,091 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:37,106 p=33295 u=zuul n=ansible | TASK [edpm_deploy : Wait for OpenStackDataPlaneDeployment become Ready _raw_params=oc wait OpenStackDataPlaneDeployment {{ cr_name }} --namespace={{ cifmw_install_yamls_defaults['NAMESPACE'] }} --for=condition=Ready --timeout={{ cifmw_edpm_deploy_timeout }}m] *** 2026-01-22 12:19:37,106 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.043) 0:14:49.815 ****** 2026-01-22 12:19:37,106 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.043) 0:14:49.813 ****** 2026-01-22 12:19:37,132 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:37,141 p=33295 u=zuul n=ansible | TASK [edpm_deploy : Run nova-manage discover_hosts to ensure compute nodes are mapped output_dir={{ cifmw_basedir }}/artifacts, executable=/bin/bash, script=set -xe oc rsh --namespace={{ cifmw_install_yamls_defaults['NAMESPACE'] }} nova-cell0-conductor-0 nova-manage cell_v2 discover_hosts --verbose ] *** 2026-01-22 12:19:37,141 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.035) 0:14:49.851 
****** 2026-01-22 12:19:37,141 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.035) 0:14:49.849 ****** 2026-01-22 12:19:37,177 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:37,189 p=33295 u=zuul n=ansible | TASK [Validate EDPM name=install_yamls_makes, tasks_from=make_edpm_deploy_instance] *** 2026-01-22 12:19:37,189 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.047) 0:14:49.898 ****** 2026-01-22 12:19:37,189 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.047) 0:14:49.896 ****** 2026-01-22 12:19:37,218 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:37,267 p=33295 u=zuul n=ansible | PLAY [Deploy ceph using hooks] ************************************************* 2026-01-22 12:19:37,287 p=33295 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2026-01-22 12:19:37,287 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.098) 0:14:49.996 ****** 2026-01-22 12:19:37,287 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.098) 0:14:49.994 ****** 2026-01-22 12:19:37,350 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:37,358 p=33295 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2026-01-22 12:19:37,358 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.071) 0:14:50.068 ****** 2026-01-22 12:19:37,358 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.071) 0:14:50.066 ****** 2026-01-22 12:19:37,439 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:37,452 p=33295 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_ceph _raw_params={{ hook.type }}.yml] *** 2026-01-22 12:19:37,452 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.093) 0:14:50.161 ****** 2026-01-22 12:19:37,452 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.093) 0:14:50.159 ****** 2026-01-22 12:19:37,535 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:37,580 p=33295 u=zuul n=ansible | PLAY [Continue HCI deploy, deploy architecture and validate workflow] ********** 2026-01-22 12:19:37,614 p=33295 u=zuul n=ansible | TASK [Prepare for HCI deploy phase 2 name=hci_prepare, tasks_from=phase2.yml] *** 2026-01-22 12:19:37,614 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.162) 0:14:50.324 ****** 2026-01-22 12:19:37,614 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.162) 0:14:50.322 ****** 2026-01-22 12:19:37,639 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:37,648 p=33295 u=zuul n=ansible | TASK [Continue HCI deployment name=edpm_deploy] ******************************** 2026-01-22 12:19:37,648 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.033) 0:14:50.357 ****** 2026-01-22 12:19:37,648 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.033) 0:14:50.355 ****** 2026-01-22 12:19:37,674 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:37,685 p=33295 u=zuul n=ansible | TASK [run_hook : Assert parameters 
are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2026-01-22 12:19:37,685 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.037) 0:14:50.394 ****** 2026-01-22 12:19:37,685 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.037) 0:14:50.392 ****** 2026-01-22 12:19:37,745 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:37,772 p=33295 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2026-01-22 12:19:37,772 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.087) 0:14:50.482 ****** 2026-01-22 12:19:37,772 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.087) 0:14:50.480 ****** 2026-01-22 12:19:37,871 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:37,885 p=33295 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_deploy _raw_params={{ hook.type }}.yml] *** 2026-01-22 12:19:37,885 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.112) 0:14:50.595 ****** 2026-01-22 12:19:37,885 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.112) 0:14:50.593 ****** 2026-01-22 12:19:37,975 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:37,990 p=33295 u=zuul n=ansible | TASK [Run validations name=validations] **************************************** 2026-01-22 12:19:37,990 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.104) 0:14:50.699 ****** 2026-01-22 12:19:37,990 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:37 +0000 (0:00:00.104) 0:14:50.697 ****** 2026-01-22 12:19:38,016 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,033 p=33295 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2026-01-22 12:19:38,033 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.043) 0:14:50.743 ****** 2026-01-22 12:19:38,033 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.043) 0:14:50.741 ****** 2026-01-22 12:19:38,055 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,063 p=33295 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] 
*** 2026-01-22 12:19:38,063 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:50.773 ****** 2026-01-22 12:19:38,063 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:50.771 ****** 2026-01-22 12:19:38,088 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,100 p=33295 u=zuul n=ansible | TASK [run_hook : Loop on hooks for pre_deploy _raw_params={{ hook.type }}.yml] *** 2026-01-22 12:19:38,101 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.037) 0:14:50.810 ****** 2026-01-22 12:19:38,101 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.037) 0:14:50.808 ****** 2026-01-22 12:19:38,190 p=33295 u=zuul n=ansible | skipping: [localhost] => (item={'name': 'Fetch compute facts', 'type': 'playbook', 'inventory': '/home/zuul/ci-framework-data/artifacts/zuul_inventory.yml', 'source': 'fetch_compute_facts.yml'}) 2026-01-22 12:19:38,192 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,208 p=33295 u=zuul n=ansible | TASK [networking_mapper : Check for Networking Environment Definition file existence path={{ cifmw_networking_mapper_networking_env_def_path }}] *** 2026-01-22 12:19:38,209 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.107) 0:14:50.918 ****** 2026-01-22 12:19:38,209 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.107) 0:14:50.916 ****** 2026-01-22 12:19:38,235 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,246 p=33295 u=zuul n=ansible | TASK [networking_mapper : Check for Networking Definition file existance that=['_net_env_def_stat.stat.exists'], msg=Ensure that the Networking Environment Definition file exists in {{ cifmw_networking_mapper_networking_env_def_path }}, quiet=True] *** 2026-01-22 12:19:38,246 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.037) 0:14:50.955 ****** 2026-01-22 12:19:38,246 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.037) 0:14:50.953 ****** 2026-01-22 12:19:38,267 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,276 p=33295 u=zuul n=ansible | TASK [networking_mapper : Load the Networking Definition from file path={{ cifmw_networking_mapper_networking_env_def_path }}] *** 2026-01-22 12:19:38,276 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:50.986 ****** 2026-01-22 12:19:38,276 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:50.984 ****** 2026-01-22 12:19:38,300 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,310 p=33295 u=zuul n=ansible | TASK [networking_mapper : Set cifmw_networking_env_definition is present cifmw_networking_env_definition={{ _net_env_def_slurp['content'] | b64decode | from_yaml }}, cacheable=True] *** 2026-01-22 12:19:38,310 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.033) 0:14:51.020 ****** 2026-01-22 12:19:38,310 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.033) 0:14:51.018 ****** 2026-01-22 12:19:38,331 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,347 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Fetch network facts gather_subset=network] ***************** 2026-01-22 12:19:38,347 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.036) 0:14:51.057 ****** 2026-01-22 12:19:38,347 p=33295 u=zuul n=ansible | Thursday 22 
January 2026 12:19:38 +0000 (0:00:00.036) 0:14:51.055 ****** 2026-01-22 12:19:38,376 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=controller) 2026-01-22 12:19:38,380 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=crc) 2026-01-22 12:19:38,392 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=localhost) 2026-01-22 12:19:38,399 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=standalone) 2026-01-22 12:19:38,400 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,410 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Look for nova migration keypair file path={{ cifmw_basedir }}/artifacts/nova_migration_key] *** 2026-01-22 12:19:38,410 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.062) 0:14:51.119 ****** 2026-01-22 12:19:38,410 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.062) 0:14:51.117 ****** 2026-01-22 12:19:38,431 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,440 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Create nova migration keypair if does not exists comment=nova migration, path={{ _ssh_file }}, type={{ cifmw_ssh_keytype | default('ecdsa') }}, size={{ cifmw_ssh_keysize | default(521) }}] *** 2026-01-22 12:19:38,440 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:51.150 ****** 2026-01-22 12:19:38,440 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:51.148 ****** 2026-01-22 12:19:38,461 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,469 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Generate needed facts out of local files cifmw_ci_gen_kustomize_values_ssh_authorizedkeys={{ lookup('file', '~/.ssh/id_cifw.pub', rstrip=False) }}, cifmw_ci_gen_kustomize_values_ssh_private_key={{ lookup('file', '~/.ssh/id_cifw', rstrip=False) }}, cifmw_ci_gen_kustomize_values_ssh_public_key={{ lookup('file', '~/.ssh/id_cifw.pub', rstrip=False) }}, cifmw_ci_gen_kustomize_values_migration_pub_key={{ lookup('file', _ssh_file ~ '.pub', rstrip=False)}}, cifmw_ci_gen_kustomize_values_migration_priv_key={{ lookup('file', _ssh_file, rstrip=False) }}, cifmw_ci_gen_kustomize_values_sshd_ranges={{ _ipv4_sshd_ranges + _ipv6_sshd_ranges }}] *** 2026-01-22 12:19:38,470 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.029) 0:14:51.179 ****** 2026-01-22 12:19:38,470 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.029) 0:14:51.177 ****** 2026-01-22 12:19:38,491 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,500 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Set cifmw_architecture_automation_file if not set before cifmw_architecture_automation_file={{ ( cifmw_architecture_repo, 'automation/vars', cifmw_architecture_scenario~'.yaml' ) | ansible.builtin.path_join }}] *** 2026-01-22 12:19:38,500 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:51.209 ****** 2026-01-22 12:19:38,500 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:51.207 ****** 2026-01-22 12:19:38,522 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,530 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Load architecture automation file path={{ cifmw_architecture_automation_file }}] *** 2026-01-22 12:19:38,530 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:51.240 ****** 2026-01-22 12:19:38,530 p=33295 u=zuul n=ansible | Thursday 
22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:51.238 ****** 2026-01-22 12:19:38,550 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,559 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Prepare automation data cifmw_deploy_architecture_steps={{ _parsed['vas'][cifmw_architecture_scenario] }}] *** 2026-01-22 12:19:38,559 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.028) 0:14:51.268 ****** 2026-01-22 12:19:38,559 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.028) 0:14:51.266 ****** 2026-01-22 12:19:38,580 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,593 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Ensure that architecture repo exists repo={{ cifmw_kustomize_deploy_architecture_repo_url }}, dest={{ cifmw_kustomize_deploy_architecture_repo_dest_dir }}, update=False, version={{ cifmw_kustomize_deploy_architecture_repo_version }}] *** 2026-01-22 12:19:38,593 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.034) 0:14:51.302 ****** 2026-01-22 12:19:38,593 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.034) 0:14:51.301 ****** 2026-01-22 12:19:38,614 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,627 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Gather the list of scenario folders paths={{ _va_scenario_dir }}, patterns=kustomization.y*ml, recurse=True] *** 2026-01-22 12:19:38,627 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.034) 0:14:51.337 ****** 2026-01-22 12:19:38,627 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.034) 0:14:51.335 ****** 2026-01-22 12:19:38,659 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,667 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Check if scenario is in the list msg=You need to properly set the `cifmw_architecture_scenario` variable in order to select the architecture-based scenario to deploy. You can take a list of scenario in the `examples/va` folder in the architecture repo. 
] *** 2026-01-22 12:19:38,667 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.039) 0:14:51.376 ****** 2026-01-22 12:19:38,667 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.039) 0:14:51.374 ****** 2026-01-22 12:19:38,697 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,706 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Ensure that destination directory exists path={{ cifmw_kustomize_deploy_kustomizations_dest_dir }}, mode=0755, state=directory] *** 2026-01-22 12:19:38,706 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.038) 0:14:51.415 ****** 2026-01-22 12:19:38,706 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.038) 0:14:51.413 ****** 2026-01-22 12:19:38,725 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,734 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Set the final cifmw_architecture_user_kustomize based on its patches _cifmw_kustomize_deploy_user_kustomize={{ _cifmw_kustomize_deploy_user_kustomize | default({}) | combine(item.value, recursive=True) }}] *** 2026-01-22 12:19:38,734 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.028) 0:14:51.444 ****** 2026-01-22 12:19:38,735 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.028) 0:14:51.442 ****** 2026-01-22 12:19:38,802 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=cifmw_architecture_user_kustomize) 2026-01-22 12:19:38,803 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,813 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Set the final cifmw_architecture_user_kustomize_base64 based on its patches _cifmw_kustomize_deploy_user_base64_kustomize={{ _b64_kustomize_user_patches | ci_kustomize_deploy_combine_base64_patch_dict }}] *** 2026-01-22 12:19:38,813 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.078) 0:14:51.522 ****** 2026-01-22 12:19:38,813 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.078) 0:14:51.520 ****** 2026-01-22 12:19:38,854 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,869 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Load nncp kustomization.yaml path={{ _nncp_kust_path }}] *** 2026-01-22 12:19:38,869 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.056) 0:14:51.579 ****** 2026-01-22 12:19:38,870 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.056) 0:14:51.577 ****** 2026-01-22 12:19:38,896 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,905 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Remove node_1 and node_2 backup=True, content={{ _nncp_updated | to_nice_yaml }}, dest={{ _nncp_kust_path }}, mode=0644] *** 2026-01-22 12:19:38,905 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.035) 0:14:51.614 ****** 2026-01-22 12:19:38,905 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.035) 0:14:51.612 ****** 2026-01-22 12:19:38,943 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,952 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Reduce NNCP ocp_nodes backup=True, dest={{ _nncp_ocp_nodes_path }}, mode=0644, content=--- apiVersion: nmstate.io/v1 kind: NodeNetworkConfigurationPolicy metadata: name: node-0 labels: osp/nncm-config-type: standard ] *** 2026-01-22 12:19:38,952 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 
(0:00:00.047) 0:14:51.662 ****** 2026-01-22 12:19:38,952 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.047) 0:14:51.660 ****** 2026-01-22 12:19:38,973 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:38,983 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Replace replicas backup=True, path={{ _ctlplane_path }}, regexp=^(.+) replicas: [2-9]+$, replace=\1 replicas: 1] *** 2026-01-22 12:19:38,983 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:51.692 ****** 2026-01-22 12:19:38,983 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:38 +0000 (0:00:00.030) 0:14:51.690 ****** 2026-01-22 12:19:39,003 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,021 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Create role needed directories path={{ cifmw_cls_manifests_dir }}, state=directory, mode=0755] *** 2026-01-22 12:19:39,021 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.038) 0:14:51.730 ****** 2026-01-22 12:19:39,021 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.038) 0:14:51.728 ****** 2026-01-22 12:19:39,039 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,047 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Create the cifmw_cls_namespace namespace" kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit) }}, name={{ cifmw_cls_namespace }}, kind=Namespace, state=present] *** 2026-01-22 12:19:39,048 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.026) 0:14:51.757 ****** 2026-01-22 12:19:39,048 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.026) 0:14:51.755 ****** 2026-01-22 12:19:39,065 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,073 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Save storage manifests as artifacts dest={{ cifmw_cls_manifests_dir }}/storage-class.yaml, content={{ cifmw_cls_storage_manifest | to_nice_yaml }}, mode=0644] *** 2026-01-22 12:19:39,073 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.025) 0:14:51.782 ****** 2026-01-22 12:19:39,073 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.025) 0:14:51.780 ****** 2026-01-22 12:19:39,091 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,100 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Get k8s nodes kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit)}}, context={{ cifmw_openshift_context | default(omit)}}, kind=Node] *** 2026-01-22 12:19:39,100 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.027) 0:14:51.809 ****** 2026-01-22 12:19:39,100 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.027) 0:14:51.807 ****** 2026-01-22 12:19:39,119 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,129 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Fetch hostnames for all hosts _raw_params=hostname] *** 2026-01-22 12:19:39,129 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.029) 0:14:51.838 ****** 2026-01-22 12:19:39,129 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.029) 0:14:51.836 ****** 2026-01-22 12:19:39,156 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=crc) 2026-01-22 12:19:39,163 p=33295 u=zuul n=ansible | 
skipping: [localhost] => (item=standalone) 2026-01-22 12:19:39,170 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=controller) 2026-01-22 12:19:39,184 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=localhost) 2026-01-22 12:19:39,186 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,198 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Set the hosts k8s ansible hosts cifmw_ci_local_storage_k8s_hosts={{ _host_map | selectattr("key", "in", k8s_nodes_hostnames) | map(attribute="value") | list }}, cifmw_ci_local_storage_k8s_hostnames={{ k8s_nodes_hostnames }}] *** 2026-01-22 12:19:39,198 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.069) 0:14:51.908 ****** 2026-01-22 12:19:39,198 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.069) 0:14:51.906 ****** 2026-01-22 12:19:39,220 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,230 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Apply the storage class manifests kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, state=present, src={{ cifmw_cls_manifests_dir }}/storage-class.yaml] *** 2026-01-22 12:19:39,230 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.031) 0:14:51.939 ****** 2026-01-22 12:19:39,230 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.031) 0:14:51.937 ****** 2026-01-22 12:19:39,253 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,266 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Create directories on worker node _raw_params=worker_node_dirs.yml] *** 2026-01-22 12:19:39,266 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.036) 0:14:51.976 ****** 2026-01-22 12:19:39,266 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.036) 0:14:51.974 ****** 2026-01-22 12:19:39,290 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=crc) 2026-01-22 12:19:39,291 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,304 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Generate pv related storage manifest file src=storage.yaml.j2, dest={{ cifmw_cls_manifests_dir }}/storage.yaml, mode=0644] *** 2026-01-22 12:19:39,304 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.037) 0:14:52.013 ****** 2026-01-22 12:19:39,304 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.037) 0:14:52.011 ****** 2026-01-22 12:19:39,330 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,339 p=33295 u=zuul n=ansible | TASK [ci_local_storage : Apply pv related storage manifest file kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, state=present, src={{ cifmw_cls_manifests_dir }}/storage.yaml] *** 2026-01-22 12:19:39,339 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.034) 0:14:52.048 ****** 2026-01-22 12:19:39,339 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.034) 0:14:52.046 ****** 2026-01-22 12:19:39,361 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,377 p=33295 u=zuul n=ansible | TASK [Install subscriptions name=ci_gen_kustomize_values, tasks_from=olm_subscriptions_overlay.yml] *** 2026-01-22 12:19:39,377 p=33295 u=zuul n=ansible | Thursday 22 January 2026 
12:19:39 +0000 (0:00:00.038) 0:14:52.086 ****** 2026-01-22 12:19:39,377 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.038) 0:14:52.084 ****** 2026-01-22 12:19:39,399 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:39,408 p=33295 u=zuul n=ansible | TASK [Generate values.yaml for OLM resources name=ci_gen_kustomize_values] ***** 2026-01-22 12:19:39,409 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.031) 0:14:52.118 ****** 2026-01-22 12:19:39,409 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.031) 0:14:52.116 ****** 2026-01-22 12:19:39,463 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Generate snippets files _raw_params=generate_snippets.yml] *** 2026-01-22 12:19:39,463 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.054) 0:14:52.172 ****** 2026-01-22 12:19:39,463 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.054) 0:14:52.170 ****** 2026-01-22 12:19:39,503 p=33295 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/ci_gen_kustomize_values/tasks/generate_snippets.yml for localhost 2026-01-22 12:19:39,514 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Ensure needed parameter is properly set that=['cifmw_architecture_scenario is defined', 'cifmw_architecture_scenario is not none'], msg=cifmw_architecture_scenario must be provided.] *** 2026-01-22 12:19:39,514 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.051) 0:14:52.224 ****** 2026-01-22 12:19:39,514 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.051) 0:14:52.222 ****** 2026-01-22 12:19:39,544 p=33295 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2026-01-22 12:19:39,560 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Stat original source file path={{ cifmw_ci_gen_kustomize_values_src_file }}, get_attributes=False, get_checksum=False, get_mime=False] *** 2026-01-22 12:19:39,561 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.046) 0:14:52.270 ****** 2026-01-22 12:19:39,561 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.046) 0:14:52.268 ****** 2026-01-22 12:19:39,764 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:39,773 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Assert source file exists that=['_src_stat.stat.exists'], msg={{ cifmw_ci_gen_kustomize_values_src_file }} doesn't exist.] 
*** 2026-01-22 12:19:39,773 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.212) 0:14:52.482 ****** 2026-01-22 12:19:39,773 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.212) 0:14:52.480 ****** 2026-01-22 12:19:39,809 p=33295 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2026-01-22 12:19:39,818 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Load original values file src={{ cifmw_ci_gen_kustomize_values_src_file }}] *** 2026-01-22 12:19:39,818 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.044) 0:14:52.527 ****** 2026-01-22 12:19:39,818 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:39 +0000 (0:00:00.044) 0:14:52.525 ****** 2026-01-22 12:19:39,993 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:40,004 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Expose common data for future usage values_datatype={{ _datatype }}, snippet_datadir={{ _dest_dir }}, original_content={{ _config_map_content }}, _cifmw_gen_kustomize_values_extra_manifests={{ _raw_manifests | reject('equalto', _config_map_content) }}, _cifmw_gen_kustomize_values_base_cm_content={{ _config_map_content | ansible.utils.remove_keys( target=_cifmw_gen_kustomize_values_reject_expressions, matching_parameter='regex') }}, cacheable=False] *** 2026-01-22 12:19:40,004 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:40 +0000 (0:00:00.186) 0:14:52.714 ****** 2026-01-22 12:19:40,005 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:40 +0000 (0:00:00.186) 0:14:52.712 ****** 2026-01-22 12:19:40,216 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:40,229 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Ensure we get the needed data depending on the values type _raw_params={{ _tasks }}] *** 2026-01-22 12:19:40,229 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:40 +0000 (0:00:00.225) 0:14:52.939 ****** 2026-01-22 12:19:40,230 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:40 +0000 (0:00:00.225) 0:14:52.937 ****** 2026-01-22 12:19:40,259 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:40,268 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Ensure output directory exists path={{ snippet_datadir }}, state=directory, mode=0755] *** 2026-01-22 12:19:40,268 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:40 +0000 (0:00:00.038) 0:14:52.978 ****** 2026-01-22 12:19:40,268 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:40 +0000 (0:00:00.038) 0:14:52.976 ****** 2026-01-22 12:19:40,471 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:40,482 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Generate CI snippet backup=True, dest={{ (snippet_datadir, '02_ci_data.yaml') | path_join }}, src={{ _tmpl_check_path | first }}, mode=0644] *** 2026-01-22 12:19:40,483 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:40 +0000 (0:00:00.214) 0:14:53.192 ****** 2026-01-22 12:19:40,483 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:40 +0000 (0:00:00.214) 0:14:53.190 ****** 2026-01-22 12:19:40,956 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:40,966 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Generate the base64 CI ConfigMap patches _base64_patch={{ _patches_tuple[0] }}, _cifmw_gen_kustomize_values_extra_manifests={{ _patches_tuple[1] }}] *** 2026-01-22 12:19:40,966 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:40 
+0000 (0:00:00.483) 0:14:53.675 ****** 2026-01-22 12:19:40,966 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:40 +0000 (0:00:00.483) 0:14:53.673 ****** 2026-01-22 12:19:41,001 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:41,011 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Push base64 CI patches backup=True, dest={{ (snippet_datadir, '03_user_data_b64.yaml') | path_join }}, content={{ _base64_patch | default({}) | to_nice_yaml }}, mode=0644] *** 2026-01-22 12:19:41,011 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:41 +0000 (0:00:00.045) 0:14:53.721 ****** 2026-01-22 12:19:41,011 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:41 +0000 (0:00:00.045) 0:14:53.719 ****** 2026-01-22 12:19:41,429 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:41,438 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Push user provided dataset backup=True, dest={{ (snippet_datadir, '04_user_data.yaml') | path_join }}, content={{ cifmw_ci_gen_kustomize_values_userdata | default({}) | to_nice_yaml }}, mode=0644] *** 2026-01-22 12:19:41,438 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:41 +0000 (0:00:00.426) 0:14:54.147 ****** 2026-01-22 12:19:41,438 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:41 +0000 (0:00:00.426) 0:14:54.145 ****** 2026-01-22 12:19:41,900 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:41,909 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Copy the base values.yaml backup=True, content={{ _cifmw_gen_kustomize_values_base_cm_content |to_nice_yaml }}, dest={{ ( snippet_datadir, cifmw_ci_gen_kustomize_values_original_cm_content_file_name ) | path_join }}, mode=0644] *** 2026-01-22 12:19:41,909 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:41 +0000 (0:00:00.471) 0:14:54.619 ****** 2026-01-22 12:19:41,910 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:41 +0000 (0:00:00.471) 0:14:54.617 ****** 2026-01-22 12:19:42,336 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:42,348 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Generate values file _raw_params=generate_values.yml] *** 2026-01-22 12:19:42,348 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:42 +0000 (0:00:00.438) 0:14:55.058 ****** 2026-01-22 12:19:42,348 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:42 +0000 (0:00:00.438) 0:14:55.056 ****** 2026-01-22 12:19:42,387 p=33295 u=zuul n=ansible | included: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/roles/ci_gen_kustomize_values/tasks/generate_values.yml for localhost 2026-01-22 12:19:42,406 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Ensure we have needed parameter that=['values_datatype is defined', "values_datatype != ''"], msg=Please do not call this tasks file without calling the generate_snippet.yml first!] 
*** 2026-01-22 12:19:42,406 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:42 +0000 (0:00:00.057) 0:14:55.115 ****** 2026-01-22 12:19:42,406 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:42 +0000 (0:00:00.057) 0:14:55.113 ****** 2026-01-22 12:19:42,442 p=33295 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2026-01-22 12:19:42,452 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : List snippets paths={{ _dir_path }}, patterns=*.yml,*.yaml, excludes={{ cifmw_ci_gen_kustomize_values_original_cm_content_file_name }}, recurse=False] *** 2026-01-22 12:19:42,452 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:42 +0000 (0:00:00.046) 0:14:55.162 ****** 2026-01-22 12:19:42,453 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:42 +0000 (0:00:00.046) 0:14:55.160 ****** 2026-01-22 12:19:42,651 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:42,661 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Ensure _content is empty _content={}] ********** 2026-01-22 12:19:42,661 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:42 +0000 (0:00:00.208) 0:14:55.370 ****** 2026-01-22 12:19:42,661 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:42 +0000 (0:00:00.208) 0:14:55.368 ****** 2026-01-22 12:19:42,686 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:42,697 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Load various snippet files path={{ file.path }}] *** 2026-01-22 12:19:42,697 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:42 +0000 (0:00:00.035) 0:14:55.406 ****** 2026-01-22 12:19:42,697 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:42 +0000 (0:00:00.035) 0:14:55.404 ****** 2026-01-22 12:19:42,927 p=33295 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/ci_k8s_snippets/olm-values/02_ci_data.yaml) 2026-01-22 12:19:43,098 p=33295 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/ci_k8s_snippets/olm-values/03_user_data_b64.yaml) 2026-01-22 12:19:43,267 p=33295 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/ci_k8s_snippets/olm-values/04_user_data.yaml) 2026-01-22 12:19:43,277 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Combine snippets _content={{ _content | default(_cifmw_gen_kustomize_values_base_cm_content, true) | combine(_parsed, recursive=true) }} ] *** 2026-01-22 12:19:43,278 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:43 +0000 (0:00:00.580) 0:14:55.987 ****** 2026-01-22 12:19:43,278 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:43 +0000 (0:00:00.580) 0:14:55.985 ****** 2026-01-22 12:19:43,315 p=33295 u=zuul n=ansible | ok: [localhost] => (item=02_ci_data.yaml) 2026-01-22 12:19:43,327 p=33295 u=zuul n=ansible | ok: [localhost] => (item=03_user_data_b64.yaml) 2026-01-22 12:19:43,338 p=33295 u=zuul n=ansible | ok: [localhost] => (item=04_user_data.yaml) 2026-01-22 12:19:43,350 p=33295 u=zuul n=ansible | TASK [ci_gen_kustomize_values : Ensure directories exist path={{ _destdir }}, state=directory, mode=0755] *** 2026-01-22 12:19:43,350 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:43 +0000 (0:00:00.072) 0:14:56.059 ****** 2026-01-22 12:19:43,350 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:43 +0000 (0:00:00.072) 0:14:56.057 ****** 2026-01-22 12:19:43,549 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:43,558 p=33295 u=zuul n=ansible | TASK 
[ci_gen_kustomize_values : Output values file backup=True, dest={{ (_destdir, cifmw_ci_gen_kustomize_values_dest_filename) | path_join }}, content={{ ( [ _content ] + _cifmw_gen_kustomize_values_extra_manifests ) | cifmw.general.to_nice_yaml_all }}, mode=0644] *** 2026-01-22 12:19:43,558 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:43 +0000 (0:00:00.207) 0:14:56.267 ****** 2026-01-22 12:19:43,558 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:43 +0000 (0:00:00.207) 0:14:56.265 ****** 2026-01-22 12:19:43,958 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:43,975 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Slurp generated values.yaml for OLM resources src={{ (cifmw_kustomize_deploy_basedir, 'artifacts', 'ci_gen_kustomize_values', 'olm-values', 'values.yaml') | path_join }}] *** 2026-01-22 12:19:43,975 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:43 +0000 (0:00:00.416) 0:14:56.684 ****** 2026-01-22 12:19:43,975 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:43 +0000 (0:00:00.416) 0:14:56.682 ****** 2026-01-22 12:19:43,996 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,005 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Copy generated values.yaml for OLM resources to localhost content={{ _cifmw_kustomize_deploy_olm_values_content.content | b64decode }}, dest={{ ( cifmw_kustomize_deploy_olm_source_files, 'values.yaml' ) | path_join }}, mode=0644] *** 2026-01-22 12:19:44,005 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.030) 0:14:56.715 ****** 2026-01-22 12:19:44,006 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.030) 0:14:56.713 ****** 2026-01-22 12:19:44,034 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,043 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Generate the OLM kustomization file content={{ lookup( 'kubernetes.core.kustomize', dir=cifmw_kustomize_deploy_olm_source_files ) }}, dest={{ cifmw_kustomize_deploy_olm_dest_file }}, mode=0644] *** 2026-01-22 12:19:44,043 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.037) 0:14:56.752 ****** 2026-01-22 12:19:44,043 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.037) 0:14:56.750 ****** 2026-01-22 12:19:44,063 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,073 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Apply the kustomized CRs kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, state=present, wait=True, src={{ cifmw_kustomize_deploy_olm_dest_file }}] *** 2026-01-22 12:19:44,073 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.030) 0:14:56.783 ****** 2026-01-22 12:19:44,074 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.030) 0:14:56.781 ****** 2026-01-22 12:19:44,095 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,105 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait for the openstack operators Subscription to be created kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, api_version={{ _cifmw_kustomize_deploy_olm_osp_operator_subscription.apiVersion }}, kind=Subscription, namespace={{ _cifmw_kustomize_deploy_olm_osp_operator_subscription.metadata.namespace }}, name={{ 
_cifmw_kustomize_deploy_olm_osp_operator_subscription.metadata.name }}] *** 2026-01-22 12:19:44,105 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:56.814 ****** 2026-01-22 12:19:44,105 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:56.812 ****** 2026-01-22 12:19:44,127 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,136 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Install plan _raw_params=install_plan.yml] ************ 2026-01-22 12:19:44,137 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:56.846 ****** 2026-01-22 12:19:44,137 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:56.844 ****** 2026-01-22 12:19:44,158 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,169 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait for the openstack operators InstallPlan to be finished kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, api_version={{ _install_plan.apiVersion }}, kind=InstallPlan, namespace={{ _install_plan.namespace }}, name={{ _install_plan.name }}] *** 2026-01-22 12:19:44,169 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.032) 0:14:56.878 ****** 2026-01-22 12:19:44,169 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.032) 0:14:56.876 ****** 2026-01-22 12:19:44,191 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,202 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait for cert-manager-operator pods kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Pod, namespace=cert-manager-operator, label_selectors=['name = cert-manager-operator'], wait=True, wait_condition={'type': 'Ready', 'status': 'True'}, wait_timeout=300] *** 2026-01-22 12:19:44,203 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.033) 0:14:56.912 ****** 2026-01-22 12:19:44,203 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.033) 0:14:56.910 ****** 2026-01-22 12:19:44,227 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,239 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait for cainjector pods kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Pod, namespace=cert-manager, label_selectors=['app = cainjector'], wait=True, wait_condition={'type': 'Ready', 'status': 'True'}, wait_timeout=300] *** 2026-01-22 12:19:44,239 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.036) 0:14:56.948 ****** 2026-01-22 12:19:44,239 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.036) 0:14:56.946 ****** 2026-01-22 12:19:44,260 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,270 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait for webhook pods kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Pod, namespace=cert-manager, label_selectors=['app = webhook'], wait=True, wait_condition={'type': 'Ready', 'status': 'True'}, wait_timeout=300] *** 2026-01-22 12:19:44,270 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:56.980 ****** 2026-01-22 12:19:44,270 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:56.978 ****** 2026-01-22 12:19:44,292 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,302 p=33295 u=zuul n=ansible | TASK 
[kustomize_deploy : Wait for certmanager pods kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Pod, namespace=cert-manager, label_selectors=['app = cert-manager'], wait=True, wait_condition={'type': 'Ready', 'status': 'True'}, wait_timeout=300] *** 2026-01-22 12:19:44,302 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:57.011 ****** 2026-01-22 12:19:44,302 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:57.009 ****** 2026-01-22 12:19:44,323 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,333 p=33295 u=zuul n=ansible | TASK [openshift_setup : Write catalog source kubeconfig={{ cifmw_openshift_kubeconfig }}, state=present, definition={'apiVersion': 'operators.coreos.com/v1alpha1', 'kind': 'CatalogSource', 'metadata': {'name': '{{ cifmw_openshift_setup_operator_override_catalog_name }}', 'namespace': '{{ cifmw_openshift_setup_operator_override_catalog_namespace }}'}, 'spec': {'displayName': '{{ cifmw_openshift_setup_operator_override_catalog_name }}', 'image': '{{ cifmw_openshift_setup_operator_override_catalog_image }}', 'publisher': 'CI-Framework', 'sourceType': 'grpc', 'updateStrategy': {'registryPoll': {'interval': '10m'}}}}] *** 2026-01-22 12:19:44,333 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.030) 0:14:57.042 ****** 2026-01-22 12:19:44,333 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.030) 0:14:57.040 ****** 2026-01-22 12:19:44,354 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,365 p=33295 u=zuul n=ansible | TASK [openshift_setup : Patch metallb operator subscription to use custom CatalogSource state=patched, kind=Subscription, api_version=operators.coreos.com/v1alpha1, kubeconfig={{ cifmw_openshift_kubeconfig }}, name=metallb-operator-sub, namespace=metallb-system, definition={'spec': {'source': '{{ cifmw_openshift_setup_operator_override_catalog_name }}'}}, wait=True, wait_timeout=300, wait_condition={'type': 'CatalogSourcesUnhealthy', 'status': 'False'}] *** 2026-01-22 12:19:44,365 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.032) 0:14:57.075 ****** 2026-01-22 12:19:44,365 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.032) 0:14:57.073 ****** 2026-01-22 12:19:44,393 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,402 p=33295 u=zuul n=ansible | TASK [openshift_setup : Patch nmstate operator subscription to use custom CatalogSource state=patched, kind=Subscription, api_version=operators.coreos.com/v1alpha1, kubeconfig={{ cifmw_openshift_kubeconfig }}, name=kubernetes-nmstate-operator, namespace=openshift-nmstate, definition={'spec': {'source': '{{ cifmw_openshift_setup_operator_override_catalog_name }}', 'wait_sleep': 10}}, wait=True, wait_timeout=300, wait_condition={'type': 'CatalogSourcesUnhealthy', 'status': 'False'}] *** 2026-01-22 12:19:44,402 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.037) 0:14:57.112 ****** 2026-01-22 12:19:44,403 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.037) 0:14:57.110 ****** 2026-01-22 12:19:44,423 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,442 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait for controller-manager deployment kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Deployment, namespace=metallb-system, label_selectors=['control-plane = controller-manager'], wait=True, 
wait_condition={'type': 'Available', 'status': 'True'}, wait_timeout=300] *** 2026-01-22 12:19:44,443 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.040) 0:14:57.152 ****** 2026-01-22 12:19:44,443 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.040) 0:14:57.150 ****** 2026-01-22 12:19:44,463 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,473 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait for webhook-server deployment kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Deployment, namespace=metallb-system, label_selectors=['component = webhook-server'], wait=True, wait_condition={'type': 'Available', 'status': 'True'}, wait_timeout=300] *** 2026-01-22 12:19:44,473 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.030) 0:14:57.182 ****** 2026-01-22 12:19:44,473 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.030) 0:14:57.180 ****** 2026-01-22 12:19:44,498 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,509 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait until NMstate operator resources are deployed kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Deployment, namespace=openshift-nmstate, name=nmstate-operator, wait=True, wait_condition={'type': 'Available', 'status': 'True'}, wait_timeout=300] *** 2026-01-22 12:19:44,509 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.035) 0:14:57.218 ****** 2026-01-22 12:19:44,509 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.035) 0:14:57.216 ****** 2026-01-22 12:19:44,536 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,547 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Generate MetalLB kustomization file content={{ lookup( 'kubernetes.core.kustomize', dir=cifmw_kustomize_deploy_metallb_source_files ) }}, dest={{ cifmw_kustomize_deploy_metallb_dest_file }}, mode=0644] *** 2026-01-22 12:19:44,548 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.038) 0:14:57.257 ****** 2026-01-22 12:19:44,548 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.038) 0:14:57.255 ****** 2026-01-22 12:19:44,569 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,579 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Apply the kustomized MetalLB CRs kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, state=present, wait=True, src={{ cifmw_kustomize_deploy_metallb_dest_file }}] *** 2026-01-22 12:19:44,579 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:57.288 ****** 2026-01-22 12:19:44,579 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:57.286 ****** 2026-01-22 12:19:44,609 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,618 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait for MetalLB speaker pods kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Pod, namespace=metallb-system, label_selectors=['component = speaker'], wait=True, wait_condition={'type': 'Ready', 'status': 'True'}, wait_timeout=300] *** 2026-01-22 12:19:44,618 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.039) 0:14:57.328 ****** 2026-01-22 12:19:44,618 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.039) 0:14:57.326 ****** 2026-01-22 
12:19:44,639 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,649 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Generate NMstate kustomization file content={{ lookup( 'kubernetes.core.kustomize', dir=cifmw_kustomize_deploy_nmstate_source_files ) }}, dest={{ cifmw_kustomize_deploy_nmstate_dest_file }}, mode=0644] *** 2026-01-22 12:19:44,650 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:57.359 ****** 2026-01-22 12:19:44,650 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:57.357 ****** 2026-01-22 12:19:44,671 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,682 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Apply the kustomized NMstate CRs kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, state=present, wait=True, src={{ cifmw_kustomize_deploy_nmstate_dest_file }}] *** 2026-01-22 12:19:44,683 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.032) 0:14:57.392 ****** 2026-01-22 12:19:44,683 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.032) 0:14:57.390 ****** 2026-01-22 12:19:44,707 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,716 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait for NMstate handler pods kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Pod, namespace=openshift-nmstate, label_selectors=['component = kubernetes-nmstate-handler'], wait=True, wait_condition={'type': 'Ready', 'status': 'True'}, wait_timeout=300] *** 2026-01-22 12:19:44,717 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.034) 0:14:57.426 ****** 2026-01-22 12:19:44,717 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.034) 0:14:57.424 ****** 2026-01-22 12:19:44,737 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,746 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait for NMstate webhook deployment kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Deployment, namespace=openshift-nmstate, name=nmstate-webhook, wait=True, wait_condition={'type': 'Available', 'status': 'True'}, wait_timeout=300] *** 2026-01-22 12:19:44,746 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.029) 0:14:57.455 ****** 2026-01-22 12:19:44,746 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.029) 0:14:57.454 ****** 2026-01-22 12:19:44,768 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,778 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Check if the OpenStack initialization CRD exists kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, kind=CustomResourceDefinition, name=openstacks.operator.openstack.org] *** 2026-01-22 12:19:44,778 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.032) 0:14:57.488 ****** 2026-01-22 12:19:44,779 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.032) 0:14:57.486 ****** 2026-01-22 12:19:44,801 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,809 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Generate OpenStack initialization kustomization file content={{ lookup( 'kubernetes.core.kustomize', dir=cifmw_kustomize_deploy_openstack_source_files ) }}, dest={{ 
cifmw_kustomize_deploy_openstack_dest_file }}, mode=0644] *** 2026-01-22 12:19:44,810 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:57.519 ****** 2026-01-22 12:19:44,810 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.031) 0:14:57.517 ****** 2026-01-22 12:19:44,829 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,842 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Apply the kustomized OpenStack initialization CRs kubeconfig={{ cifmw_openshift_kubeconfig }}, api_key={{ cifmw_openshift_token | default(omit) }}, context={{ cifmw_openshift_context | default(omit) }}, state=present, wait=True, src={{ cifmw_kustomize_deploy_openstack_dest_file }}] *** 2026-01-22 12:19:44,842 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.032) 0:14:57.551 ****** 2026-01-22 12:19:44,842 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.032) 0:14:57.549 ****** 2026-01-22 12:19:44,868 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,880 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait until OpenStack operators are deployed and ready (new install paradigm) kubeconfig={{ cifmw_openshift_kubeconfig }}, api_version=operator.openstack.org/v1beta1, kind=OpenStack, namespace={{ operator_namespace }}, name=openstack, wait=True, wait_condition={'type': 'Ready', 'status': 'True'}, wait_timeout=600] *** 2026-01-22 12:19:44,880 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.038) 0:14:57.590 ****** 2026-01-22 12:19:44,880 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.038) 0:14:57.588 ****** 2026-01-22 12:19:44,907 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,918 p=33295 u=zuul n=ansible | TASK [kustomize_deploy : Wait until OpenStack operators are deployed and ready (old install paradigm) kubeconfig={{ cifmw_openshift_kubeconfig }}, kind=Deployment, namespace={{ operator_namespace }}, label_selectors=['{{ item }}'], wait=True, wait_condition={'type': 'Available', 'status': 'True'}, wait_timeout=600] *** 2026-01-22 12:19:44,918 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.037) 0:14:57.627 ****** 2026-01-22 12:19:44,918 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.037) 0:14:57.625 ****** 2026-01-22 12:19:44,949 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=openstack.org/operator-name) 2026-01-22 12:19:44,953 p=33295 u=zuul n=ansible | skipping: [localhost] => (item=app.kubernetes.io/name=rabbitmq-cluster-operator) 2026-01-22 12:19:44,955 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,969 p=33295 u=zuul n=ansible | TASK [Update containers in deployed OSP operators name=update_containers] ****** 2026-01-22 12:19:44,969 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.050) 0:14:57.678 ****** 2026-01-22 12:19:44,969 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.050) 0:14:57.676 ****** 2026-01-22 12:19:44,989 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:44,998 p=33295 u=zuul n=ansible | TASK [Update containers in deployed OSP operators using set_openstack_containers role name=set_openstack_containers] *** 2026-01-22 12:19:44,998 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 +0000 (0:00:00.028) 0:14:57.707 ****** 2026-01-22 12:19:44,998 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:44 
+0000 (0:00:00.028) 0:14:57.705 ****** 2026-01-22 12:19:45,018 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,027 p=33295 u=zuul n=ansible | TASK [Configure LVMS Storage Class name=ci_lvms_storage] *********************** 2026-01-22 12:19:45,028 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.029) 0:14:57.737 ****** 2026-01-22 12:19:45,028 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.029) 0:14:57.735 ****** 2026-01-22 12:19:45,049 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,061 p=33295 u=zuul n=ansible | TASK [Execute deployment steps name=kustomize_deploy, tasks_from=execute_step.yml, apply={'tags': ['edpm_deploy']}] *** 2026-01-22 12:19:45,061 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.033) 0:14:57.770 ****** 2026-01-22 12:19:45,061 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.033) 0:14:57.768 ****** 2026-01-22 12:19:45,086 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,097 p=33295 u=zuul n=ansible | TASK [install_openstack_ca : Get CA bundle data with retries] ****************** 2026-01-22 12:19:45,097 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.036) 0:14:57.807 ****** 2026-01-22 12:19:45,097 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.036) 0:14:57.805 ****** 2026-01-22 12:19:45,128 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,136 p=33295 u=zuul n=ansible | TASK [install_openstack_ca : Set _ca_bundle fact if CA returned from OCP] ****** 2026-01-22 12:19:45,137 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.039) 0:14:57.846 ****** 2026-01-22 12:19:45,137 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.039) 0:14:57.844 ****** 2026-01-22 12:19:45,165 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,176 p=33295 u=zuul n=ansible | TASK [install_openstack_ca : Creating tls-ca-bundle.pem from CA bundle dest={{ cifmw_install_openstack_ca_file_full_path }}, content={{ _ca_bundle }}, mode=0644] *** 2026-01-22 12:19:45,177 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.039) 0:14:57.886 ****** 2026-01-22 12:19:45,177 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.040) 0:14:57.884 ****** 2026-01-22 12:19:45,198 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,209 p=33295 u=zuul n=ansible | TASK [install_openstack_ca : Check if OpenStackControlplane CA file is present path={{ cifmw_install_openstack_ca_file_full_path }}, get_attributes=False, get_checksum=False, get_mime=False] *** 2026-01-22 12:19:45,209 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.032) 0:14:57.919 ****** 2026-01-22 12:19:45,210 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.032) 0:14:57.917 ****** 2026-01-22 12:19:45,231 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,242 p=33295 u=zuul n=ansible | TASK [Call install_ca role to inject OpenStackControlplane CA file if present role=install_ca] *** 2026-01-22 12:19:45,243 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.033) 0:14:57.952 ****** 2026-01-22 12:19:45,243 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.033) 0:14:57.950 ****** 2026-01-22 12:19:45,264 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 
12:19:45,285 p=33295 u=zuul n=ansible | TASK [cifmw_setup : Run nova host discover process _raw_params=oc rsh -n {{ cifmw_openstack_namespace }} nova-cell0-conductor-0 nova-manage cell_v2 discover_hosts --verbose] *** 2026-01-22 12:19:45,285 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.042) 0:14:57.995 ****** 2026-01-22 12:19:45,285 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.042) 0:14:57.993 ****** 2026-01-22 12:19:45,309 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,318 p=33295 u=zuul n=ansible | TASK [run_hook : Assert parameters are valid quiet=True, that=['_list_hooks is not string', '_list_hooks is not mapping', '_list_hooks is iterable', '(hooks | default([])) is not string', '(hooks | default([])) is not mapping', '(hooks | default([])) is iterable']] *** 2026-01-22 12:19:45,318 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.033) 0:14:58.028 ****** 2026-01-22 12:19:45,318 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.033) 0:14:58.026 ****** 2026-01-22 12:19:45,340 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,350 p=33295 u=zuul n=ansible | TASK [run_hook : Assert single hooks are all mappings quiet=True, that=['_not_mapping_hooks | length == 0'], msg=All single hooks must be a list of mappings or a mapping.] *** 2026-01-22 12:19:45,350 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.031) 0:14:58.059 ****** 2026-01-22 12:19:45,350 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.031) 0:14:58.057 ****** 2026-01-22 12:19:45,374 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,384 p=33295 u=zuul n=ansible | TASK [run_hook : Loop on hooks for post_deploy _raw_params={{ hook.type }}.yml] *** 2026-01-22 12:19:45,384 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.034) 0:14:58.093 ****** 2026-01-22 12:19:45,384 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.034) 0:14:58.091 ****** 2026-01-22 12:19:45,467 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,501 p=33295 u=zuul n=ansible | TASK [Run validations name=validations] **************************************** 2026-01-22 12:19:45,502 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.117) 0:14:58.211 ****** 2026-01-22 12:19:45,502 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.117) 0:14:58.209 ****** 2026-01-22 12:19:45,524 p=33295 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:19:45,535 p=33295 u=zuul n=ansible | TASK [Copy kubeconfig to .kube folder where oc expects it src={{ cifmw_openshift_kubeconfig }}, dest=/home/zuul/.kube/config, remote_src=True] *** 2026-01-22 12:19:45,536 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.033) 0:14:58.245 ****** 2026-01-22 12:19:45,536 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.033) 0:14:58.243 ****** 2026-01-22 12:19:45,823 p=33295 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:19:45,832 p=33295 u=zuul n=ansible | TASK [Make sure all Openstack operators are deployed _raw_params=set -o pipefail && oc get csv -l operators.coreos.com/openstack-operator.openstack-operators -n "openstack-operators" --no-headers=true | grep -i "succeeded" ] *** 2026-01-22 12:19:45,832 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.296) 0:14:58.541 
****** 2026-01-22 12:19:45,832 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:45 +0000 (0:00:00.296) 0:14:58.539 ****** 2026-01-22 12:19:46,199 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:46,218 p=33295 u=zuul n=ansible | TASK [Get the name of the control plane deployed by 06-deploy-edpm.yml _raw_params=oc get -n openstack openstackcontrolplane -o name] *** 2026-01-22 12:19:46,218 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:46 +0000 (0:00:00.386) 0:14:58.928 ****** 2026-01-22 12:19:46,219 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:46 +0000 (0:00:00.386) 0:14:58.926 ****** 2026-01-22 12:19:46,598 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:46,608 p=33295 u=zuul n=ansible | TASK [Delete control plane deployed by 06-deploy-edpm.yml _raw_params=oc delete -n openstack "{{ control_plane.stdout }}"] *** 2026-01-22 12:19:46,608 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:46 +0000 (0:00:00.389) 0:14:59.317 ****** 2026-01-22 12:19:46,608 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:46 +0000 (0:00:00.389) 0:14:59.315 ****** 2026-01-22 12:19:47,414 p=33295 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:19:47,444 p=33295 u=zuul n=ansible | PLAY RECAP ********************************************************************* 2026-01-22 12:19:47,444 p=33295 u=zuul n=ansible | localhost : ok=187 changed=71 unreachable=0 failed=0 skipped=223 rescued=0 ignored=0 2026-01-22 12:19:47,444 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:47 +0000 (0:00:00.836) 0:15:00.154 ****** 2026-01-22 12:19:47,444 p=33295 u=zuul n=ansible | =============================================================================== 2026-01-22 12:19:47,445 p=33295 u=zuul n=ansible | edpm_prepare : Wait for OpenStack controlplane to be deployed --------- 323.11s 2026-01-22 12:19:47,445 p=33295 u=zuul n=ansible | install_yamls_makes : Run openstack ----------------------------------- 185.36s 2026-01-22 12:19:47,445 p=33295 u=zuul n=ansible | install_yamls_makes : Run openstack_init ------------------------------ 104.34s 2026-01-22 12:19:47,445 p=33295 u=zuul n=ansible | edpm_prepare : Wait for OpenStack subscription creation ---------------- 60.96s 2026-01-22 12:19:47,445 p=33295 u=zuul n=ansible | ci_setup : Install needed packages ------------------------------------- 43.35s 2026-01-22 12:19:47,445 p=33295 u=zuul n=ansible | run_hook : Run hook without retry - Download needed tools -------------- 34.25s 2026-01-22 12:19:47,445 p=33295 u=zuul n=ansible | edpm_prepare : Wait for control plane to change its status ------------- 30.07s 2026-01-22 12:19:47,445 p=33295 u=zuul n=ansible | install_yamls_makes : Run crc_storage ---------------------------------- 21.83s 2026-01-22 12:19:47,445 p=33295 u=zuul n=ansible | repo_setup : Initialize python venv and install requirements ------------ 9.49s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | ci_local_storage : Perform action in the PV directory ------------------- 5.51s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | ci_setup : Install openshift client ------------------------------------- 5.30s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | install_yamls_makes : Run netconfig_deploy ------------------------------ 4.63s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | run_hook : Run hook without retry - Fetch compute facts ----------------- 3.03s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | ci_local_storage : Fetch hostnames for all hosts ------------------------ 2.55s 
2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | openshift_setup : Create required namespaces ---------------------------- 1.80s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | install_ca : Update ca bundle ------------------------------------------- 1.63s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | openshift_setup : Allow anonymous image-pulls in CRC registry for targeted namespaces --- 1.61s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | install_ca : Update ca bundle ------------------------------------------- 1.57s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | install_yamls_makes : Run openstack_deploy_prep ------------------------- 1.31s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | openshift_setup : Get internal OpenShift registry route ----------------- 1.17s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | Thursday 22 January 2026 12:19:47 +0000 (0:00:00.837) 0:15:00.153 ****** 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | =============================================================================== 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | edpm_prepare ---------------------------------------------------------- 418.37s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | install_yamls_makes --------------------------------------------------- 319.07s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | ci_setup --------------------------------------------------------------- 50.94s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | run_hook --------------------------------------------------------------- 42.38s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | repo_setup ------------------------------------------------------------- 17.19s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | ci_local_storage ------------------------------------------------------- 12.95s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | openshift_setup --------------------------------------------------------- 9.12s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | ci_gen_kustomize_values ------------------------------------------------- 4.51s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | install_ca -------------------------------------------------------------- 4.51s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | openshift_login --------------------------------------------------------- 4.00s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | install_yamls ----------------------------------------------------------- 3.18s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | cifmw_setup ------------------------------------------------------------- 2.55s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | kustomize_deploy -------------------------------------------------------- 1.40s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | install_openstack_ca ---------------------------------------------------- 1.32s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | ansible.builtin.command ------------------------------------------------- 1.23s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | ansible.builtin.template ------------------------------------------------ 1.18s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | gather_facts ------------------------------------------------------------ 1.14s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | cifmw_helpers ----------------------------------------------------------- 1.10s 2026-01-22 12:19:47,446 p=33295 u=zuul n=ansible | edpm_deploy_baremetal --------------------------------------------------- 0.76s 
2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | discover_latest_image --------------------------------------------------- 0.68s 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | edpm_deploy ------------------------------------------------------------- 0.55s 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | networking_mapper ------------------------------------------------------- 0.46s 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | ansible.builtin.shell --------------------------------------------------- 0.39s 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | ansible.builtin.include_role -------------------------------------------- 0.34s 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | libvirt_manager --------------------------------------------------------- 0.30s 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | ansible.builtin.copy ---------------------------------------------------- 0.30s 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | ansible.builtin.set_fact ------------------------------------------------ 0.08s 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | ansible.builtin.lineinfile ---------------------------------------------- 0.07s 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | ansible.builtin.include_vars -------------------------------------------- 0.06s 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 2026-01-22 12:19:47,447 p=33295 u=zuul n=ansible | total ----------------------------------------------------------------- 900.11s 2026-01-22 12:22:23,368 p=38901 u=zuul n=ansible | PLAY [all] ********************************************************************* 2026-01-22 12:22:23,427 p=38901 u=zuul n=ansible | TASK [Deploy standalone name=install_yamls_makes, tasks_from=make_standalone_deploy.yml] *** 2026-01-22 12:22:23,427 p=38901 u=zuul n=ansible | Thursday 22 January 2026 12:22:23 +0000 (0:00:00.061) 0:00:00.061 ****** 2026-01-22 12:22:23,427 p=38901 u=zuul n=ansible | Thursday 22 January 2026 12:22:23 +0000 (0:00:00.060) 0:00:00.060 ****** 2026-01-22 12:22:23,463 p=38901 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_standalone_deploy_env var=make_standalone_deploy_env] *** 2026-01-22 12:22:23,464 p=38901 u=zuul n=ansible | Thursday 22 January 2026 12:22:23 +0000 (0:00:00.036) 0:00:00.098 ****** 2026-01-22 12:22:23,464 p=38901 u=zuul n=ansible | Thursday 22 January 2026 12:22:23 +0000 (0:00:00.036) 0:00:00.097 ****** 2026-01-22 12:22:23,482 p=38901 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:22:23,493 p=38901 u=zuul n=ansible | TASK [install_yamls_makes : Debug make_standalone_deploy_params var=make_standalone_deploy_params] *** 2026-01-22 12:22:23,493 p=38901 u=zuul n=ansible | Thursday 22 January 2026 12:22:23 +0000 (0:00:00.029) 0:00:00.127 ****** 2026-01-22 12:22:23,493 p=38901 u=zuul n=ansible | Thursday 22 January 2026 12:22:23 +0000 (0:00:00.029) 0:00:00.126 ****** 2026-01-22 12:22:23,543 p=38901 u=zuul n=ansible | ok: [localhost] => make_standalone_deploy_params: BARBICAN_SERVICE_ENABLED: 'false' DATAPLANE_DNS_SERVER: 192.168.122.10 DNS_DOMAIN: ooo.test EDPM_COMPUTE_CEPH_ENABLED: 'false' EDPM_COMPUTE_CEPH_NOVA: 'false' EDPM_COMPUTE_NETWORK_IP: 192.168.122 GATEWAY: 192.168.122.10 HOST_PRIMARY_RESOLV_CONF_ENTRY: 192.168.122.10 IP: 192.168.122.100 IP_ADRESS_SUFFIX: '100' NTP_SERVER: pool.ntp.org OCTAVIA_ENABLED: 'true' OS_NET_CONFIG_IFACE: nic2 REPO_SETUP_CMDS: /home/zuul/cdn_subscription_repos.sh SSH_KEY_FILE: /home/zuul/.ssh/id_rsa 
STANDALONE_VM: 'false' SWIFT_REPLICATED: 'false' TELEMETRY_ENABLED: 'true' TLS_ENABLED: 'true' 2026-01-22 12:22:23,555 p=38901 u=zuul n=ansible | TASK [install_yamls_makes : Run standalone_deploy output_dir={{ cifmw_basedir }}/artifacts, chdir=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup, script=make standalone_deploy, dry_run={{ make_standalone_deploy_dryrun|default(false)|bool }}, extra_args={{ dict((make_standalone_deploy_env|default({})), **(make_standalone_deploy_params|default({}))) }}] *** 2026-01-22 12:22:23,555 p=38901 u=zuul n=ansible | Thursday 22 January 2026 12:22:23 +0000 (0:00:00.061) 0:00:00.189 ****** 2026-01-22 12:22:23,555 p=38901 u=zuul n=ansible | Thursday 22 January 2026 12:22:23 +0000 (0:00:00.061) 0:00:00.188 ****** 2026-01-22 12:22:23,613 p=38901 u=zuul n=ansible | Follow script's output here: /home/zuul/ci-framework-data/logs/ci_script_011_run_standalone.log 2026-01-22 13:14:26,973 p=38901 u=zuul n=ansible | [WARNING]: conditional statements should not include jinja2 templating delimiters such as {{ }} or {% %}. Found: {{ make_standalone_deploy_until | default(true) }} 2026-01-22 13:14:27,034 p=38901 u=zuul n=ansible | changed: [localhost] 2026-01-22 13:14:27,124 p=38901 u=zuul n=ansible | PLAY RECAP ********************************************************************* 2026-01-22 13:14:27,124 p=38901 u=zuul n=ansible | localhost : ok=2 changed=1 unreachable=0 failed=0 skipped=1 rescued=0 ignored=0 2026-01-22 13:14:27,124 p=38901 u=zuul n=ansible | Thursday 22 January 2026 13:14:27 +0000 (0:52:03.569) 0:52:03.758 ****** 2026-01-22 13:14:27,124 p=38901 u=zuul n=ansible | =============================================================================== 2026-01-22 13:14:27,124 p=38901 u=zuul n=ansible | install_yamls_makes : Run standalone_deploy -------------------------- 3123.57s 2026-01-22 13:14:27,125 p=38901 u=zuul n=ansible | install_yamls_makes : Debug make_standalone_deploy_params --------------- 0.06s 2026-01-22 13:14:27,125 p=38901 u=zuul n=ansible | Deploy standalone ------------------------------------------------------- 0.04s 2026-01-22 13:14:27,125 p=38901 u=zuul n=ansible | install_yamls_makes : Debug make_standalone_deploy_env ------------------ 0.03s 2026-01-22 13:14:27,125 p=38901 u=zuul n=ansible | Thursday 22 January 2026 13:14:27 +0000 (0:52:03.569) 0:52:03.758 ****** 2026-01-22 13:14:27,125 p=38901 u=zuul n=ansible | =============================================================================== 2026-01-22 13:14:27,125 p=38901 u=zuul n=ansible | install_yamls_makes -------------------------------------------------- 3123.66s 2026-01-22 13:14:27,125 p=38901 u=zuul n=ansible | ansible.builtin.include_role -------------------------------------------- 0.04s 2026-01-22 13:14:27,125 p=38901 u=zuul n=ansible | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 2026-01-22 13:14:27,125 p=38901 u=zuul n=ansible | total ---------------------------------------------------------------- 3123.70s home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_002_login_into_openshift_internal.log0000644000175000017500000000002115134411334032556 0ustar zuulzuulLogin Succeeded! 
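The kustomize_deploy and openshift_setup tasks recorded in ansible.log above (for example "Wait for cert-manager-operator pods" and "Wait for MetalLB speaker pods") all follow the same readiness-wait shape: a kubeconfig, a kind and label selector, and wait_condition Ready=True with a timeout. The log does not show which module backs those tasks, so the following is only a minimal sketch of how that pattern can be expressed with kubernetes.core.k8s_info; the play name and the kubeconfig fallback are illustrative placeholders, not values taken from this job.

---
# Sketch only: mirrors the wait_condition pattern of the skipped
# kustomize_deploy wait tasks logged above; names and paths are placeholders.
- name: Wait for cert-manager pods to become Ready
  hosts: localhost
  gather_facts: false
  tasks:
    - name: Block until every pod matching the selector reports Ready=True
      kubernetes.core.k8s_info:
        kubeconfig: "{{ cifmw_openshift_kubeconfig | default('~/.kube/config') }}"
        kind: Pod
        namespace: cert-manager
        label_selectors:
          - app = cert-manager
        wait: true
        wait_condition:
          type: Ready
          status: "True"
        wait_timeout: 300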
home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_003_run_hook_without_retry_fetch.log0000644000175000017500000003036015134411344032462 0ustar zuulzuul[WARNING]: Invalid characters were found in group names but not replaced, use -vvvv to see details [WARNING]: Found variable using reserved name: namespace PLAY [Sync repos for controller to compute for periodic jobs and gating repo] *** skipping: no hosts matched PLAY [Build dataset hook] ****************************************************** TASK [Load parameters dir={{ item }}, ignore_unknown_extensions=True, extensions=['yaml', 'yml']] *** Thursday 22 January 2026 12:06:58 +0000 (0:00:00.045) 0:00:00.045 ****** Thursday 22 January 2026 12:06:58 +0000 (0:00:00.044) 0:00:00.044 ****** ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/parameters) ok: [localhost] => (item=/etc/ci/env) TASK [Ensure CRC hostname is set _crc_hostname={{ cifmw_crc_hostname | default('crc') }}] *** Thursday 22 January 2026 12:06:58 +0000 (0:00:00.130) 0:00:00.175 ****** Thursday 22 January 2026 12:06:58 +0000 (0:00:00.130) 0:00:00.175 ****** ok: [localhost] TASK [Check we have some compute in inventory computes_len={{ groups['computes'] | default([]) | length }}] *** Thursday 22 January 2026 12:06:58 +0000 (0:00:00.042) 0:00:00.217 ****** Thursday 22 January 2026 12:06:58 +0000 (0:00:00.042) 0:00:00.217 ****** ok: [localhost] TASK [Ensure that the isolated net was configured for crc that=['crc_ci_bootstrap_networks_out is defined', 'crc_ci_bootstrap_networks_out[_crc_hostname] is defined', "crc_ci_bootstrap_networks_out[_crc_hostname]['default'] is defined"]] *** Thursday 22 January 2026 12:06:58 +0000 (0:00:00.041) 0:00:00.258 ****** Thursday 22 January 2026 12:06:58 +0000 (0:00:00.041) 0:00:00.258 ****** ok: [localhost] => changed: false msg: All assertions passed TASK [Ensure we have needed bits for compute when needed that=['crc_ci_bootstrap_networks_out[_first_compute] is defined', "crc_ci_bootstrap_networks_out[_first_compute]['default'] is defined"]] *** Thursday 22 January 2026 12:06:58 +0000 (0:00:00.035) 0:00:00.294 ****** Thursday 22 January 2026 12:06:58 +0000 (0:00:00.035) 0:00:00.293 ****** skipping: [localhost] TASK [Set facts for further usage within the framework cifmw_edpm_prepare_extra_vars={'NNCP_INTERFACE': '{{ crc_ci_bootstrap_networks_out[_crc_hostname].default.iface }}', 'NNCP_DNS_SERVER': "{{\n cifmw_nncp_dns_server |\n default(crc_ci_bootstrap_networks_out[_crc_hostname].default.ip) |\n split('/') | first\n}}", 'NETWORK_MTU': '{{ crc_ci_bootstrap_networks_out[_crc_hostname].default.mtu }}'}] *** Thursday 22 January 2026 12:06:58 +0000 (0:00:00.041) 0:00:00.335 ****** Thursday 22 January 2026 12:06:58 +0000 (0:00:00.041) 0:00:00.334 ****** ok: [localhost] TASK [Ensure the kustomizations dirs exists path={{ cifmw_basedir }}/artifacts/manifests/kustomizations/{{ item }}, state=directory, mode=0755] *** Thursday 22 January 2026 12:06:58 +0000 (0:00:00.045) 0:00:00.380 ****** Thursday 22 January 2026 12:06:58 +0000 (0:00:00.045) 0:00:00.379 ****** changed: [localhost] => (item=dataplane) changed: [localhost] => (item=controlplane) TASK [Create OpenStackControlPlane CR Kustomization dest={{ cifmw_basedir }}/artifacts/manifests/kustomizations/controlplane/99-kustomization.yaml, content=apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization resources: namespace: {{ namespace }} patches: - target: kind: OpenStackControlPlane patch: |- - op: replace path: /spec/dns/template/options value: [ { "key": "server", 
"values": [ "192.168.122.10" ] }, { "key": "no-negcache", "values": [] } ], mode=0644] *** Thursday 22 January 2026 12:06:58 +0000 (0:00:00.448) 0:00:00.829 ****** Thursday 22 January 2026 12:06:58 +0000 (0:00:00.448) 0:00:00.828 ****** changed: [localhost] TASK [Set specific fact for compute accesses cifmw_edpm_deploy_extra_vars={{ edpm_install_yamls_vars }}] *** Thursday 22 January 2026 12:06:59 +0000 (0:00:00.671) 0:00:01.500 ****** Thursday 22 January 2026 12:06:59 +0000 (0:00:00.671) 0:00:01.499 ****** skipping: [localhost] TASK [Create EDPM CR Kustomization mode=0644, dest={{ cifmw_basedir }}/artifacts/manifests/kustomizations/dataplane/99-kustomization.yaml, content=apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization resources: namespace: {{ namespace }} patches: - target: kind: OpenStackDataPlaneNodeSet patch: |- {% for compute_node in groups['computes'] %} - op: replace path: /spec/nodes/edpm-{{ compute_node }}/hostName value: "{{compute_node}}" {% endfor %} - op: replace path: /spec/nodeTemplate/ansible/ansibleVars/neutron_public_interface_name value: "{{ crc_ci_bootstrap_networks_out[_first_compute].default.iface | default('') }}" {% for compute_node in groups['computes'] %} - op: replace path: /spec/nodes/edpm-{{ compute_node }}/networks/0/defaultRoute value: false {% endfor %} {% for compute_node in groups['computes'] if compute_node != _first_compute %} - op: replace path: /spec/nodes/edpm-{{ compute_node }}/ansible/ansibleHost value: >- {{ crc_ci_bootstrap_networks_out[compute_node].default.ip4 | default(crc_ci_bootstrap_networks_out[compute_node].default.ip) | ansible.utils.ipaddr('address') }} - op: replace path: /spec/nodes/edpm-{{ compute_node }}/networks/0/fixedIP value: >- {{ crc_ci_bootstrap_networks_out[compute_node].default.ip4 | default(crc_ci_bootstrap_networks_out[compute_node].default.ip) | ansible.utils.ipaddr('address') }} {% endfor %} - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_os_net_config_mappings value: net_config_data_lookup: edpm-compute: nic2: "{{ crc_ci_bootstrap_networks_out[_first_compute].default.iface | default('ens7') }}" - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_network_config_debug value: true - op: add path: /spec/env value: {} - op: add path: /spec/env value: - name: "ANSIBLE_VERBOSITY" value: "2" - op: replace path: /spec/nodeTemplate/ansible/ansibleVars/edpm_network_config_template value: |- {%- raw %} --- {% set mtu_list = [ctlplane_mtu] %} {% for network in nodeset_networks %} {% set _ = mtu_list.append(lookup('vars', networks_lower[network] ~ '_mtu')) %} {%- endfor %} {% set min_viable_mtu = mtu_list | max %} network_config: - type: interface name: nic1 use_dhcp: true mtu: {{ min_viable_mtu }} - type: ovs_bridge name: {{ neutron_physical_bridge_name }} mtu: {{ min_viable_mtu }} use_dhcp: false dns_servers: {{ ctlplane_dns_nameservers }} domain: {{ dns_search_domains }} addresses: - ip_netmask: {{ ctlplane_ip }}/{{ ctlplane_cidr }} routes: {{ ctlplane_host_routes }} members: - type: interface name: nic2 mtu: {{ min_viable_mtu }} # force the MAC address of the bridge to this interface primary: true {% if edpm_network_config_nmstate | bool %} # this ovs_extra configuration fixes OSPRH-17551, but it will be not needed when FDP-1472 is resolved ovs_extra: - "set interface eth1 external-ids:ovn-egress-iface=true" {% endif %} {% for network in nodeset_networks %} - type: vlan mtu: {{ lookup('vars', networks_lower[network] ~ '_mtu') }} vlan_id: {{ lookup('vars', networks_lower[network] ~ '_vlan_id') 
}} addresses: - ip_netmask: {{ lookup('vars', networks_lower[network] ~ '_ip') }}/{{ lookup('vars', networks_lower[network] ~ '_cidr') }} routes: {{ lookup('vars', networks_lower[network] ~ '_host_routes') }} {% endfor %} {% endraw %} - op: replace path: /spec/nodeTemplate/ansible/ansibleUser value: "{{ hostvars[_first_compute].ansible_user | default('zuul') }}" - op: replace path: /spec/nodeTemplate/ansible/ansibleVars/ctlplane_dns_nameservers value: {% for dns_server in dns_servers %} - "{{ dns_server }}" {% endfor %} {% if content_provider_registry_ip is defined %} - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_container_registry_insecure_registries value: ["{{ content_provider_registry_ip }}:5001"] {% endif %} - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_sshd_allowed_ranges value: ["0.0.0.0/0"] {% if cifmw_hook_fetch_compute_facts_edpm_cmd is defined %} - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_bootstrap_command value: |- {{ cifmw_hook_fetch_compute_facts_edpm_cmd | indent( width=8) }} {% endif %} {% if cifmw_edpm_telemetry_enabled_exporters is defined and cifmw_edpm_telemetry_enabled_exporters | length > 0 %} - op: replace path: /spec/nodeTemplate/ansible/ansibleVars/edpm_telemetry_enabled_exporters value: {% for exporter in cifmw_edpm_telemetry_enabled_exporters %} - "{{ exporter }}" {% endfor %} {% endif %}] *** Thursday 22 January 2026 12:06:59 +0000 (0:00:00.060) 0:00:01.561 ****** Thursday 22 January 2026 12:06:59 +0000 (0:00:00.060) 0:00:01.560 ****** skipping: [localhost] TASK [Ensure we know about the private host keys _raw_params=ssh-keyscan {{ cifmw_edpm_deploy_extra_vars.DATAPLANE_COMPUTE_IP }} >> ~/.ssh/known_hosts ] *** Thursday 22 January 2026 12:06:59 +0000 (0:00:00.052) 0:00:01.613 ****** Thursday 22 January 2026 12:06:59 +0000 (0:00:00.052) 0:00:01.613 ****** skipping: [localhost] TASK [Save compute info dest={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml, content={{ file_content | to_nice_yaml }}, mode=0644] *** Thursday 22 January 2026 12:06:59 +0000 (0:00:00.047) 0:00:01.661 ****** Thursday 22 January 2026 12:06:59 +0000 (0:00:00.047) 0:00:01.660 ****** changed: [localhost] PLAY RECAP ********************************************************************* localhost : ok=8 changed=3 unreachable=0 failed=0 skipped=4 rescued=0 ignored=0 Thursday 22 January 2026 12:07:00 +0000 (0:00:00.551) 0:00:02.212 ****** =============================================================================== Create OpenStackControlPlane CR Kustomization --------------------------- 0.67s Save compute info ------------------------------------------------------- 0.55s Ensure the kustomizations dirs exists ----------------------------------- 0.45s Load parameters --------------------------------------------------------- 0.13s Set specific fact for compute accesses ---------------------------------- 0.06s Create EDPM CR Kustomization -------------------------------------------- 0.05s Ensure we know about the private host keys ------------------------------ 0.05s Set facts for further usage within the framework ------------------------ 0.05s Ensure CRC hostname is set ---------------------------------------------- 0.04s Ensure we have needed bits for compute when needed ---------------------- 0.04s Check we have some compute in inventory --------------------------------- 0.04s Ensure that the isolated net was configured for crc --------------------- 0.04s Thursday 22 January 2026 12:07:00 +0000 (0:00:00.551) 0:00:02.212 ****** 
=============================================================================== ansible.builtin.copy ---------------------------------------------------- 1.28s ansible.builtin.file ---------------------------------------------------- 0.45s ansible.builtin.set_fact ------------------------------------------------ 0.19s ansible.builtin.include_vars -------------------------------------------- 0.13s ansible.builtin.assert -------------------------------------------------- 0.08s ansible.builtin.shell --------------------------------------------------- 0.05s ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ total ------------------------------------------------------------------- 2.17s home/zuul/zuul-output/logs/ci-framework-data/logs/pre_deploy_fetch_compute_facts.log0000644000175000017500000004040615134411344030265 0ustar zuulzuul2026-01-22 12:06:58,036 p=35404 u=zuul n=ansible | [WARNING]: Invalid characters were found in group names but not replaced, use -vvvv to see details 2026-01-22 12:06:58,145 p=35404 u=zuul n=ansible | PLAY [Sync repos for controller to compute for periodic jobs and gating repo] *** 2026-01-22 12:06:58,145 p=35404 u=zuul n=ansible | skipping: no hosts matched 2026-01-22 12:06:58,146 p=35404 u=zuul n=ansible | [WARNING]: Found variable using reserved name: namespace 2026-01-22 12:06:58,146 p=35404 u=zuul n=ansible | PLAY [Build dataset hook] ****************************************************** 2026-01-22 12:06:58,188 p=35404 u=zuul n=ansible | TASK [Load parameters dir={{ item }}, ignore_unknown_extensions=True, extensions=['yaml', 'yml']] *** 2026-01-22 12:06:58,188 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:58 +0000 (0:00:00.045) 0:00:00.045 ****** 2026-01-22 12:06:58,188 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:58 +0000 (0:00:00.044) 0:00:00.044 ****** 2026-01-22 12:06:58,267 p=35404 u=zuul n=ansible | ok: [localhost] => (item=/home/zuul/ci-framework-data/artifacts/parameters) 2026-01-22 12:06:58,300 p=35404 u=zuul n=ansible | ok: [localhost] => (item=/etc/ci/env) 2026-01-22 12:06:58,318 p=35404 u=zuul n=ansible | TASK [Ensure CRC hostname is set _crc_hostname={{ cifmw_crc_hostname | default('crc') }}] *** 2026-01-22 12:06:58,318 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:58 +0000 (0:00:00.130) 0:00:00.175 ****** 2026-01-22 12:06:58,318 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:58 +0000 (0:00:00.130) 0:00:00.175 ****** 2026-01-22 12:06:58,349 p=35404 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:58,360 p=35404 u=zuul n=ansible | TASK [Check we have some compute in inventory computes_len={{ groups['computes'] | default([]) | length }}] *** 2026-01-22 12:06:58,360 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:58 +0000 (0:00:00.042) 0:00:00.217 ****** 2026-01-22 12:06:58,360 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:58 +0000 (0:00:00.042) 0:00:00.217 ****** 2026-01-22 12:06:58,389 p=35404 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:58,401 p=35404 u=zuul n=ansible | TASK [Ensure that the isolated net was configured for crc that=['crc_ci_bootstrap_networks_out is defined', 'crc_ci_bootstrap_networks_out[_crc_hostname] is defined', "crc_ci_bootstrap_networks_out[_crc_hostname]['default'] is defined"]] *** 2026-01-22 12:06:58,401 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:58 +0000 (0:00:00.041) 0:00:00.258 ****** 2026-01-22 12:06:58,402 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:58 +0000 (0:00:00.041) 
0:00:00.258 ****** 2026-01-22 12:06:58,426 p=35404 u=zuul n=ansible | ok: [localhost] => changed: false msg: All assertions passed 2026-01-22 12:06:58,436 p=35404 u=zuul n=ansible | TASK [Ensure we have needed bits for compute when needed that=['crc_ci_bootstrap_networks_out[_first_compute] is defined', "crc_ci_bootstrap_networks_out[_first_compute]['default'] is defined"]] *** 2026-01-22 12:06:58,437 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:58 +0000 (0:00:00.035) 0:00:00.294 ****** 2026-01-22 12:06:58,437 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:58 +0000 (0:00:00.035) 0:00:00.293 ****** 2026-01-22 12:06:58,464 p=35404 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:58,478 p=35404 u=zuul n=ansible | TASK [Set facts for further usage within the framework cifmw_edpm_prepare_extra_vars={'NNCP_INTERFACE': '{{ crc_ci_bootstrap_networks_out[_crc_hostname].default.iface }}', 'NNCP_DNS_SERVER': "{{\n cifmw_nncp_dns_server |\n default(crc_ci_bootstrap_networks_out[_crc_hostname].default.ip) |\n split('/') | first\n}}", 'NETWORK_MTU': '{{ crc_ci_bootstrap_networks_out[_crc_hostname].default.mtu }}'}] *** 2026-01-22 12:06:58,478 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:58 +0000 (0:00:00.041) 0:00:00.335 ****** 2026-01-22 12:06:58,478 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:58 +0000 (0:00:00.041) 0:00:00.334 ****** 2026-01-22 12:06:58,512 p=35404 u=zuul n=ansible | ok: [localhost] 2026-01-22 12:06:58,523 p=35404 u=zuul n=ansible | TASK [Ensure the kustomizations dirs exists path={{ cifmw_basedir }}/artifacts/manifests/kustomizations/{{ item }}, state=directory, mode=0755] *** 2026-01-22 12:06:58,523 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:58 +0000 (0:00:00.045) 0:00:00.380 ****** 2026-01-22 12:06:58,523 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:58 +0000 (0:00:00.045) 0:00:00.379 ****** 2026-01-22 12:06:58,798 p=35404 u=zuul n=ansible | changed: [localhost] => (item=dataplane) 2026-01-22 12:06:58,959 p=35404 u=zuul n=ansible | changed: [localhost] => (item=controlplane) 2026-01-22 12:06:58,971 p=35404 u=zuul n=ansible | TASK [Create OpenStackControlPlane CR Kustomization dest={{ cifmw_basedir }}/artifacts/manifests/kustomizations/controlplane/99-kustomization.yaml, content=apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization resources: namespace: {{ namespace }} patches: - target: kind: OpenStackControlPlane patch: |- - op: replace path: /spec/dns/template/options value: [ { "key": "server", "values": [ "192.168.122.10" ] }, { "key": "no-negcache", "values": [] } ], mode=0644] *** 2026-01-22 12:06:58,972 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:58 +0000 (0:00:00.448) 0:00:00.829 ****** 2026-01-22 12:06:58,972 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:58 +0000 (0:00:00.448) 0:00:00.828 ****** 2026-01-22 12:06:59,628 p=35404 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:06:59,643 p=35404 u=zuul n=ansible | TASK [Set specific fact for compute accesses cifmw_edpm_deploy_extra_vars={{ edpm_install_yamls_vars }}] *** 2026-01-22 12:06:59,643 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:59 +0000 (0:00:00.671) 0:00:01.500 ****** 2026-01-22 12:06:59,643 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:59 +0000 (0:00:00.671) 0:00:01.499 ****** 2026-01-22 12:06:59,683 p=35404 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:59,703 p=35404 u=zuul n=ansible | TASK [Create EDPM CR Kustomization mode=0644, dest={{ 
cifmw_basedir }}/artifacts/manifests/kustomizations/dataplane/99-kustomization.yaml, content=apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization resources: namespace: {{ namespace }} patches: - target: kind: OpenStackDataPlaneNodeSet patch: |- {% for compute_node in groups['computes'] %} - op: replace path: /spec/nodes/edpm-{{ compute_node }}/hostName value: "{{compute_node}}" {% endfor %} - op: replace path: /spec/nodeTemplate/ansible/ansibleVars/neutron_public_interface_name value: "{{ crc_ci_bootstrap_networks_out[_first_compute].default.iface | default('') }}" {% for compute_node in groups['computes'] %} - op: replace path: /spec/nodes/edpm-{{ compute_node }}/networks/0/defaultRoute value: false {% endfor %} {% for compute_node in groups['computes'] if compute_node != _first_compute %} - op: replace path: /spec/nodes/edpm-{{ compute_node }}/ansible/ansibleHost value: >- {{ crc_ci_bootstrap_networks_out[compute_node].default.ip4 | default(crc_ci_bootstrap_networks_out[compute_node].default.ip) | ansible.utils.ipaddr('address') }} - op: replace path: /spec/nodes/edpm-{{ compute_node }}/networks/0/fixedIP value: >- {{ crc_ci_bootstrap_networks_out[compute_node].default.ip4 | default(crc_ci_bootstrap_networks_out[compute_node].default.ip) | ansible.utils.ipaddr('address') }} {% endfor %} - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_os_net_config_mappings value: net_config_data_lookup: edpm-compute: nic2: "{{ crc_ci_bootstrap_networks_out[_first_compute].default.iface | default('ens7') }}" - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_network_config_debug value: true - op: add path: /spec/env value: {} - op: add path: /spec/env value: - name: "ANSIBLE_VERBOSITY" value: "2" - op: replace path: /spec/nodeTemplate/ansible/ansibleVars/edpm_network_config_template value: |- {%- raw %} --- {% set mtu_list = [ctlplane_mtu] %} {% for network in nodeset_networks %} {% set _ = mtu_list.append(lookup('vars', networks_lower[network] ~ '_mtu')) %} {%- endfor %} {% set min_viable_mtu = mtu_list | max %} network_config: - type: interface name: nic1 use_dhcp: true mtu: {{ min_viable_mtu }} - type: ovs_bridge name: {{ neutron_physical_bridge_name }} mtu: {{ min_viable_mtu }} use_dhcp: false dns_servers: {{ ctlplane_dns_nameservers }} domain: {{ dns_search_domains }} addresses: - ip_netmask: {{ ctlplane_ip }}/{{ ctlplane_cidr }} routes: {{ ctlplane_host_routes }} members: - type: interface name: nic2 mtu: {{ min_viable_mtu }} # force the MAC address of the bridge to this interface primary: true {% if edpm_network_config_nmstate | bool %} # this ovs_extra configuration fixes OSPRH-17551, but it will be not needed when FDP-1472 is resolved ovs_extra: - "set interface eth1 external-ids:ovn-egress-iface=true" {% endif %} {% for network in nodeset_networks %} - type: vlan mtu: {{ lookup('vars', networks_lower[network] ~ '_mtu') }} vlan_id: {{ lookup('vars', networks_lower[network] ~ '_vlan_id') }} addresses: - ip_netmask: {{ lookup('vars', networks_lower[network] ~ '_ip') }}/{{ lookup('vars', networks_lower[network] ~ '_cidr') }} routes: {{ lookup('vars', networks_lower[network] ~ '_host_routes') }} {% endfor %} {% endraw %} - op: replace path: /spec/nodeTemplate/ansible/ansibleUser value: "{{ hostvars[_first_compute].ansible_user | default('zuul') }}" - op: replace path: /spec/nodeTemplate/ansible/ansibleVars/ctlplane_dns_nameservers value: {% for dns_server in dns_servers %} - "{{ dns_server }}" {% endfor %} {% if content_provider_registry_ip is defined %} - op: add 
path: /spec/nodeTemplate/ansible/ansibleVars/edpm_container_registry_insecure_registries value: ["{{ content_provider_registry_ip }}:5001"] {% endif %} - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_sshd_allowed_ranges value: ["0.0.0.0/0"] {% if cifmw_hook_fetch_compute_facts_edpm_cmd is defined %} - op: add path: /spec/nodeTemplate/ansible/ansibleVars/edpm_bootstrap_command value: |- {{ cifmw_hook_fetch_compute_facts_edpm_cmd | indent( width=8) }} {% endif %} {% if cifmw_edpm_telemetry_enabled_exporters is defined and cifmw_edpm_telemetry_enabled_exporters | length > 0 %} - op: replace path: /spec/nodeTemplate/ansible/ansibleVars/edpm_telemetry_enabled_exporters value: {% for exporter in cifmw_edpm_telemetry_enabled_exporters %} - "{{ exporter }}" {% endfor %} {% endif %}] *** 2026-01-22 12:06:59,704 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:59 +0000 (0:00:00.060) 0:00:01.561 ****** 2026-01-22 12:06:59,704 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:59 +0000 (0:00:00.060) 0:00:01.560 ****** 2026-01-22 12:06:59,740 p=35404 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:59,756 p=35404 u=zuul n=ansible | TASK [Ensure we know about the private host keys _raw_params=ssh-keyscan {{ cifmw_edpm_deploy_extra_vars.DATAPLANE_COMPUTE_IP }} >> ~/.ssh/known_hosts ] *** 2026-01-22 12:06:59,756 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:59 +0000 (0:00:00.052) 0:00:01.613 ****** 2026-01-22 12:06:59,756 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:59 +0000 (0:00:00.052) 0:00:01.613 ****** 2026-01-22 12:06:59,787 p=35404 u=zuul n=ansible | skipping: [localhost] 2026-01-22 12:06:59,804 p=35404 u=zuul n=ansible | TASK [Save compute info dest={{ cifmw_basedir }}/artifacts/{{ step }}_{{ hook_name }}.yml, content={{ file_content | to_nice_yaml }}, mode=0644] *** 2026-01-22 12:06:59,804 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:59 +0000 (0:00:00.047) 0:00:01.661 ****** 2026-01-22 12:06:59,804 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:06:59 +0000 (0:00:00.047) 0:00:01.660 ****** 2026-01-22 12:07:00,292 p=35404 u=zuul n=ansible | changed: [localhost] 2026-01-22 12:07:00,355 p=35404 u=zuul n=ansible | PLAY RECAP ********************************************************************* 2026-01-22 12:07:00,355 p=35404 u=zuul n=ansible | localhost : ok=8 changed=3 unreachable=0 failed=0 skipped=4 rescued=0 ignored=0 2026-01-22 12:07:00,355 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:07:00 +0000 (0:00:00.551) 0:00:02.212 ****** 2026-01-22 12:07:00,355 p=35404 u=zuul n=ansible | =============================================================================== 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | Create OpenStackControlPlane CR Kustomization --------------------------- 0.67s 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | Save compute info ------------------------------------------------------- 0.55s 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | Ensure the kustomizations dirs exists ----------------------------------- 0.45s 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | Load parameters --------------------------------------------------------- 0.13s 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | Set specific fact for compute accesses ---------------------------------- 0.06s 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | Create EDPM CR Kustomization -------------------------------------------- 0.05s 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | Ensure we know about the 
private host keys ------------------------------ 0.05s 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | Set facts for further usage within the framework ------------------------ 0.05s 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | Ensure CRC hostname is set ---------------------------------------------- 0.04s 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | Ensure we have needed bits for compute when needed ---------------------- 0.04s 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | Check we have some compute in inventory --------------------------------- 0.04s 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | Ensure that the isolated net was configured for crc --------------------- 0.04s 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | Thursday 22 January 2026 12:07:00 +0000 (0:00:00.551) 0:00:02.212 ****** 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | =============================================================================== 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | ansible.builtin.copy ---------------------------------------------------- 1.28s 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | ansible.builtin.file ---------------------------------------------------- 0.45s 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | ansible.builtin.set_fact ------------------------------------------------ 0.19s 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | ansible.builtin.include_vars -------------------------------------------- 0.13s 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | ansible.builtin.assert -------------------------------------------------- 0.08s 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | ansible.builtin.shell --------------------------------------------------- 0.05s 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 2026-01-22 12:07:00,356 p=35404 u=zuul n=ansible | total ------------------------------------------------------------------- 2.17s home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_004_run_crc.log0000644000175000017500000002156415134411407026117 0ustar zuulzuul~/src/github.com/openstack-k8s-operators/install_yamls ~/ci-framework-data/artifacts error: the server doesn't have a resource type "openstackversion" bash scripts/gen-namespace.sh + '[' -z /home/zuul/ci-framework-data/artifacts/manifests ']' + '[' -z crc-storage ']' + OUT_DIR=/home/zuul/ci-framework-data/artifacts/manifests/crc-storage + '[' '!' -d /home/zuul/ci-framework-data/artifacts/manifests/crc-storage ']' + mkdir -p /home/zuul/ci-framework-data/artifacts/manifests/crc-storage + cat oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/crc-storage/namespace.yaml namespace/crc-storage created timeout 500s bash -c "while ! (oc get project.v1.project.openshift.io crc-storage); do sleep 1; done" NAME DISPLAY NAME STATUS crc-storage Active oc project crc-storage Now using project "crc-storage" on server "https://api.crc.testing:6443". bash scripts/create-pv.sh +++ dirname scripts/create-pv.sh ++ cd scripts ++ pwd -P + SCRIPTPATH=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/scripts + . 
/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/scripts/storage_common.sh ++ set -ex ++ OPERATION=create ++ cat ++ oc apply -f - configmap/crc-storage created ++ oc apply -f - ++ cat serviceaccount/crc-storage created ++ cat ++ oc apply -f - role.rbac.authorization.k8s.io/crc-storage-role created ++ cat ++ oc apply -f - rolebinding.rbac.authorization.k8s.io/crc-storage-rolebinding created + PV_NUM=12 + TIMEOUT=500s ++ oc get pv -o json ++ jq -r '.items[] | select(.status.phase | test("Released")).metadata.name' + released= ++ oc get node -o template --template '{{range .items}}{{.metadata.name}}{{"\n"}}{{end}}' -l node-role.kubernetes.io/worker + NODE_NAMES=crc + '[' -z crc ']' + for node in $NODE_NAMES + . /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/scripts/storage_apply.sh crc create ++ set -ex ++ NODE=crc ++ OPERATION=create ++ oc delete -n crc-storage job crc-storage-crc --ignore-not-found ++ cat ++ oc apply -f - Warning: would violate PodSecurity "restricted:latest": privileged (container "storage" must not set securityContext.privileged=true), allowPrivilegeEscalation != false (container "storage" must set securityContext.allowPrivilegeEscalation=false), unrestricted capabilities (container "storage" must set securityContext.capabilities.drop=["ALL"]), restricted volume types (volume "node-mnt" uses restricted volume type "hostPath"), runAsNonRoot != true (pod or container "storage" must set securityContext.runAsNonRoot=true), runAsUser=0 (pod and container "storage" must not set runAsUser=0) job.batch/crc-storage-crc created + oc wait job -n crc-storage -l install-yamls.crc.storage --for condition=Complete --timeout 500s job.batch/crc-storage-crc condition met bash scripts/gen-crc-pv-kustomize.sh + OUT=/home/zuul/ci-framework-data/artifacts/manifests + '[' -z '"local-storage"' ']' + '[' '!' -d /home/zuul/ci-framework-data/artifacts/manifests/crc ']' + mkdir -p /home/zuul/ci-framework-data/artifacts/manifests/crc + PV_NUM=12 + STORAGE_CAPACITY=10 ++ oc get node -o name -l node-role.kubernetes.io/worker ++ sed -e 's|node/||' ++ head -c-1 ++ tr '\n' ' ' + NODE_NAMES=crc + '[' -z crc ']' + cat + for node in $NODE_NAMES ++ seq -w 12 + for i in `seq -w $PV_NUM` + cat ++ sed -e 's/^"//' -e 's/"$//' + for i in `seq -w $PV_NUM` + cat ++ sed -e 's/^"//' -e 's/"$//' + for i in `seq -w $PV_NUM` + cat ++ sed -e 's/^"//' -e 's/"$//' + for i in `seq -w $PV_NUM` + cat ++ sed -e 's/^"//' -e 's/"$//' + for i in `seq -w $PV_NUM` + cat ++ sed -e 's/^"//' -e 's/"$//' + for i in `seq -w $PV_NUM` + cat ++ sed -e 's/^"//' -e 's/"$//' + for i in `seq -w $PV_NUM` + cat ++ sed -e 's/^"//' -e 's/"$//' + for i in `seq -w $PV_NUM` + cat ++ sed -e 's/^"//' -e 's/"$//' + for i in `seq -w $PV_NUM` + cat ++ sed -e 's/^"//' -e 's/"$//' + for i in `seq -w $PV_NUM` + cat ++ sed -e 's/^"//' -e 's/"$//' + for i in `seq -w $PV_NUM` + cat ++ sed -e 's/^"//' -e 's/"$//' + for i in `seq -w $PV_NUM` + cat ++ sed -e 's/^"//' -e 's/"$//' + cat oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/crc/storage.yaml Warning: resource storageclasses/local-storage is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by oc apply. oc apply should only be used on resources created declaratively by either oc create --save-config or oc apply. The missing annotation will be patched automatically. 
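
[editor's note] For readers following the gen-crc-pv-kustomize.sh trace above: the repeated `cat` calls inside the `seq -w $PV_NUM` loop emit one hostPath PersistentVolume manifest per index, all bound to the local-storage StorageClass, and the result is applied in one shot as storage.yaml. The snippet below is a minimal, hypothetical sketch of that pattern under those assumptions — the /mnt path, reclaim policy and capacity are illustrative placeholders, not values taken from the actual script.

#!/bin/bash
# Illustrative sketch only: emit N hostPath PVs for a single worker node.
set -euo pipefail
NODE="crc"            # worker name, as discovered via 'oc get node -l node-role.kubernetes.io/worker'
PV_NUM=12             # number of volumes to generate
STORAGE_CAPACITY=10   # size in Gi
OUT=/tmp/storage.yaml

: > "${OUT}"
for i in $(seq -w "${PV_NUM}"); do
  cat >> "${OUT}" <<EOF
apiVersion: v1
kind: PersistentVolume
metadata:
  name: local-storage${i}-${NODE}
spec:
  capacity:
    storage: ${STORAGE_CAPACITY}Gi
  accessModes: [ReadWriteOnce]
  persistentVolumeReclaimPolicy: Delete
  storageClassName: local-storage
  hostPath:
    path: /mnt/openstack/pv${i}
---
EOF
done
oc apply -f "${OUT}"
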
storageclass.storage.k8s.io/local-storage configured Warning: resource persistentvolumes/local-storage01-crc is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by oc apply. oc apply should only be used on resources created declaratively by either oc create --save-config or oc apply. The missing annotation will be patched automatically. persistentvolume/local-storage01-crc configured Warning: resource persistentvolumes/local-storage02-crc is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by oc apply. oc apply should only be used on resources created declaratively by either oc create --save-config or oc apply. The missing annotation will be patched automatically. persistentvolume/local-storage02-crc configured Warning: resource persistentvolumes/local-storage03-crc is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by oc apply. oc apply should only be used on resources created declaratively by either oc create --save-config or oc apply. The missing annotation will be patched automatically. persistentvolume/local-storage03-crc configured Warning: resource persistentvolumes/local-storage04-crc is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by oc apply. oc apply should only be used on resources created declaratively by either oc create --save-config or oc apply. The missing annotation will be patched automatically. persistentvolume/local-storage04-crc configured Warning: resource persistentvolumes/local-storage05-crc is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by oc apply. oc apply should only be used on resources created declaratively by either oc create --save-config or oc apply. The missing annotation will be patched automatically. persistentvolume/local-storage05-crc configured Warning: resource persistentvolumes/local-storage06-crc is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by oc apply. oc apply should only be used on resources created declaratively by either oc create --save-config or oc apply. The missing annotation will be patched automatically. persistentvolume/local-storage06-crc configured Warning: resource persistentvolumes/local-storage07-crc is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by oc apply. oc apply should only be used on resources created declaratively by either oc create --save-config or oc apply. The missing annotation will be patched automatically. persistentvolume/local-storage07-crc configured Warning: resource persistentvolumes/local-storage08-crc is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by oc apply. oc apply should only be used on resources created declaratively by either oc create --save-config or oc apply. The missing annotation will be patched automatically. persistentvolume/local-storage08-crc configured Warning: resource persistentvolumes/local-storage09-crc is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by oc apply. oc apply should only be used on resources created declaratively by either oc create --save-config or oc apply. The missing annotation will be patched automatically. 
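
[editor's note] The "missing the kubectl.kubernetes.io/last-applied-configuration annotation" warnings surrounding this apply are benign: the resources were first created without recording their applied configuration, so `oc apply` patches the annotation in on the fly, as the message itself says. If the noise is unwanted, the annotation can be recorded at creation time; a short illustration using the same manifest path as above:

# Record the applied configuration so later 'oc apply' runs diff cleanly.
oc create --save-config -f /home/zuul/ci-framework-data/artifacts/manifests/crc/storage.yaml
# ...or simply use 'oc apply' from the very first creation, which writes the annotation itself:
oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/crc/storage.yaml
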
persistentvolume/local-storage09-crc configured Warning: resource persistentvolumes/local-storage10-crc is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by oc apply. oc apply should only be used on resources created declaratively by either oc create --save-config or oc apply. The missing annotation will be patched automatically. persistentvolume/local-storage10-crc configured Warning: resource persistentvolumes/local-storage11-crc is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by oc apply. oc apply should only be used on resources created declaratively by either oc create --save-config or oc apply. The missing annotation will be patched automatically. persistentvolume/local-storage11-crc configured Warning: resource persistentvolumes/local-storage12-crc is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by oc apply. oc apply should only be used on resources created declaratively by either oc create --save-config or oc apply. The missing annotation will be patched automatically. persistentvolume/local-storage12-crc configured persistentvolumeclaim/ansible-ee-logs created ~/ci-framework-data/artifacts home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_005_run.log0000644000175000017500000000424715134411411025263 0ustar zuulzuul~/src/github.com/openstack-k8s-operators/install_yamls ~/ci-framework-data/artifacts error: the server doesn't have a resource type "openstackversion" bash scripts/gen-namespace.sh + '[' -z /home/zuul/ci-framework-data/artifacts/manifests ']' + '[' -z openstack ']' + OUT_DIR=/home/zuul/ci-framework-data/artifacts/manifests/openstack + '[' '!' -d /home/zuul/ci-framework-data/artifacts/manifests/openstack ']' + cat oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/openstack/namespace.yaml Warning: resource namespaces/openstack is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by oc apply. oc apply should only be used on resources created declaratively by either oc create --save-config or oc apply. The missing annotation will be patched automatically. namespace/openstack configured timeout 500s bash -c "while ! (oc get project.v1.project.openshift.io openstack); do sleep 1; done" NAME DISPLAY NAME STATUS openstack Active oc project openstack Now using project "openstack" on server "https://api.crc.testing:6443". bash scripts/gen-input-kustomize.sh + OUT=/home/zuul/ci-framework-data/artifacts/manifests + '[' -z openstack ']' + '[' -z osp-secret ']' + '[' -z 12345678 ']' + '[' -z 1234567842 ']' + '[' -z 767c3ed056cbaa3b9dfedb8c6f825bf0 ']' + '[' -z sEFmdFjDUqRM2VemYslV5yGNWjokioJXsg8Nrlc3drU= ']' + '[' -z COX8bmlKAWn56XCGMrKQJj7dgHNAOl6f ']' + '[' -z openstack ']' + '[' -z libvirt-secret ']' + DIR=/home/zuul/ci-framework-data/artifacts/manifests/openstack/input + '[' '!' 
-d /home/zuul/ci-framework-data/artifacts/manifests/openstack/input ']' + mkdir -p /home/zuul/ci-framework-data/artifacts/manifests/openstack/input + pushd /home/zuul/ci-framework-data/artifacts/manifests/openstack/input ~/ci-framework-data/artifacts/manifests/openstack/input ~/src/github.com/openstack-k8s-operators/install_yamls + cat oc get secret/osp-secret || oc kustomize /home/zuul/ci-framework-data/artifacts/manifests/openstack/input | oc apply -f - Error from server (NotFound): secrets "osp-secret" not found secret/libvirt-secret created secret/octavia-ca-passphrase created secret/osp-secret created ~/ci-framework-data/artifacts home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_006_run.log0000644000175000017500000006026215134411702025266 0ustar zuulzuul~/src/github.com/openstack-k8s-operators/install_yamls ~/ci-framework-data/artifacts error: the server doesn't have a resource type "openstackversion" bash scripts/validate-marketplace.sh + '[' -z 500s ']' + OPERATOR_NAMESPACE=openshift-marketplace ++ oc get pods --no-headers -n openshift-marketplace ++ grep -viE 'running|completed' + not_running_pods= + '[' -z '' ']' + echo 'All openshift-marketplace pods seems to me fine' All openshift-marketplace pods seems to me fine + OPERATORS='openshift-cert-manager-operator kubernetes-nmstate-operator metallb-operator' + for operator in $OPERATORS + n=0 + retries=20 + true + oc get packagemanifests -n openshift-marketplace + grep openshift-cert-manager-operator openshift-cert-manager-operator Red Hat Operators 333d + '[' 0 -eq 0 ']' + break + for operator in $OPERATORS + n=0 + retries=20 + true + oc get packagemanifests -n openshift-marketplace + grep kubernetes-nmstate-operator kubernetes-nmstate-operator Red Hat Operators 333d + '[' 0 -eq 0 ']' + break + for operator in $OPERATORS + n=0 + retries=20 + true + oc get packagemanifests -n openshift-marketplace + grep metallb-operator metallb-operator Red Hat Operators 333d + '[' 0 -eq 0 ']' + break bash scripts/gen-namespace.sh + '[' -z /home/zuul/ci-framework-data/artifacts/manifests ']' + '[' -z openshift-nmstate ']' + OUT_DIR=/home/zuul/ci-framework-data/artifacts/manifests/openshift-nmstate + '[' '!' -d /home/zuul/ci-framework-data/artifacts/manifests/openshift-nmstate ']' + mkdir -p /home/zuul/ci-framework-data/artifacts/manifests/openshift-nmstate + cat oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/openshift-nmstate/namespace.yaml namespace/openshift-nmstate created timeout 500s bash -c "while ! (oc get project.v1.project.openshift.io openshift-nmstate); do sleep 1; done" NAME DISPLAY NAME STATUS openshift-nmstate Active bash scripts/gen-olm-nmstate.sh + '[' -z /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/nmstate/op ']' + '[' '!' -d /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/nmstate/op ']' + mkdir -p /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/nmstate/op + '[' -z /home/zuul/ci-framework-data/artifacts/manifests/openshift-nmstate/nmstate/cr ']' + '[' '!' 
-d /home/zuul/ci-framework-data/artifacts/manifests/openshift-nmstate/nmstate/cr ']' + mkdir -p /home/zuul/ci-framework-data/artifacts/manifests/openshift-nmstate/nmstate/cr + echo OPERATOR_DIR /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/nmstate/op OPERATOR_DIR /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/nmstate/op + echo DEPLOY_DIR /home/zuul/ci-framework-data/artifacts/manifests/openshift-nmstate/nmstate/cr DEPLOY_DIR /home/zuul/ci-framework-data/artifacts/manifests/openshift-nmstate/nmstate/cr + cat + cat + cat oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/nmstate/op operatorgroup.operators.coreos.com/openshift-nmstate-tn6k8 created subscription.operators.coreos.com/kubernetes-nmstate-operator created timeout 500s bash -c "while ! (oc get deployments/nmstate-operator -n openshift-nmstate); do sleep 10; done" Error from server (NotFound): deployments.apps "nmstate-operator" not found Error from server (NotFound): deployments.apps "nmstate-operator" not found NAME READY UP-TO-DATE AVAILABLE AGE nmstate-operator 1/1 1 1 9s oc wait deployments/nmstate-operator -n openshift-nmstate --for condition=Available --timeout=500s deployment.apps/nmstate-operator condition met timeout 500s bash -c "while ! (oc wait pod -n openshift-apiserver -l apiserver=true --for condition=Ready); do sleep 10; done" pod/apiserver-76f77b778f-xqd5z condition met oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/openshift-nmstate/nmstate/cr nmstate.nmstate.io/nmstate created timeout 500s bash -c "while ! (oc get pod --no-headers=true -l component=kubernetes-nmstate-handler -n openshift-nmstate| grep nmstate-handler); do sleep 10; done" No resources found in openshift-nmstate namespace. nmstate-handler-xz7xx 0/1 Running 0 10s oc wait pod -n openshift-nmstate -l component=kubernetes-nmstate-handler --for condition=Ready --timeout=500s pod/nmstate-handler-xz7xx condition met timeout 500s bash -c "while ! (oc get deployments/nmstate-webhook -n openshift-nmstate); do sleep 10; done" NAME READY UP-TO-DATE AVAILABLE AGE nmstate-webhook 0/1 1 0 10s oc wait deployments/nmstate-webhook -n openshift-nmstate --for condition=Available --timeout=500s deployment.apps/nmstate-webhook condition met WORKERS='crc' \ bash scripts/gen-nncp.sh + check_var_set DEPLOY_DIR + [[ ! -v DEPLOY_DIR ]] + '[' '!' -d /home/zuul/ci-framework-data/artifacts/manifests/openstack/nncp/cr ']' + mkdir -p /home/zuul/ci-framework-data/artifacts/manifests/openstack/nncp/cr + check_var_set WORKERS + [[ ! -v WORKERS ]] + check_var_set INTERFACE + [[ ! -v INTERFACE ]] + check_var_set BRIDGE_NAME + [[ ! -v BRIDGE_NAME ]] + check_var_set INTERFACE_MTU + [[ ! -v INTERFACE_MTU ]] + check_var_set VLAN_START + [[ ! -v VLAN_START ]] + check_var_set VLAN_STEP + [[ ! -v VLAN_STEP ]] + check_var_set VLAN_STEP + [[ ! -v VLAN_STEP ]] + check_var_set INTERNALAPI_PREFIX + [[ ! -v INTERNALAPI_PREFIX ]] + check_var_set STORAGE_PREFIX + [[ ! -v STORAGE_PREFIX ]] + check_var_set STORAGEMGMT_PREFIX + [[ ! -v STORAGEMGMT_PREFIX ]] + check_var_set TENANT_PREFIX + [[ ! -v TENANT_PREFIX ]] + check_var_set DESIGNATE_PREFIX + [[ ! -v DESIGNATE_PREFIX ]] + check_var_set DESIGNATE_EXT_PREFIX + [[ ! 
-v DESIGNATE_EXT_PREFIX ]] + '[' -n '' ']' + echo DEPLOY_DIR /home/zuul/ci-framework-data/artifacts/manifests/openstack/nncp/cr DEPLOY_DIR /home/zuul/ci-framework-data/artifacts/manifests/openstack/nncp/cr + echo WORKERS crc WORKERS crc + echo INTERFACE ens7 INTERFACE ens7 + echo BRIDGE_NAME ospbr BRIDGE_NAME ospbr + echo INTERFACE_BGP_1 INTERFACE_BGP_1 + echo INTERFACE_BGP_2 INTERFACE_BGP_2 + echo INTERFACE_MTU 1500 INTERFACE_MTU 1500 + echo VLAN_START 20 VLAN_START 20 + echo VLAN_STEP 1 VLAN_STEP 1 + echo STORAGE_MACVLAN STORAGE_MACVLAN + '[' -n true ']' + echo CTLPLANE_IP_ADDRESS_PREFIX 192.168.122 CTLPLANE_IP_ADDRESS_PREFIX 192.168.122 + echo CTLPLANE_IP_ADDRESS_SUFFIX 10 CTLPLANE_IP_ADDRESS_SUFFIX 10 + '[' -n '' ']' + '[' -n '' ']' + IP_ADDRESS_SUFFIX=5 + IPV6_ADDRESS_SUFFIX=5 + rm --force '/home/zuul/ci-framework-data/artifacts/manifests/openstack/nncp/cr/*_nncp.yaml' + internalapi_vlan_id=20 + storage_vlan_id=21 + tenant_vlan_id=22 + storagemgmt_vlan_id=23 + octavia_vlan_id=24 + designate_vlan_id=25 + designate_ext_vlan_id=26 + for WORKER in ${WORKERS} + cat + '[' -n '' ']' + '[' -n '' ']' + '[' -n '' ']' + '[' -n '' ']' + '[' -n '' ']' + '[' -n '' ']' + '[' -n '' ']' + '[' -n '' ']' + '[' -n '' ']' + '[' -n '' ']' + cat + cat + '[' -n true ']' + cat + '[' -n '' ']' + cat + cat + '[' -n '' ']' + '[' -n true ']' + cat + '[' -n '' ']' + cat + cat + '[' -n true ']' + cat + '[' -n '' ']' + cat + cat + '[' -n true ']' + cat + '[' -n '' ']' + cat + cat + cat + '[' -n true ']' + cat + '[' -n '' ']' + cat + cat + '[' -n true ']' + cat + '[' -n '' ']' + cat + cat + '[' -n true ']' + cat + '[' -n '' ']' + cat + '[' -n '' ']' + cat + IP_ADDRESS_SUFFIX=6 + IPV6_ADDRESS_SUFFIX=6 + CTLPLANE_IP_ADDRESS_SUFFIX=11 + CTLPLANE_IPV6_ADDRESS_SUFFIX=1 oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/openstack/nncp/cr/ nodenetworkconfigurationpolicy.nmstate.io/ens7-crc created timeout 240s bash -c "while ! (oc wait nncp -l osp/interface=ens7 --for jsonpath='{.status.conditions[0].reason}'=SuccessfullyConfigured); do sleep 10; done" nodenetworkconfigurationpolicy.nmstate.io/ens7-crc condition met if test -n "192.168.122.10"; then oc patch dns.operator/default --type merge -p '{"spec":{"upstreamResolvers":{"policy":"Sequential","upstreams":[{"type":"Network","address":"'192.168.122.10'","port":53},{"type":"SystemResolvConf"}]}}}'; fi dns.operator.openshift.io/default patched timeout 240s bash -c "while ! (oc wait dns.operator/default --for condition=available); do sleep 10; done" dns.operator.openshift.io/default condition met bash scripts/gen-namespace.sh + '[' -z /home/zuul/ci-framework-data/artifacts/manifests ']' + '[' -z metallb-system ']' + OUT_DIR=/home/zuul/ci-framework-data/artifacts/manifests/metallb-system + '[' '!' -d /home/zuul/ci-framework-data/artifacts/manifests/metallb-system ']' + mkdir -p /home/zuul/ci-framework-data/artifacts/manifests/metallb-system + cat oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/namespace.yaml namespace/metallb-system created timeout 500s bash -c "while ! (oc get project.v1.project.openshift.io metallb-system); do sleep 1; done" NAME DISPLAY NAME STATUS metallb-system Active bash scripts/gen-olm-metallb.sh + '[' -z /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/metallb/op ']' + '[' '!' 
-d /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/metallb/op ']' + mkdir -p /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/metallb/op + '[' -z /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr ']' + '[' '!' -d /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr ']' + mkdir -p /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr + echo OPERATOR_DIR /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/metallb/op OPERATOR_DIR /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/metallb/op + echo DEPLOY_DIR /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr DEPLOY_DIR /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr + echo INTERFACE INTERFACE + cat + cat + cat oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/metallb/op operatorgroup.operators.coreos.com/metallb-operator created subscription.operators.coreos.com/metallb-operator-sub created timeout 500s bash -c "while ! (oc get pod --no-headers=true -l control-plane=controller-manager -n metallb-system| grep metallb-operator-controller); do sleep 10; done" No resources found in metallb-system namespace. No resources found in metallb-system namespace. metallb-operator-controller-manager-75c7758c8d-jv4lh 0/1 ContainerCreating 0 5s oc wait pod -n metallb-system --for condition=Ready -l control-plane=controller-manager --timeout=500s pod/metallb-operator-controller-manager-75c7758c8d-jv4lh condition met timeout 500s bash -c "while ! (oc get pod --no-headers=true -l component=webhook-server -n metallb-system| grep metallb-operator-webhook); do sleep 10; done" metallb-operator-webhook-server-789dcb66b5-4k49c 1/1 Running 0 41s oc wait pod -n metallb-system --for condition=Ready -l component=webhook-server --timeout=500s pod/metallb-operator-webhook-server-789dcb66b5-4k49c condition met oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/deploy_operator.yaml metallb.metallb.io/metallb created timeout 500s bash -c "while ! (oc get pod --no-headers=true -l component=speaker -n metallb-system | grep speaker); do sleep 10; done" No resources found in metallb-system namespace. speaker-8d28f 1/2 Running 0 10s oc wait pod -n metallb-system -l component=speaker --for condition=Ready --timeout=500s pod/speaker-8d28f condition met make operator_namespace make[1]: Entering directory '/home/zuul/src/github.com/openstack-k8s-operators/install_yamls' error: the server doesn't have a resource type "openstackversion" bash scripts/gen-namespace.sh + '[' -z /home/zuul/ci-framework-data/artifacts/manifests ']' + '[' -z cert-manager-operator ']' + OUT_DIR=/home/zuul/ci-framework-data/artifacts/manifests/cert-manager-operator + '[' '!' -d /home/zuul/ci-framework-data/artifacts/manifests/cert-manager-operator ']' + mkdir -p /home/zuul/ci-framework-data/artifacts/manifests/cert-manager-operator + cat oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/cert-manager-operator/namespace.yaml namespace/cert-manager-operator created timeout 500s bash -c "while ! (oc get project.v1.project.openshift.io cert-manager-operator); do sleep 1; done" NAME DISPLAY NAME STATUS cert-manager-operator Active oc project cert-manager-operator Now using project "cert-manager-operator" on server "https://api.crc.testing:6443". 
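
[editor's note] A pattern repeated throughout the nmstate, metallb and cert-manager steps above is worth calling out: because `oc wait` fails immediately when the target resource does not exist yet, install_yamls first polls for the object inside `timeout N bash -c "while ! (...); do sleep 10; done"` and only then blocks on the real readiness condition with `oc wait --for condition=...`. A generic, hypothetical helper capturing that idea (the function name and default timeout are illustrative, not part of the repo):

#!/bin/bash
set -euo pipefail

# wait_for: poll until 'oc get' succeeds, then block on the readiness condition.
# $1 = namespace, $2 = resource (e.g. deployments/nmstate-webhook), $3 = condition, $4 = timeout
wait_for() {
  local ns="$1" res="$2" cond="$3" tmo="${4:-500s}"
  timeout "${tmo}" bash -c \
    "while ! oc get -n '${ns}' ${res} >/dev/null 2>&1; do sleep 10; done"
  oc wait -n "${ns}" "${res}" --for "condition=${cond}" --timeout="${tmo}"
}

# Example usage mirroring the steps above:
wait_for openshift-nmstate deployments/nmstate-operator Available 500s
wait_for openshift-nmstate deployments/nmstate-webhook Available 500s
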
make[1]: Leaving directory '/home/zuul/src/github.com/openstack-k8s-operators/install_yamls' bash scripts/gen-olm-cert-manager.sh + '[' -z /home/zuul/ci-framework-data/artifacts/manifests/cert-manager-operator/cert-manager/op ']' + '[' -z cert-manager-operator ']' + '[' -z stable-v1 ']' + '[' '!' -d /home/zuul/ci-framework-data/artifacts/manifests/cert-manager-operator/cert-manager/op ']' + mkdir -p /home/zuul/ci-framework-data/artifacts/manifests/cert-manager-operator/cert-manager/op + echo OPERATOR_DIR /home/zuul/ci-framework-data/artifacts/manifests/cert-manager-operator/cert-manager/op OPERATOR_DIR /home/zuul/ci-framework-data/artifacts/manifests/cert-manager-operator/cert-manager/op + echo OPERATOR_NAMESPACE cert-manager-operator OPERATOR_NAMESPACE cert-manager-operator + echo CHANNEL stable-v1 CHANNEL stable-v1 + cat + cat oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/cert-manager-operator/cert-manager/op operatorgroup.operators.coreos.com/cert-manager-operator-bccwx created subscription.operators.coreos.com/openshift-cert-manager-operator created while ! (oc get pod --no-headers=true -l name=cert-manager-operator -n cert-manager-operator| grep "cert-manager-operator"); do sleep 10; done No resources found in cert-manager-operator namespace. No resources found in cert-manager-operator namespace. cert-manager-operator-controller-manager-64cf6dff88-6t8vd 0/1 ContainerCreating 0 2s oc wait pod -n cert-manager-operator --for condition=Ready -l name=cert-manager-operator --timeout=300s pod/cert-manager-operator-controller-manager-64cf6dff88-6t8vd condition met while ! (oc get pod --no-headers=true -l app=cainjector -n cert-manager | grep "cert-manager-cainjector"); do sleep 10; done No resources found in cert-manager namespace. cert-manager-cainjector-855d9ccff4-qnqwg 0/1 ContainerCreating 0 4s oc wait pod -n cert-manager -l app=cainjector --for condition=Ready --timeout=300s pod/cert-manager-cainjector-855d9ccff4-qnqwg condition met while ! (oc get pod --no-headers=true -l app=webhook -n cert-manager | grep "cert-manager-webhook"); do sleep 10; done cert-manager-webhook-f4fb5df64-dz5f5 0/1 Running 0 17s oc wait pod -n cert-manager -l app=webhook --for condition=Ready --timeout=300s pod/cert-manager-webhook-f4fb5df64-dz5f5 condition met while ! (oc get pod --no-headers=true -l app=cert-manager -n cert-manager | grep "cert-manager"); do sleep 10; done cert-manager-86cb77c54b-qmnzk 1/1 Running 0 15s oc wait pod -n cert-manager -l app=cert-manager --for condition=Ready --timeout=300s pod/cert-manager-86cb77c54b-qmnzk condition met bash scripts/gen-namespace.sh + '[' -z /home/zuul/ci-framework-data/artifacts/manifests ']' + '[' -z openstack ']' + OUT_DIR=/home/zuul/ci-framework-data/artifacts/manifests/openstack + '[' '!' -d /home/zuul/ci-framework-data/artifacts/manifests/openstack ']' + cat oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/openstack/namespace.yaml namespace/openstack unchanged timeout 500s bash -c "while ! (oc get project.v1.project.openshift.io openstack); do sleep 1; done" NAME DISPLAY NAME STATUS openstack Active oc project openstack Now using project "openstack" on server "https://api.crc.testing:6443". bash scripts/gen-netatt.sh + check_var_set DEPLOY_DIR + [[ ! -v DEPLOY_DIR ]] + '[' '!' -d /home/zuul/ci-framework-data/artifacts/manifests/openstack/netattach/cr ']' + mkdir -p /home/zuul/ci-framework-data/artifacts/manifests/openstack/netattach/cr + check_var_set INTERFACE + [[ ! -v INTERFACE ]] + check_var_set BRIDGE_NAME + [[ ! 
-v BRIDGE_NAME ]] + check_var_set VLAN_START + [[ ! -v VLAN_START ]] + check_var_set VLAN_STEP + [[ ! -v VLAN_STEP ]] + '[' -z true ']' + '[' -n true ']' + '[' -n '' ']' + '[' -n true ']' + check_var_set INTERNALAPI_PREFIX + [[ ! -v INTERNALAPI_PREFIX ]] + check_var_set STORAGE_PREFIX + [[ ! -v STORAGE_PREFIX ]] + check_var_set STORAGEMGMT_PREFIX + [[ ! -v STORAGEMGMT_PREFIX ]] + check_var_set TENANT_PREFIX + [[ ! -v TENANT_PREFIX ]] + check_var_set DESIGNATE_PREFIX + [[ ! -v DESIGNATE_PREFIX ]] + check_var_set DESIGNATE_EXT_PREFIX + [[ ! -v DESIGNATE_EXT_PREFIX ]] + echo DEPLOY_DIR /home/zuul/ci-framework-data/artifacts/manifests/openstack/netattach/cr DEPLOY_DIR /home/zuul/ci-framework-data/artifacts/manifests/openstack/netattach/cr + echo INTERFACE ens7 INTERFACE ens7 + echo VLAN_START 20 VLAN_START 20 + echo VLAN_STEP 1 VLAN_STEP 1 + '[' -n true ']' + echo CTLPLANE_IP_ADDRESS_PREFIX 192.168.122 CTLPLANE_IP_ADDRESS_PREFIX 192.168.122 + echo CTLPLANE_IP_ADDRESS_SUFFIX 10 CTLPLANE_IP_ADDRESS_SUFFIX 10 + echo 'INTERNALAPI_PREFIX 172.17.0' INTERNALAPI_PREFIX 172.17.0 + echo 'STORAGE_PREFIX 172.18.0' STORAGE_PREFIX 172.18.0 + echo 'STORAGEMGMT_PREFIX 172.20.0' STORAGEMGMT_PREFIX 172.20.0 + echo 'TENANT_PREFIX 172.19.0' TENANT_PREFIX 172.19.0 + echo 'DESIGNATE_PREFIX 172.28.0' DESIGNATE_PREFIX 172.28.0 + echo 'DESIGNATE_PREFIX 172.50.0' DESIGNATE_PREFIX 172.50.0 + '[' -n '' ']' + cat + '[' -n true ']' + cat + cat + cat + '[' -n true ']' + cat + cat + cat + '[' -n true ']' + cat + cat + cat + '[' -n true ']' + cat + cat + cat + cat + '[' -n true ']' + cat + cat + cat + '[' -n true ']' + cat + cat + cat + cat + '[' -n '' ']' + '[' -n '' ']' oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/openstack/netattach/cr/ networkattachmentdefinition.k8s.cni.cncf.io/ctlplane created networkattachmentdefinition.k8s.cni.cncf.io/datacentre created networkattachmentdefinition.k8s.cni.cncf.io/designate created networkattachmentdefinition.k8s.cni.cncf.io/designateext created networkattachmentdefinition.k8s.cni.cncf.io/internalapi created networkattachmentdefinition.k8s.cni.cncf.io/octavia created networkattachmentdefinition.k8s.cni.cncf.io/storage created networkattachmentdefinition.k8s.cni.cncf.io/storagemgmt created networkattachmentdefinition.k8s.cni.cncf.io/tenant created oc delete --ignore-not-found=true -f /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/ipaddresspools.yaml error: the path "/home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/ipaddresspools.yaml" does not exist make: [Makefile:2556: metallb_config_cleanup] Error 1 (ignored) oc delete --ignore-not-found=true -f /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/l2advertisement.yaml error: the path "/home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/l2advertisement.yaml" does not exist make: [Makefile:2557: metallb_config_cleanup] Error 1 (ignored) oc delete --ignore-not-found=true -f /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/bgppeers.yaml error: the path "/home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/bgppeers.yaml" does not exist make: [Makefile:2558: metallb_config_cleanup] Error 1 (ignored) oc delete --ignore-not-found=true -f /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/bgpadvertisement.yaml error: the path "/home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/bgpadvertisement.yaml" does not exist make: [Makefile:2559: 
metallb_config_cleanup] Error 1 (ignored) oc delete --ignore-not-found=true -f /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/bgpextras.yaml error: the path "/home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/bgpextras.yaml" does not exist make: [Makefile:2560: metallb_config_cleanup] Error 1 (ignored) rm -Rf /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/ipaddresspools.yaml /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/l2advertisement.yaml /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/bgppeers.yaml /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/bgpadvertisement.yaml /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/bgpextras.yaml bash scripts/gen-metallb-config.sh + '[' -z /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr ']' + '[' '!' -d /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr ']' + '[' -z ens7 ']' + '[' -z ospbr ']' + '[' -z 64999 ']' + '[' -z 64999 ']' + '[' -z 100.65.4.1 ']' + '[' -z 100.64.4.1 ']' + '[' -z 172.30.4.2 ']' + '[' -z true ']' + echo DEPLOY_DIR /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr DEPLOY_DIR /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr + echo INTERFACE ens7 INTERFACE ens7 + echo CTLPLANE_METALLB_POOL 192.168.122.80-192.168.122.90 CTLPLANE_METALLB_POOL 192.168.122.80-192.168.122.90 + echo CTLPLANE_METALLB_IPV6_POOL fd00:aaaa::80-fd00:aaaa::90 CTLPLANE_METALLB_IPV6_POOL fd00:aaaa::80-fd00:aaaa::90 + cat + '[' -n true ']' + cat + '[' -n '' ']' + cat + '[' -n true ']' + cat + '[' -n '' ']' + cat + '[' -n true ']' + cat + '[' -n '' ']' + cat + '[' -n true ']' + cat + '[' -n '' ']' + cat + '[' -n true ']' + cat + '[' -n '' ']' + cat + cat + cat + cat oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/ipaddresspools.yaml ipaddresspool.metallb.io/ctlplane created ipaddresspool.metallb.io/internalapi created ipaddresspool.metallb.io/storage created ipaddresspool.metallb.io/tenant created ipaddresspool.metallb.io/designateext created oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/l2advertisement.yaml l2advertisement.metallb.io/ctlplane created l2advertisement.metallb.io/internalapi created l2advertisement.metallb.io/storage created l2advertisement.metallb.io/tenant created l2advertisement.metallb.io/designateext created bash scripts/gen-olm.sh + '[' -z openstack-operators ']' + '[' -z openstack ']' + '[' -z quay.io/openstack-k8s-operators/openstack-operator-index:latest ']' + '[' -z /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/openstack/op ']' + '[' '!' 
-d /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/openstack/op ']' + mkdir -p /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/openstack/op + OPERATOR_CHANNEL=alpha + OPERATOR_SOURCE=openstack-operator-index + OPERATOR_SOURCE_NAMESPACE=openstack-operators + echo OPERATOR_DIR /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/openstack/op OPERATOR_DIR /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/openstack/op + echo OPERATOR_CHANNEL alpha OPERATOR_CHANNEL alpha + echo OPERATOR_SOURCE openstack-operator-index OPERATOR_SOURCE openstack-operator-index + echo OPERATOR_SOURCE_NAMESPACE openstack-operators OPERATOR_SOURCE_NAMESPACE openstack-operators + cat + cat + cat bash scripts/gen-namespace.sh + '[' -z /home/zuul/ci-framework-data/artifacts/manifests ']' + '[' -z openstack-operators ']' + OUT_DIR=/home/zuul/ci-framework-data/artifacts/manifests/openstack-operators + '[' '!' -d /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators ']' + cat oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/namespace.yaml Warning: resource namespaces/openstack-operators is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by oc apply. oc apply should only be used on resources created declaratively by either oc create --save-config or oc apply. The missing annotation will be patched automatically. namespace/openstack-operators configured timeout 500s bash -c "while ! (oc get project.v1.project.openshift.io openstack-operators); do sleep 1; done" NAME DISPLAY NAME STATUS openstack-operators Active oc project openstack-operators Now using project "openstack-operators" on server "https://api.crc.testing:6443". oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/openstack/op catalogsource.operators.coreos.com/openstack-operator-index created operatorgroup.operators.coreos.com/openstack created subscription.operators.coreos.com/openstack-operator created ~/ci-framework-data/artifacts home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_009_run_netconfig.log0000644000175000017500000002057015134412157027330 0ustar zuulzuul~/src/github.com/openstack-k8s-operators/install_yamls ~/ci-framework-data/artifacts bash scripts/gen-namespace.sh + '[' -z /home/zuul/ci-framework-data/artifacts/manifests ']' + '[' -z openstack ']' + OUT_DIR=/home/zuul/ci-framework-data/artifacts/manifests/openstack + '[' '!' -d /home/zuul/ci-framework-data/artifacts/manifests/openstack ']' + cat oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/openstack/namespace.yaml namespace/openstack unchanged timeout 500s bash -c "while ! (oc get project.v1.project.openshift.io openstack); do sleep 1; done" NAME DISPLAY NAME STATUS openstack Active oc project openstack Already on project "openstack" on server "https://api.crc.testing:6443". bash scripts/gen-input-kustomize.sh + OUT=/home/zuul/ci-framework-data/artifacts/manifests + '[' -z openstack ']' + '[' -z osp-secret ']' + '[' -z 12345678 ']' + '[' -z 1234567842 ']' + '[' -z 767c3ed056cbaa3b9dfedb8c6f825bf0 ']' + '[' -z sEFmdFjDUqRM2VemYslV5yGNWjokioJXsg8Nrlc3drU= ']' + '[' -z COX8bmlKAWn56XCGMrKQJj7dgHNAOl6f ']' + '[' -z openstack ']' + '[' -z libvirt-secret ']' + DIR=/home/zuul/ci-framework-data/artifacts/manifests/openstack/input + '[' '!' 
-d /home/zuul/ci-framework-data/artifacts/manifests/openstack/input ']' + pushd /home/zuul/ci-framework-data/artifacts/manifests/openstack/input ~/ci-framework-data/artifacts/manifests/openstack/input ~/src/github.com/openstack-k8s-operators/install_yamls + cat oc get secret/osp-secret || oc kustomize /home/zuul/ci-framework-data/artifacts/manifests/openstack/input | oc apply -f - NAME TYPE DATA AGE osp-secret Opaque 27 5m55s oc kustomize /home/zuul/ci-framework-data/artifacts/manifests/openstack/infra/cr | oc delete --ignore-not-found=true -f - error: must build at directory: not a valid directory: evalsymlink failure on '/home/zuul/ci-framework-data/artifacts/manifests/openstack/infra/cr' : lstat /home/zuul/ci-framework-data/artifacts/manifests/openstack/infra: no such file or directory No resources found rm -Rf /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator /home/zuul/ci-framework-data/artifacts/manifests/openstack/infra/cr mkdir -p /home/zuul/ci-framework-data/artifacts/manifests/operator /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/infra/op /home/zuul/ci-framework-data/artifacts/manifests/openstack/infra/cr bash scripts/clone-operator-repo.sh Cloning repo: git clone -b main https://github.com/openstack-k8s-operators/infra-operator.git infra-operator Cloning into 'infra-operator'... Running checkout: git checkout 0121df869109 Note: switching to '0121df869109'. You are in 'detached HEAD' state. You can look around, make experimental changes and commit them, and you can discard any commits you make in this state without impacting any branches by switching back to a branch. If you want to create a new branch to retain commits you create, you may do so (now or later) by using -c with the switch command. Example: git switch -c Or undo this operation with: git switch - Turn off this advice by setting config variable advice.detachedHead to false HEAD is now at 0121df8 Merge pull request #509 from lmiccini/instanceha-refactor cp /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_netconfig.yaml /home/zuul/ci-framework-data/artifacts/manifests/openstack/infra/cr bash scripts/gen-service-kustomize.sh +++ dirname scripts/gen-service-kustomize.sh ++ cd scripts ++ pwd -P + SCRIPTPATH=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/scripts + . /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/scripts/common.sh --source-only ++ set -e + '[' -z openstack ']' + '[' -z NetConfig ']' + '[' -z osp-secret ']' + '[' -z /home/zuul/ci-framework-data/artifacts/manifests/openstack/infra/cr ']' + '[' -n '' ']' + REPLACEMENTS= + IMAGE=unused + IMAGE_PATH=containerImage + STORAGE_REQUEST=10G + INTERFACE_MTU=1500 + VLAN_START=20 + VLAN_STEP=1 + '[' '!' 
-d /home/zuul/ci-framework-data/artifacts/manifests/openstack/infra/cr ']' + pushd /home/zuul/ci-framework-data/artifacts/manifests/openstack/infra/cr ~/ci-framework-data/artifacts/manifests/openstack/infra/cr ~/src/github.com/openstack-k8s-operators/install_yamls + cat + [[ NetConfig == \O\p\e\n\S\t\a\c\k\C\o\n\t\r\o\l\P\l\a\n\e ]] + IFS=, + read -ra IMAGES + IFS=, + read -ra IMAGE_PATHS + '[' 1 '!=' 1 ']' + (( i=0 )) + (( i < 1 )) + SPEC_PATH=containerImage + SPEC_VALUE=unused + '[' unused '!=' unused ']' + (( i++ )) + (( i < 1 )) + '[' -n '' ']' + '[' NetConfig == OpenStackControlPlane ']' + '[' NetConfig == Galera ']' + '[' NetConfig == NetConfig ']' + '[' -z true ']' + IPV6_SUBNET_INDEX=1 + VLAN_INTERNALAPI=20 + VLAN_STORAGE=21 + VLAN_TENANT=22 + VLAN_STORAGEMGMT=23 + cat + '[' -n true ']' + cat + REPLACEMENTS=' - source: kind: ConfigMap name: vlan-config fieldPath: data.internalapi targets: - select: kind: NetConfig fieldPaths: - spec.networks.[name=internalapi].subnets.0.vlan - source: kind: ConfigMap name: vlan-config fieldPath: data.storage targets: - select: kind: NetConfig fieldPaths: - spec.networks.[name=storage].subnets.0.vlan - source: kind: ConfigMap name: vlan-config fieldPath: data.storagemgmt targets: - select: kind: NetConfig fieldPaths: - spec.networks.[name=storagemgmt].subnets.0.vlan - source: kind: ConfigMap name: vlan-config fieldPath: data.tenant targets: - select: kind: NetConfig fieldPaths: - spec.networks.[name=tenant].subnets.0.vlan' + '[' -z true ']' + '[' -n '' ']' + '[' -n '' ']' + [[ NetConfig == \O\p\e\n\S\t\a\c\k\C\o\n\t\r\o\l\P\l\a\n\e ]] + [[ NetConfig == \O\p\e\n\S\t\a\c\k\C\o\n\t\r\o\l\P\l\a\n\e ]] + '[' -n ' - source: kind: ConfigMap name: vlan-config fieldPath: data.internalapi targets: - select: kind: NetConfig fieldPaths: - spec.networks.[name=internalapi].subnets.0.vlan - source: kind: ConfigMap name: vlan-config fieldPath: data.storage targets: - select: kind: NetConfig fieldPaths: - spec.networks.[name=storage].subnets.0.vlan - source: kind: ConfigMap name: vlan-config fieldPath: data.storagemgmt targets: - select: kind: NetConfig fieldPaths: - spec.networks.[name=storagemgmt].subnets.0.vlan - source: kind: ConfigMap name: vlan-config fieldPath: data.tenant targets: - select: kind: NetConfig fieldPaths: - spec.networks.[name=tenant].subnets.0.vlan' ']' + cat + kustomization_add_resources + echo merge config dir merge config dir ++ grep -v kustomization ++ find . 
-type f -name '*.yaml' + yamls='./network_v1beta1_netconfig.yaml ./vlan-config.yaml' + for y in ${yamls[@]} + kustomize edit add resource ./network_v1beta1_netconfig.yaml + for y in ${yamls[@]} + kustomize edit add resource ./vlan-config.yaml + popd ~/src/github.com/openstack-k8s-operators/install_yamls make wait make[1]: Entering directory '/home/zuul/src/github.com/openstack-k8s-operators/install_yamls' bash scripts/operator-wait.sh + TIMEOUT=500s +++ dirname scripts/operator-wait.sh ++ cd scripts ++ pwd -P + SCRIPTPATH=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/scripts + '[' -z openstack-operators ']' + '[' -z infra ']' + '[' infra = rabbitmq ']' + pushd /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/scripts ~/src/github.com/openstack-k8s-operators/install_yamls/scripts ~/src/github.com/openstack-k8s-operators/install_yamls + timeout 500s bash -c 'until [ "$(bash ./get-operator-status.sh)" == "Succeeded" ]; do sleep 5; done' + '[' -z openstack-operators ']' + '[' -z infra ']' + '[' infra = rabbitmq-cluster ']' + DEPL_NAME=infra-operator-controller-manager ++ oc get -n openstack-operators deployment infra-operator-controller-manager -o json ++ jq -e .status.availableReplicas + REPLICAS=1 + '[' 1 '!=' 1 ']' + echo Succeeded + exit 0 + rc=0 + popd ~/src/github.com/openstack-k8s-operators/install_yamls + exit 0 make[1]: Leaving directory '/home/zuul/src/github.com/openstack-k8s-operators/install_yamls' bash scripts/operator-deploy-resources.sh + '[' -z /home/zuul/ci-framework-data/artifacts/manifests/openstack/infra/cr ']' + NEXT_WAIT_TIME=0 + '[' 0 -eq 15 ']' + oc kustomize /home/zuul/ci-framework-data/artifacts/manifests/openstack/infra/cr + oc apply -f - netconfig.network.openstack.org/netconfig created + '[' 0 -lt 15 ']' ~/ci-framework-data/artifacts home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_007_run_openstack.log0000644000175000017500000000560215134412151027332 0ustar zuulzuul~/src/github.com/openstack-k8s-operators/install_yamls ~/ci-framework-data/artifacts error: the server doesn't have a resource type "openstackversion" # call make_openstack if it isn't already bash -c '(oc get subscription -n openstack-operators openstack-operator || make openstack) || true' NAME PACKAGE SOURCE CHANNEL openstack-operator openstack-operator openstack-operator-index alpha timeout 500s bash -c 'until $(oc get csv -l operators.coreos.com/openstack-operator.openstack-operators -n openstack-operators | grep -q Succeeded); do sleep 1; done' bash -c 'test -f /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/operator_v1beta1_openstack.yaml || make openstack_repo' make[1]: Entering directory '/home/zuul/src/github.com/openstack-k8s-operators/install_yamls' error: the server doesn't have a resource type "openstackversion" mkdir -p /home/zuul/ci-framework-data/artifacts/manifests/operator /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/openstack/op /home/zuul/ci-framework-data/artifacts/manifests/openstack/openstack/cr bash -c "test -d /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator || CHECKOUT_FROM_OPENSTACK_REF=false scripts/clone-operator-repo.sh" Cloning repo: git clone -b main https://github.com/openstack-k8s-operators/openstack-operator.git openstack-operator Cloning into 'openstack-operator'... 
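
[editor's note] The operator-deploy-resources.sh trace earlier in this log (NEXT_WAIT_TIME counting up to 15) shows install_yamls' retry idiom for applying CRs whose admission webhooks may not be serving yet: pipe `oc kustomize` into `oc apply -f -` and retry a bounded number of times. A condensed, hypothetical rendering of that loop, using the NetConfig deploy dir from above; the variable names and sleep interval are illustrative:

#!/bin/bash
set -euo pipefail
DEPLOY_DIR=/home/zuul/ci-framework-data/artifacts/manifests/openstack/infra/cr
MAX_RETRIES=15

attempt=0
# Webhook endpoints can lag behind the operator Deployment becoming Available,
# so the first apply may be rejected transiently; retry with a short pause.
until oc kustomize "${DEPLOY_DIR}" | oc apply -f -; do
  attempt=$((attempt + 1))
  if [ "${attempt}" -ge "${MAX_RETRIES}" ]; then
    echo "giving up after ${MAX_RETRIES} attempts" >&2
    exit 1
  fi
  sleep 5
done
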
make[1]: Leaving directory '/home/zuul/src/github.com/openstack-k8s-operators/install_yamls' oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/operator_v1beta1_openstack.yaml openstack.operator.openstack.org/openstack created # FIXME: Ugly hack to prevent OpenStack Baremetal operator from crashing when BMO is not installed if ! echo "" | grep -q "baremetalhosts.metal3.io"; then \ curl -o /tmp/bmh_crd.yaml --retry-all-errors --retry 5 --retry-delay 10 https://raw.githubusercontent.com/metal3-io/baremetal-operator/refs/heads/main/config/base/crds/bases/metal3.io_baremetalhosts.yaml; \ oc apply -f /tmp/bmh_crd.yaml; \ rm -f /tmp/bmh_crd.yaml; \ fi % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 62515 100 62515 0 0 911k 0 --:--:-- --:--:-- --:--:-- 924k customresourcedefinition.apiextensions.k8s.io/baremetalhosts.metal3.io created oc wait openstack/openstack -n openstack-operators --for condition=Ready --timeout=500s openstack.operator.openstack.org/openstack condition met timeout 500s bash -c "while ! (oc get services -n openstack-operators | grep -E '^(openstack|openstack-baremetal|infra)-operator-webhook-service' | wc -l | grep -q -e 3); do sleep 5; done" ~/ci-framework-data/artifacts home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_008_run_openstack_deploy.log0000644000175000017500000001055315134412153030712 0ustar zuulzuul~/src/github.com/openstack-k8s-operators/install_yamls ~/ci-framework-data/artifacts bash scripts/gen-namespace.sh + '[' -z /home/zuul/ci-framework-data/artifacts/manifests ']' + '[' -z openstack ']' + OUT_DIR=/home/zuul/ci-framework-data/artifacts/manifests/openstack + '[' '!' -d /home/zuul/ci-framework-data/artifacts/manifests/openstack ']' + cat oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/openstack/namespace.yaml namespace/openstack unchanged timeout 500s bash -c "while ! (oc get project.v1.project.openshift.io openstack); do sleep 1; done" NAME DISPLAY NAME STATUS openstack Active oc project openstack Now using project "openstack" on server "https://api.crc.testing:6443". 
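
[editor's note] The "ugly hack" flagged in ci_script_007 above — pre-creating the baremetalhosts.metal3.io CRD so the OpenStack Baremetal operator does not crash when the baremetal-operator (BMO) is not installed — boils down to a guard-then-fetch-then-apply sequence. A hedged sketch of the same idea, differing from the Makefile only in that the guard uses `oc get crd` directly instead of grepping a pre-collected variable:

#!/bin/bash
set -euo pipefail
BMH_CRD_URL=https://raw.githubusercontent.com/metal3-io/baremetal-operator/refs/heads/main/config/base/crds/bases/metal3.io_baremetalhosts.yaml

# Only install the CRD if the cluster does not already provide it (e.g. via a real BMO install).
if ! oc get crd baremetalhosts.metal3.io >/dev/null 2>&1; then
  curl -o /tmp/bmh_crd.yaml --retry-all-errors --retry 5 --retry-delay 10 "${BMH_CRD_URL}"
  oc apply -f /tmp/bmh_crd.yaml
  rm -f /tmp/bmh_crd.yaml
fi
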
oc kustomize /home/zuul/ci-framework-data/artifacts/manifests/openstack/infra/cr | oc delete --ignore-not-found=true -f - error: must build at directory: not a valid directory: evalsymlink failure on '/home/zuul/ci-framework-data/artifacts/manifests/openstack/infra/cr' : lstat /home/zuul/ci-framework-data/artifacts/manifests/openstack/infra: no such file or directory No resources found true /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator /home/zuul/ci-framework-data/artifacts/manifests/openstack/infra/cr oc kustomize /home/zuul/ci-framework-data/artifacts/manifests/openstack/openstack/cr | oc delete --ignore-not-found=true -f - || true error: unable to find one of 'kustomization.yaml', 'kustomization.yml' or 'Kustomization' in directory '/home/zuul/ci-framework-data/artifacts/manifests/openstack/openstack/cr' No resources found true /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator /home/zuul/ci-framework-data/artifacts/manifests/openstack/openstack/cr mkdir -p /home/zuul/ci-framework-data/artifacts/manifests/operator /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/openstack/op /home/zuul/ci-framework-data/artifacts/manifests/openstack/openstack/cr bash -c "test -d /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator || CHECKOUT_FROM_OPENSTACK_REF=false scripts/clone-operator-repo.sh" cp /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml /home/zuul/ci-framework-data/artifacts/manifests/openstack/openstack/cr bash scripts/gen-service-kustomize.sh +++ dirname scripts/gen-service-kustomize.sh ++ cd scripts ++ pwd -P + SCRIPTPATH=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/scripts + . /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/scripts/common.sh --source-only ++ set -e + '[' -z openstack ']' + '[' -z OpenStackControlPlane ']' + '[' -z osp-secret ']' + '[' -z /home/zuul/ci-framework-data/artifacts/manifests/openstack/openstack/cr ']' + '[' -n '' ']' + REPLACEMENTS= + IMAGE=unused + IMAGE_PATH=containerImage + STORAGE_REQUEST=10G + INTERFACE_MTU=1500 + VLAN_START=20 + VLAN_STEP=1 + '[' '!' -d /home/zuul/ci-framework-data/artifacts/manifests/openstack/openstack/cr ']' + pushd /home/zuul/ci-framework-data/artifacts/manifests/openstack/openstack/cr ~/ci-framework-data/artifacts/manifests/openstack/openstack/cr ~/src/github.com/openstack-k8s-operators/install_yamls + cat + [[ OpenStackControlPlane == \O\p\e\n\S\t\a\c\k\C\o\n\t\r\o\l\P\l\a\n\e ]] + [[ '' != '' ]] + IFS=, + read -ra IMAGES + IFS=, + read -ra IMAGE_PATHS + '[' 1 '!=' 1 ']' + (( i=0 )) + (( i < 1 )) + SPEC_PATH=containerImage + SPEC_VALUE=unused + '[' unused '!=' unused ']' + (( i++ )) + (( i < 1 )) + '[' -n '' ']' + '[' OpenStackControlPlane == OpenStackControlPlane ']' + cat + '[' OpenStackControlPlane == Galera ']' + '[' OpenStackControlPlane == NetConfig ']' + '[' -n '' ']' + [[ OpenStackControlPlane == \O\p\e\n\S\t\a\c\k\C\o\n\t\r\o\l\P\l\a\n\e ]] + [[ true == \t\r\u\e ]] + cat + [[ OpenStackControlPlane == \O\p\e\n\S\t\a\c\k\C\o\n\t\r\o\l\P\l\a\n\e ]] + '[' -z true ']' + cat + '[' -n '' ']' + '[' -z true ']' + '[' -n true ']' + '[' -n '' ']' + '[' -n '' ']' + kustomization_add_resources + echo merge config dir merge config dir ++ find . 
-type f -name '*.yaml'
++ grep -v kustomization
+ yamls=./core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml
+ for y in ${yamls[@]}
+ kustomize edit add resource ./core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml
+ popd
~/src/github.com/openstack-k8s-operators/install_yamls ~/ci-framework-data/artifacts

home/zuul/zuul-output/logs/ci-framework-data/logs/ci_script_010_apply_the.log
openstackcontrolplane.core.openstack.org/controlplane created

home/zuul/zuul-output/logs/ci-framework-data/artifacts/

home/zuul/zuul-output/logs/ci-framework-data/artifacts/operator_images.yaml
cifmw_csv_images:
- RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT: quay.io/podified-antelope-centos9/openstack-octavia-health-manager@sha256:3f746f7c6a8c48c0f4a800dcb4bc49bfbc4de4a9ca6a55d8f22bc515a92ea1d9
- RELATED_IMAGE_BARBICAN_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/barbican-operator@sha256:e5e017be64edd679623ea1b7e6a1ae780fdcee4ef79be989b93d8c1d082da15b
- RELATED_IMAGE_CINDER_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/cinder-operator@sha256:e950ac2df7be78ae0cbcf62fe12ee7a06b628f1903da6fcb741609e857eb1a7f
- RELATED_IMAGE_DESIGNATE_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/designate-operator@sha256:6c88312afa9673f7b72c558368034d7a488ead73080cdcdf581fe85b99263ece
- RELATED_IMAGE_GLANCE_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/glance-operator@sha256:9caae9b3ee328df678baa26454e45e47693acdadb27f9c635680597aaec43337
- RELATED_IMAGE_HEAT_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/heat-operator@sha256:2f9a2f064448faebbae58f52d564dc0e8e39bed0fc12bd6b9fe925e42f1b5492
- RELATED_IMAGE_HORIZON_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/horizon-operator@sha256:3311e627bcb860d9443592a2c67078417318c9eb77d8ef4d07f9aa7027d46822
- RELATED_IMAGE_INFRA_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/infra-operator@sha256:2eac1b9dadaddf4734f35e3dd1996dca960e97d2f304cbd48254b900a840a84a
- RELATED_IMAGE_IRONIC_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/ironic-operator@sha256:d3c55b59cb192799f8d31196c55c9e9bb3cd38aef7ec51ef257dabf1548e8b30
- RELATED_IMAGE_KEYSTONE_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/keystone-operator@sha256:8e340ff11922b38e811261de96982e1aff5f4eb8f225d1d9f5973025a4fe8349
- RELATED_IMAGE_MANILA_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/manila-operator@sha256:8bee4480babd6fd8f686e0ba52a304acb6ffb90f09c7c57e7f5df5f7658836d8
- RELATED_IMAGE_MARIADB_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/mariadb-operator@sha256:ff0b6c27e2d96afccd73fbbb5b5297a3f60c7f4f1dfd2a877152466697018d71
- RELATED_IMAGE_NEUTRON_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/neutron-operator@sha256:b57d65d2a968705b9067192a7cb33bd4a12489db87e1d05de78c076f2062cab4
- RELATED_IMAGE_NOVA_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/nova-operator@sha256:4e995cfa360a9d595a01b9c0541ab934692f2374203cb5738127dd784f793831
- RELATED_IMAGE_OCTAVIA_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/octavia-operator@sha256:a8fc8f9d445b1232f446119015b226008b07c6a259f5bebc1fcbb39ec310afe5
- RELATED_IMAGE_OPENSTACK_BAREMETAL_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:dae767a3ae652ffc70ba60c5bf2b5bf72c12d939353053e231b258948ededb22
- RELATED_IMAGE_OVN_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/ovn-operator@sha256:8b3bfb9e86618b7ac69443939b0968fae28a22cd62ea1e429b599ff9f8a5f8cf
- RELATED_IMAGE_PLACEMENT_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/placement-operator@sha256:65cfe5b9d5b0571aaf8ff9840b12cc56e90ca4cef162dd260c3a9fa2b52c6dd0
- RELATED_IMAGE_RABBITMQ_CLUSTER_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2
- RELATED_IMAGE_SWIFT_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922
- RELATED_IMAGE_TELEMETRY_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/telemetry-operator@sha256:e02722d7581bfe1c5fc13e2fa6811d8665102ba86635c77547abf6b933cde127
- RELATED_IMAGE_TEST_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/test-operator@sha256:c8dde42dafd41026ed2e4cfc26efc0fff63c4ba9d31326ae7dc644ccceaafa9d
- RELATED_IMAGE_WATCHER_OPERATOR_MANAGER_IMAGE_URL: quay.io/openstack-k8s-operators/watcher-operator@sha256:2d6d13b3c28e45c6bec980b8808dda8da4723ae87e66d04f53d52c3b3c51612b
cifmw_openstack_operator_index_image:
  OPENSTACK_IMG: quay.io/openstack-k8s-operators/openstack-operator-index@sha256:60fcdab102d9e8add108088834b9b6fd5244dfde4923882e1dee1666daa69409
cifmw_operator_images:
  BARBICAN_OP_IMG: quay.io/openstack-k8s-operators/barbican-operator@sha256:e5e017be64edd679623ea1b7e6a1ae780fdcee4ef79be989b93d8c1d082da15b
  CINDER_OP_IMG: quay.io/openstack-k8s-operators/cinder-operator@sha256:e950ac2df7be78ae0cbcf62fe12ee7a06b628f1903da6fcb741609e857eb1a7f
  DESIGNATE_OP_IMG: quay.io/openstack-k8s-operators/designate-operator@sha256:6c88312afa9673f7b72c558368034d7a488ead73080cdcdf581fe85b99263ece
  GLANCE_OP_IMG: quay.io/openstack-k8s-operators/glance-operator@sha256:9caae9b3ee328df678baa26454e45e47693acdadb27f9c635680597aaec43337
  HEAT_OP_IMG: quay.io/openstack-k8s-operators/heat-operator@sha256:2f9a2f064448faebbae58f52d564dc0e8e39bed0fc12bd6b9fe925e42f1b5492
  HORIZON_OP_IMG: quay.io/openstack-k8s-operators/horizon-operator@sha256:3311e627bcb860d9443592a2c67078417318c9eb77d8ef4d07f9aa7027d46822
  INFRA_OP_IMG: quay.io/openstack-k8s-operators/infra-operator@sha256:2eac1b9dadaddf4734f35e3dd1996dca960e97d2f304cbd48254b900a840a84a
  IRONIC_OP_IMG: quay.io/openstack-k8s-operators/ironic-operator@sha256:d3c55b59cb192799f8d31196c55c9e9bb3cd38aef7ec51ef257dabf1548e8b30
  KEYSTONE_OP_IMG: quay.io/openstack-k8s-operators/keystone-operator@sha256:8e340ff11922b38e811261de96982e1aff5f4eb8f225d1d9f5973025a4fe8349
  MANILA_OP_IMG: quay.io/openstack-k8s-operators/manila-operator@sha256:8bee4480babd6fd8f686e0ba52a304acb6ffb90f09c7c57e7f5df5f7658836d8
  MARIADB_OP_IMG: quay.io/openstack-k8s-operators/mariadb-operator@sha256:ff0b6c27e2d96afccd73fbbb5b5297a3f60c7f4f1dfd2a877152466697018d71
  NEUTRON_OP_IMG: quay.io/openstack-k8s-operators/neutron-operator@sha256:b57d65d2a968705b9067192a7cb33bd4a12489db87e1d05de78c076f2062cab4
  NOVA_OP_IMG: quay.io/openstack-k8s-operators/nova-operator@sha256:4e995cfa360a9d595a01b9c0541ab934692f2374203cb5738127dd784f793831
  OCTAVIA_OP_IMG: quay.io/openstack-k8s-operators/octavia-operator@sha256:a8fc8f9d445b1232f446119015b226008b07c6a259f5bebc1fcbb39ec310afe5
  OPENSTACK-BAREMETAL_OP_IMG: quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:dae767a3ae652ffc70ba60c5bf2b5bf72c12d939353053e231b258948ededb22
  OPENSTACK-INIT_OP_IMG: quay.io/openstack-k8s-operators/openstack-operator@sha256:e77aeb22b3361e4806ddfa4a13e473fd8adc66d592fb11210777b2870d6da13f
  OPENSTACK_OP_IMG: quay.io/openstack-k8s-operators/openstack-operator@sha256:e77aeb22b3361e4806ddfa4a13e473fd8adc66d592fb11210777b2870d6da13f
  OVN_OP_IMG: quay.io/openstack-k8s-operators/ovn-operator@sha256:8b3bfb9e86618b7ac69443939b0968fae28a22cd62ea1e429b599ff9f8a5f8cf
  PLACEMENT_OP_IMG: quay.io/openstack-k8s-operators/placement-operator@sha256:65cfe5b9d5b0571aaf8ff9840b12cc56e90ca4cef162dd260c3a9fa2b52c6dd0
  RABBITMQ_OP_IMG: quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2
  SWIFT_OP_IMG: quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922
  TELEMETRY_OP_IMG: quay.io/openstack-k8s-operators/telemetry-operator@sha256:e02722d7581bfe1c5fc13e2fa6811d8665102ba86635c77547abf6b933cde127
  TEST_OP_IMG: quay.io/openstack-k8s-operators/test-operator@sha256:c8dde42dafd41026ed2e4cfc26efc0fff63c4ba9d31326ae7dc644ccceaafa9d
  WATCHER_OP_IMG: quay.io/openstack-k8s-operators/watcher-operator@sha256:2d6d13b3c28e45c6bec980b8808dda8da4723ae87e66d04f53d52c3b3c51612b

home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_000_run_hook_without_retry.sh
#!/bin/bash
set -euo pipefail
exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_000_run_hook_without_retry.log) 2>&1
export ANSIBLE_CONFIG="/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/ansible.cfg"
export ANSIBLE_LOG_PATH="/home/zuul/ci-framework-data/logs/pre_infra_download_needed_tools.log"
ansible-playbook -i localhost, -c local -e namespace=openstack -e "@/home/zuul/ci-framework-data/artifacts/parameters/zuul-params.yml" -e "@/home/zuul/ci-framework-data/artifacts/parameters/install-yamls-params.yml" -e "@/home/zuul/ci-framework-data/artifacts/parameters/custom-params.yml" -e "cifmw_basedir=/home/zuul/ci-framework-data" -e "step=pre_infra" -e "hook_name=download_needed_tools" -e "playbook_dir=/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/download_tools.yaml

home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_001_fetch_openshift.sh
#!/bin/bash
set -euo pipefail
exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_001_fetch_openshift.log) 2>&1
oc login -u kubeadmin -p 123456789 --insecure-skip-tls-verify=true api.crc.testing:6443

home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_002_login_into_openshift_internal.sh
#!/bin/bash
set -euo pipefail
exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_002_login_into_openshift_internal.log) 2>&1
podman login -u kubeadmin -p sha256~DRDkWcBbIdKn0u9Nxm-2md9dPVf-gVXLRNv2O8KcEp4 --tls-verify=false default-route-openshift-image-registry.apps-crc.testing

home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_003_run_hook_without_retry_fetch.sh
#!/bin/bash
set -euo pipefail
exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_003_run_hook_without_retry_fetch.log) 2>&1
export ANSIBLE_CONFIG="/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/ansible.cfg"
export ANSIBLE_LOG_PATH="/home/zuul/ci-framework-data/logs/pre_deploy_fetch_compute_facts.log"
ansible-playbook -i /home/zuul/ci-framework-data/artifacts/zuul_inventory.yml -e namespace=openstack -e "@/home/zuul/ci-framework-data/artifacts/parameters/zuul-params.yml" -e "@/home/zuul/ci-framework-data/artifacts/parameters/install-yamls-params.yml" -e "@/home/zuul/ci-framework-data/artifacts/parameters/custom-params.yml" -e "@/home/zuul/ci-framework-data/artifacts/parameters/openshift-login-params.yml" -e "cifmw_basedir=/home/zuul/ci-framework-data" -e "step=pre_deploy" -e "hook_name=fetch_compute_facts" -e "playbook_dir=/home/zuul/src/github.com/openstack-k8s-operators/ci-framework/hooks/playbooks" /home/zuul/src/github.com/openstack-k8s-operators/ci-framework/hooks/playbooks/fetch_compute_facts.yml

home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_008_run_openstack_deploy.sh
#!/bin/bash
set -euo pipefail
exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_008_run_openstack_deploy.log) 2>&1
pushd /home/zuul/src/github.com/openstack-k8s-operators/install_yamls
export BMO_SETUP="False"
export OUT="/home/zuul/ci-framework-data/artifacts/manifests"
export OUTPUT_DIR="/home/zuul/ci-framework-data/artifacts/edpm"
export CHECKOUT_FROM_OPENSTACK_REF="true"
export OPENSTACK_K8S_BRANCH="main"
export KUBECONFIG="/home/zuul/.crc/machines/crc/kubeconfig"
export PATH="/home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin"
export NETWORK_MTU="1500"
export NNCP_DNS_SERVER="192.168.122.10"
export NNCP_INTERFACE="ens7"
export CLEANUP_DIR_CMD="true"
make openstack_deploy_prep
popd

home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_011_run_standalone.sh
#!/bin/bash
set -euo pipefail
exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_011_run_standalone.log) 2>&1
pushd /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup
export SSH_KEY_FILE="/home/zuul/.ssh/id_rsa"
export NTP_SERVER="pool.ntp.org"
export STANDALONE_VM="false"
export OS_NET_CONFIG_IFACE="nic2"
export REPO_SETUP_CMDS="/home/zuul/cdn_subscription_repos.sh"
export GATEWAY="192.168.122.10"
export EDPM_COMPUTE_NETWORK_IP="192.168.122"
export IP="192.168.122.100"
export IP_ADRESS_SUFFIX="100"
export DATAPLANE_DNS_SERVER="192.168.122.10"
export HOST_PRIMARY_RESOLV_CONF_ENTRY="192.168.122.10"
export EDPM_COMPUTE_CEPH_ENABLED="false"
export EDPM_COMPUTE_CEPH_NOVA="false"
export DNS_DOMAIN="ooo.test"
export TLS_ENABLED="true"
export TELEMETRY_ENABLED="true"
export OCTAVIA_ENABLED="true"
export BARBICAN_SERVICE_ENABLED="false"
export SWIFT_REPLICATED="false"
make standalone_deploy
popd

home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_004_run_crc.sh
#!/bin/bash
set -euo pipefail
exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_004_run_crc.log) 2>&1
pushd /home/zuul/src/github.com/openstack-k8s-operators/install_yamls
export BMO_SETUP="False"
export OUT="/home/zuul/ci-framework-data/artifacts/manifests"
export OUTPUT_DIR="/home/zuul/ci-framework-data/artifacts/edpm"
export CHECKOUT_FROM_OPENSTACK_REF="true"
export OPENSTACK_K8S_BRANCH="main"
export KUBECONFIG="/home/zuul/.crc/machines/crc/kubeconfig"
export PATH="/home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin"
export NETWORK_MTU="1500"
export NNCP_DNS_SERVER="192.168.122.10"
export NNCP_INTERFACE="ens7"
make crc_storage
popd

home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_005_run.sh
#!/bin/bash
set -euo pipefail
exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_005_run.log) 2>&1
pushd /home/zuul/src/github.com/openstack-k8s-operators/install_yamls
export BMO_SETUP="False"
export OUT="/home/zuul/ci-framework-data/artifacts/manifests"
export OUTPUT_DIR="/home/zuul/ci-framework-data/artifacts/edpm"
export CHECKOUT_FROM_OPENSTACK_REF="true"
export OPENSTACK_K8S_BRANCH="main"
export KUBECONFIG="/home/zuul/.crc/machines/crc/kubeconfig"
export PATH="/home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin"
export NETWORK_MTU="1500"
export NNCP_DNS_SERVER="192.168.122.10"
export NNCP_INTERFACE="ens7"
make input
popd

home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_006_run.sh
#!/bin/bash
set -euo pipefail
exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_006_run.log) 2>&1
pushd /home/zuul/src/github.com/openstack-k8s-operators/install_yamls
export BMO_SETUP="False"
export OUT="/home/zuul/ci-framework-data/artifacts/manifests"
export OUTPUT_DIR="/home/zuul/ci-framework-data/artifacts/edpm"
export CHECKOUT_FROM_OPENSTACK_REF="true"
export OPENSTACK_K8S_BRANCH="main"
export KUBECONFIG="/home/zuul/.crc/machines/crc/kubeconfig"
export PATH="/home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin"
export NETWORK_MTU="1500"
export NNCP_DNS_SERVER="192.168.122.10"
export NNCP_INTERFACE="ens7"
make openstack
popd

home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_009_run_netconfig.sh
#!/bin/bash
set -euo pipefail
exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_009_run_netconfig.log) 2>&1
pushd /home/zuul/src/github.com/openstack-k8s-operators/install_yamls
export BMO_SETUP="False"
export OUT="/home/zuul/ci-framework-data/artifacts/manifests"
export OUTPUT_DIR="/home/zuul/ci-framework-data/artifacts/edpm"
export CHECKOUT_FROM_OPENSTACK_REF="true"
export OPENSTACK_K8S_BRANCH="main"
export KUBECONFIG="/home/zuul/.crc/machines/crc/kubeconfig"
export PATH="/home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin"
export NETWORK_MTU="1500"
export NNCP_DNS_SERVER="192.168.122.10"
export NNCP_INTERFACE="ens7"
make netconfig_deploy
popd

home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_007_run_openstack.sh
#!/bin/bash
set -euo pipefail
exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_007_run_openstack.log) 2>&1
pushd /home/zuul/src/github.com/openstack-k8s-operators/install_yamls
export BMO_SETUP="False"
export OUT="/home/zuul/ci-framework-data/artifacts/manifests"
export OUTPUT_DIR="/home/zuul/ci-framework-data/artifacts/edpm"
export CHECKOUT_FROM_OPENSTACK_REF="true"
export OPENSTACK_K8S_BRANCH="main"
export KUBECONFIG="/home/zuul/.crc/machines/crc/kubeconfig"
export PATH="/home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin"
export NETWORK_MTU="1500"
export NNCP_DNS_SERVER="192.168.122.10"
export NNCP_INTERFACE="ens7"
make openstack_init
popd

home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_010_apply_the.sh
#!/bin/bash
set -euo pipefail
exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_010_apply_the.log) 2>&1
oc apply -f /home/zuul/ci-framework-data/artifacts/manifests/openstack/openstack/cr/cifmw-kustomization-result.yaml

home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_000_check_for_oc.sh
#!/bin/bash
set -euo pipefail
exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_000_check_for_oc.log) 2>&1
command -v oc

home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_script_000_run_openstack_must_gather.sh
#!/bin/bash
set -euo pipefail
exec > >(tee -i /home/zuul/ci-framework-data/logs/ci_script_000_run_openstack_must_gather.log) 2>&1
timeout 2700.0 oc adm must-gather --image quay.io/openstack-k8s-operators/openstack-must-gather:latest --timeout 30m --host-network=False --dest-dir /home/zuul/ci-framework-data/logs/openstack-must-gather --volume-percentage=80 -- ADDITIONAL_NAMESPACES=kuttl,openshift-storage,openshift-marketplace,openshift-operators,sushy-emulator,tobiko OPENSTACK_DATABASES=$OPENSTACK_DATABASES SOS_EDPM=$SOS_EDPM SOS_DECOMPRESS=$SOS_DECOMPRESS gather 2>&1 || {
    rc=$?
    if [ $rc -eq 124 ]; then
        echo "The must gather command did not finish on time!"
        echo "2700.0 seconds was not enough to finish the task."
fi } home/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible-vars.yml0000644000175000017500000151134215134437263025473 0ustar zuulzuul_included_dir: changed: false failed: false stat: atime: 1769083616.899712 attr_flags: '' attributes: [] block_size: 4096 blocks: 0 charset: binary ctime: 1769083606.369452 dev: 64513 device_type: 0 executable: true exists: true gid: 1000 gr_name: zuul inode: 21031368 isblk: false ischr: false isdir: true isfifo: false isgid: false islnk: false isreg: false issock: false isuid: false mimetype: inode/directory mode: '0755' mtime: 1769083606.369452 nlink: 2 path: /home/zuul/ci-framework-data/artifacts/parameters pw_name: zuul readable: true rgrp: true roth: true rusr: true size: 120 uid: 1000 version: '341120645' wgrp: false woth: false writeable: true wusr: true xgrp: true xoth: true xusr: true _included_file: changed: false failed: false stat: atime: 1769083618.0127394 attr_flags: '' attributes: [] block_size: 4096 blocks: 8 charset: us-ascii checksum: 0f8bc97dff23c5aaddd2afa844d83c2d10d2979b ctime: 1769083605.6694345 dev: 64513 device_type: 0 executable: false exists: true gid: 1000 gr_name: zuul inode: 125913305 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mimetype: text/plain mode: '0600' mtime: 1769083605.48243 nlink: 1 path: /home/zuul/ci-framework-data/artifacts/parameters/openshift-login-params.yml pw_name: zuul readable: true rgrp: false roth: false rusr: true size: 280 uid: 1000 version: '3749842572' wgrp: false woth: false writeable: true wusr: true xgrp: false xoth: false xusr: false _parsed_vars: changed: false content: Y2lmbXdfb3BlbnNoaWZ0X2FwaTogYXBpLmNyYy50ZXN0aW5nOjY0NDMKY2lmbXdfb3BlbnNoaWZ0X2NvbnRleHQ6IGRlZmF1bHQvYXBpLWNyYy10ZXN0aW5nOjY0NDMva3ViZWFkbWluCmNpZm13X29wZW5zaGlmdF9rdWJlY29uZmlnOiAvaG9tZS96dXVsLy5jcmMvbWFjaGluZXMvY3JjL2t1YmVjb25maWcKY2lmbXdfb3BlbnNoaWZ0X3Rva2VuOiBzaGEyNTZ+RFJEa1djQmJJZEtuMHU5TnhtLTJtZDlkUFZmLWdWWExSTnYyTzhLY0VwNApjaWZtd19vcGVuc2hpZnRfdXNlcjoga3ViZWFkbWluCg== encoding: base64 failed: false source: /home/zuul/ci-framework-data/artifacts/parameters/openshift-login-params.yml _tmp_dir: changed: true failed: false gid: 10001 group: zuul mode: '0700' owner: zuul path: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work/tmp/ansible.twkksh2e size: 40 state: directory uid: 10001 _yaml_files: changed: false examined: 4 failed: false files: - atime: 1769083486.596494 ctime: 1769083475.0302083 dev: 64513 gid: 1000 gr_name: zuul inode: 37775555 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mode: '0644' mtime: 1769083474.3421915 nlink: 1 path: /home/zuul/ci-framework-data/artifacts/parameters/zuul-params.yml pw_name: zuul rgrp: true roth: true rusr: true size: 15078 uid: 1000 wgrp: false woth: false wusr: true xgrp: false xoth: false xusr: false - atime: 1769083617.995739 ctime: 1769083606.372452 dev: 64513 gid: 1000 gr_name: zuul inode: 142638110 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mode: '0600' mtime: 1769083606.209448 nlink: 1 path: /home/zuul/ci-framework-data/artifacts/parameters/install-yamls-params.yml pw_name: zuul rgrp: false roth: false rusr: true size: 28122 uid: 1000 wgrp: false woth: false wusr: true xgrp: false xoth: false xusr: false - atime: 1769083567.6534958 ctime: 1769083564.620421 dev: 64513 gid: 1000 gr_name: zuul inode: 96504617 isblk: false ischr: false isdir: false 
isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mode: '0644' mtime: 1769083564.4534168 nlink: 1 path: /home/zuul/ci-framework-data/artifacts/parameters/custom-params.yml pw_name: zuul rgrp: true roth: true rusr: true size: 1252 uid: 1000 wgrp: false woth: false wusr: true xgrp: false xoth: false xusr: false - atime: 1769083618.0127394 ctime: 1769083605.6694345 dev: 64513 gid: 1000 gr_name: zuul inode: 125913305 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mode: '0600' mtime: 1769083605.48243 nlink: 1 path: /home/zuul/ci-framework-data/artifacts/parameters/openshift-login-params.yml pw_name: zuul rgrp: false roth: false rusr: true size: 280 uid: 1000 wgrp: false woth: false wusr: true xgrp: false xoth: false xusr: false matched: 4 msg: All paths examined skipped_paths: {} adoption_extra_vars: 'supported_volume_backends: [] supported_backup_backends: [] ' ansible_all_ipv4_addresses: - 38.102.83.83 ansible_all_ipv6_addresses: - fe80::f816:3eff:fe4f:9ce2 ansible_apparmor: status: disabled ansible_architecture: x86_64 ansible_bios_date: 04/01/2014 ansible_bios_vendor: SeaBIOS ansible_bios_version: 1.15.0-1 ansible_board_asset_tag: NA ansible_board_name: NA ansible_board_serial: NA ansible_board_vendor: NA ansible_board_version: NA ansible_chassis_asset_tag: NA ansible_chassis_serial: NA ansible_chassis_vendor: QEMU ansible_chassis_version: pc-i440fx-6.2 ansible_check_mode: false ansible_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-661.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=22ac9141-3960-4912-b20e-19fc8a328d40 ansible_collection_name: null ansible_config_file: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/post_playbook_4/ansible.cfg ansible_connection: ssh ansible_date_time: date: '2026-01-22' day: '22' epoch: '1769092100' epoch_int: '1769092100' hour: '14' iso8601: '2026-01-22T14:28:20Z' iso8601_basic: 20260122T142820041113 iso8601_basic_short: 20260122T142820 iso8601_micro: '2026-01-22T14:28:20.041113Z' minute: '28' month: '01' second: '20' time: '14:28:20' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Thursday weekday_number: '4' weeknumber: '03' year: '2026' ansible_default_ipv4: address: 38.102.83.83 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: fa:16:3e:4f:9c:e2 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether ansible_default_ipv6: {} ansible_dependent_role_names: [] ansible_device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 masters: {} uuids: sr0: - 2026-01-22-11-49-38-00 vda1: - 22ac9141-3960-4912-b20e-19fc8a328d40 ansible_devices: sr0: holders: [] host: '' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2026-01-22-11-49-38-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: - 22ac9141-3960-4912-b20e-19fc8a328d40 sectors: '167770079' sectorsize: 512 size: 80.00 GB start: '2048' uuid: 22ac9141-3960-4912-b20e-19fc8a328d40 removable: '0' rotational: '1' sas_address: null sas_device_handle: null 
scheduler_mode: none sectors: '167772160' sectorsize: '512' size: 80.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 ansible_diff_mode: false ansible_distribution: CentOS ansible_distribution_file_parsed: true ansible_distribution_file_path: /etc/centos-release ansible_distribution_file_variety: CentOS ansible_distribution_major_version: '9' ansible_distribution_release: Stream ansible_distribution_version: '9' ansible_dns: nameservers: - 192.168.122.10 - 199.204.44.24 - 199.204.47.54 ansible_domain: '' ansible_effective_group_id: 1000 ansible_effective_user_id: 1000 ansible_env: ANSIBLE_LOG_PATH: /home/zuul/ci-framework-data/logs/e2e-collect-logs-must-gather.log BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' DEBUGINFOD_URLS: 'https://debuginfod.centos.org/ ' HOME: /home/zuul LANG: en_US.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: /home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 60544 22 SSH_CONNECTION: 38.102.83.114 60544 38.102.83.83 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '18' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f ansible_eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.83 broadcast: 38.102.83.255 netmask: 
255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::f816:3eff:fe4f:9ce2 prefix: '64' scope: link macaddress: fa:16:3e:4f:9c:e2 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether ansible_facts: _ansible_facts_gathered: true all_ipv4_addresses: - 38.102.83.83 all_ipv6_addresses: - fe80::f816:3eff:fe4f:9ce2 ansible_local: {} apparmor: status: disabled architecture: x86_64 bios_date: 04/01/2014 bios_vendor: SeaBIOS bios_version: 1.15.0-1 board_asset_tag: NA board_name: NA board_serial: NA board_vendor: NA board_version: NA chassis_asset_tag: NA chassis_serial: NA chassis_vendor: QEMU chassis_version: pc-i440fx-6.2 cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-661.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=22ac9141-3960-4912-b20e-19fc8a328d40 crc_ci_bootstrap_instance_default_net_config: mtu: '1500' range: 192.168.122.0/24 router_net: '' transparent: true crc_ci_bootstrap_instance_nm_vlan_networks: - key: internal-api value: config_nm: false ip: 172.17.0.100 - key: storage value: config_nm: false ip: 172.18.0.100 - key: storage_mgmt value: config_nm: false ip: 172.20.0.100 - key: tenant value: config_nm: false ip: 172.19.0.100 crc_ci_bootstrap_instance_parent_port_create_yaml: admin_state_up: true allowed_address_pairs: [] binding_host_id: null binding_profile: {} binding_vif_details: {} binding_vif_type: null binding_vnic_type: normal created_at: '2026-01-22T11:53:24Z' data_plane_status: null description: '' device_id: '' device_owner: '' device_profile: null dns_assignment: - fqdn: host-192-168-122-100.openstacklocal. hostname: host-192-168-122-100 ip_address: 192.168.122.100 dns_domain: '' dns_name: '' extra_dhcp_opts: [] fixed_ips: - ip_address: 192.168.122.100 subnet_id: 7a5d6965-a3da-4c6f-bc86-42f95e634461 hardware_offload_type: null hints: '' id: 6ee4c47b-3df3-4385-88f3-b108a8f825dd ip_allocation: immediate mac_address: fa:16:3e:b5:53:e5 name: standalone-71c2ce28-24b8-4530-b131-72e04889a182 network_id: b176f5a8-05bf-4260-94f7-c8fdaa6712e1 numa_affinity_policy: null port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 propagate_uplink_status: null qos_network_policy_id: null qos_policy_id: null resource_request: null revision_number: 1 security_group_ids: [] status: DOWN tags: [] trunk_details: null trusted: null updated_at: '2026-01-22T11:53:24Z' crc_ci_bootstrap_network_name: zuul-ci-net-3911aa2e crc_ci_bootstrap_networks_out: controller: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.11/24 mac: fa:16:3e:b2:1f:bf mtu: '1500' internal-api: connection: ci-private-network-20 iface: eth1.20 ip: 172.17.0.4/24 mac: 52:54:00:91:b6:a0 mtu: '1496' parent_iface: eth1 vlan: 20 storage: connection: ci-private-network-21 iface: eth1.21 ip: 172.18.0.4/24 mac: 52:54:00:8c:4a:57 mtu: '1496' parent_iface: eth1 vlan: 21 storage_mgmt: connection: ci-private-network-23 iface: eth1.23 ip: 172.20.0.4/24 mac: 52:54:00:e6:85:75 mtu: '1496' parent_iface: eth1 vlan: 23 tenant: connection: ci-private-network-22 iface: eth1.22 ip: 172.19.0.4/24 mac: 52:54:00:34:d6:b6 mtu: '1496' parent_iface: eth1 vlan: 22 crc: default: connection: ci-private-network gw: 192.168.122.1 iface: ens7 ip: 192.168.122.10/24 mac: fa:16:3e:ee:8d:ea mtu: '1500' internal-api: connection: ci-private-network-20 iface: ens7.20 ip: 172.17.0.5/24 mac: 52:54:00:98:87:e6 mtu: '1496' parent_iface: ens7 vlan: 20 storage: 
connection: ci-private-network-21 iface: ens7.21 ip: 172.18.0.5/24 mac: 52:54:00:fe:f9:a5 mtu: '1496' parent_iface: ens7 vlan: 21 storage_mgmt: connection: ci-private-network-23 iface: ens7.23 ip: 172.20.0.5/24 mac: 52:54:00:66:52:20 mtu: '1496' parent_iface: ens7 vlan: 23 tenant: connection: ci-private-network-22 iface: ens7.22 ip: 172.19.0.5/24 mac: 52:54:00:c7:ca:3e mtu: '1496' parent_iface: ens7 vlan: 22 standalone: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.100/24 mac: fa:16:3e:b5:53:e5 mtu: '1500' internal-api: iface: eth1.20 ip: 172.17.0.100/24 mac: 52:54:00:87:a9:f6 mtu: '1496' parent_iface: eth1 vlan: 20 storage: iface: eth1.21 ip: 172.18.0.100/24 mac: 52:54:00:4f:71:e3 mtu: '1496' parent_iface: eth1 vlan: 21 storage_mgmt: iface: eth1.23 ip: 172.20.0.100/24 mac: 52:54:00:40:23:1f mtu: '1496' parent_iface: eth1 vlan: 23 tenant: iface: eth1.22 ip: 172.19.0.100/24 mac: 52:54:00:f6:c7:15 mtu: '1496' parent_iface: eth1 vlan: 22 crc_ci_bootstrap_private_net_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2026-01-22T11:51:59Z' description: '' dns_domain: '' id: b176f5a8-05bf-4260-94f7-c8fdaa6712e1 ipv4_address_scope: null ipv6_address_scope: null is_default: false is_vlan_qinq: null is_vlan_transparent: true l2_adjacency: true mtu: 1500 name: zuul-ci-net-3911aa2e port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 provider:network_type: null provider:physical_network: null provider:segmentation_id: null qos_policy_id: null revision_number: 1 router:external: false segments: null shared: false status: ACTIVE subnets: [] tags: [] updated_at: '2026-01-22T11:51:59Z' crc_ci_bootstrap_private_router_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2026-01-22T11:52:05Z' description: '' enable_ndp_proxy: null external_gateway_info: null flavor_id: null id: 68d8267d-56ed-4ee3-aa04-a2f7bd2ffe0b name: zuul-ci-subnet-router-3911aa2e project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 1 routes: [] status: ACTIVE tags: [] tenant_id: 4b633c451ac74233be3721a3635275e5 updated_at: '2026-01-22T11:52:05Z' crc_ci_bootstrap_private_subnet_create_yaml: allocation_pools: - end: 192.168.122.254 start: 192.168.122.2 cidr: 192.168.122.0/24 created_at: '2026-01-22T11:52:03Z' description: '' dns_nameservers: [] dns_publish_fixed_ip: null enable_dhcp: false gateway_ip: 192.168.122.1 host_routes: [] id: 7a5d6965-a3da-4c6f-bc86-42f95e634461 ip_version: 4 ipv6_address_mode: null ipv6_ra_mode: null name: zuul-ci-subnet-3911aa2e network_id: b176f5a8-05bf-4260-94f7-c8fdaa6712e1 project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 0 segment_id: null service_types: [] subnetpool_id: null tags: [] updated_at: '2026-01-22T11:52:03Z' crc_ci_bootstrap_provider_dns: - 199.204.44.24 - 199.204.47.54 crc_ci_bootstrap_router_name: zuul-ci-subnet-router-3911aa2e crc_ci_bootstrap_subnet_name: zuul-ci-subnet-3911aa2e date_time: date: '2026-01-22' day: '22' epoch: '1769092100' epoch_int: '1769092100' hour: '14' iso8601: '2026-01-22T14:28:20Z' iso8601_basic: 20260122T142820041113 iso8601_basic_short: 20260122T142820 iso8601_micro: '2026-01-22T14:28:20.041113Z' minute: '28' month: '01' second: '20' time: '14:28:20' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Thursday weekday_number: '4' weeknumber: '03' year: '2026' default_ipv4: address: 38.102.83.83 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: 
fa:16:3e:4f:9c:e2 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether default_ipv6: {} device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 masters: {} uuids: sr0: - 2026-01-22-11-49-38-00 vda1: - 22ac9141-3960-4912-b20e-19fc8a328d40 devices: sr0: holders: [] host: '' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2026-01-22-11-49-38-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: - 22ac9141-3960-4912-b20e-19fc8a328d40 sectors: '167770079' sectorsize: 512 size: 80.00 GB start: '2048' uuid: 22ac9141-3960-4912-b20e-19fc8a328d40 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '167772160' sectorsize: '512' size: 80.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 discovered_interpreter_python: /usr/bin/python3 distribution: CentOS distribution_file_parsed: true distribution_file_path: /etc/centos-release distribution_file_variety: CentOS distribution_major_version: '9' distribution_release: Stream distribution_version: '9' dns: nameservers: - 192.168.122.10 - 199.204.44.24 - 199.204.47.54 domain: '' effective_group_id: 1000 effective_user_id: 1000 env: ANSIBLE_LOG_PATH: /home/zuul/ci-framework-data/logs/e2e-collect-logs-must-gather.log BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' DEBUGINFOD_URLS: 'https://debuginfod.centos.org/ ' HOME: /home/zuul LANG: en_US.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: /home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 60544 22 SSH_CONNECTION: 38.102.83.114 60544 38.102.83.83 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '18' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] 
tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.83 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::f816:3eff:fe4f:9ce2 prefix: '64' scope: link macaddress: fa:16:3e:4f:9c:e2 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether fibre_channel_wwn: [] fips: false form_factor: Other fqdn: controller gather_subset: - min hostname: controller hostnqn: nqn.2014-08.org.nvmexpress:uuid:5350774e-8b5e-4dba-80a9-92d405981c1d interfaces: - lo - eth0 is_chroot: false iscsi_iqn: '' kernel: 5.14.0-661.el9.x86_64 kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Jan 16 09:19:22 UTC 2026' lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' 
ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback loadavg: 15m: 0.05 1m: 0.39 5m: 0.14 locally_reachable_ips: ipv4: - 38.102.83.83 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:fe4f:9ce2 lsb: {} lvm: N/A machine: x86_64 machine_id: 85ac68c10a6e7ae08ceb898dbdca0cb5 memfree_mb: 7084 memory_mb: nocache: free: 7292 used: 387 real: free: 7084 total: 7679 used: 595 swap: cached: 0 free: 0 total: 0 used: 0 memtotal_mb: 7679 module_setup: true mounts: - block_available: 20341226 block_size: 4096 block_total: 20954875 block_used: 613649 device: /dev/vda1 fstype: xfs inode_available: 41888619 inode_total: 41942512 inode_used: 53893 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 83317661696 size_total: 85831168000 uuid: 22ac9141-3960-4912-b20e-19fc8a328d40 nodename: controller os_family: RedHat pkg_mgr: dnf proc_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-661.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=22ac9141-3960-4912-b20e-19fc8a328d40 processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor processor_cores: 1 processor_count: 8 processor_nproc: 8 processor_threads_per_core: 1 processor_vcpus: 8 product_name: OpenStack Nova product_serial: NA product_uuid: NA product_version: 26.3.1 python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 25 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 25 - final - 0 python_version: 3.9.25 real_group_id: 1000 real_user_id: 1000 selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted selinux_python_present: true service_mgr: systemd ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBMKcpNrJpAx4CrcANXc7Mn4hy3MVnmG3hOgRXXsREK9Nr1ubSroZOpt4L9Fqy2kwyrYygLJWw/0Ub6eRiHYEsC8= ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIFnFPB1/0DZCQsBc2ZzNHDFdrc4p6KHouz9T+kXxP6Mv ssh_host_key_ed25519_public_keytype: ssh-ed25519 ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQC6HMCq1xxuJz4wWSauIns7H1KBFyS3Tk+19hW3WD1ndZ/bhbUbu/Rs5Ec0vOmp7s7EkHiuKXTatx4MjNzbUn6HsmYAZ8KDmxg1tlwm0YfSXEOBPVYxQj4PBNn4QD4CYpKyS1zS8Ely7Yym8eZ095ZwSz/BgCg/D7Tv5gEKNlMDY+Nh3eHp0yZHUCUd5znPesQynCClZjvXcU2ufx+FkKmwymn+yuVvC/scPXO17zyVxad783DV/4CFZEQD7NCTT0eX/8JTKhIBcNZlSHVmJZmRhdbl6ZKos4vLh3TecvWjEXfNuQL/Kco5I0KsbwWlSnBR0WPyVi06rg+gGruQdT2NEqJy9cGWkTLILsm201OrkG2ctFFqBnNqhSSHFCdHr89m2f5+FIx6vuPKo41YBK4Adz0RWxmR2/gZEIz7eVaPTNVtGS173eNxki861y7wowKHtZfqyo5YrgCJvtHDXprxnPqw6uMoOCOwCjfOV6IgjQfNH/R2CrKf9LRyF2/9pys= ssh_host_key_rsa_public_keytype: ssh-rsa swapfree_mb: 0 swaptotal_mb: 0 system: Linux system_capabilities: - '' system_capabilities_enforced: 'True' system_vendor: OpenStack Foundation uptime_seconds: 74 user_dir: /home/zuul user_gecos: '' user_gid: 1000 user_id: zuul user_shell: /bin/bash user_uid: 1000 userspace_architecture: x86_64 userspace_bits: '64' virtualization_role: guest virtualization_tech_guest: - openstack virtualization_tech_host: - kvm virtualization_type: 
openstack zuul_change_list: - data-plane-adoption ansible_fibre_channel_wwn: [] ansible_fips: false ansible_forks: 5 ansible_form_factor: Other ansible_fqdn: controller ansible_host: 38.102.83.83 ansible_hostname: controller ansible_hostnqn: nqn.2014-08.org.nvmexpress:uuid:5350774e-8b5e-4dba-80a9-92d405981c1d ansible_interfaces: - lo - eth0 ansible_inventory_sources: - /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/post_playbook_4/inventory.yaml ansible_is_chroot: false ansible_iscsi_iqn: '' ansible_kernel: 5.14.0-661.el9.x86_64 ansible_kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Jan 16 09:19:22 UTC 2026' ansible_lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback ansible_loadavg: 15m: 0.05 1m: 0.39 5m: 0.14 ansible_local: {} ansible_locally_reachable_ips: ipv4: - 38.102.83.83 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:fe4f:9ce2 ansible_lsb: {} ansible_lvm: N/A ansible_machine: x86_64 ansible_machine_id: 85ac68c10a6e7ae08ceb898dbdca0cb5 ansible_memfree_mb: 7084 ansible_memory_mb: nocache: free: 7292 used: 387 real: free: 7084 total: 7679 used: 595 swap: cached: 0 free: 0 total: 0 used: 0 ansible_memtotal_mb: 7679 ansible_mounts: - block_available: 20341226 block_size: 4096 block_total: 20954875 block_used: 613649 device: /dev/vda1 fstype: xfs inode_available: 41888619 inode_total: 41942512 inode_used: 53893 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 83317661696 size_total: 
85831168000 uuid: 22ac9141-3960-4912-b20e-19fc8a328d40 ansible_nodename: controller ansible_os_family: RedHat ansible_parent_role_names: - cifmw_setup ansible_parent_role_paths: - /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/roles/cifmw_setup ansible_pkg_mgr: dnf ansible_play_batch: &id002 - controller ansible_play_hosts: - controller ansible_play_hosts_all: - controller - crc - standalone ansible_play_name: Run ci/playbooks/e2e-collect-logs.yml ansible_play_role_names: &id003 - run_hook - os_must_gather - artifacts - env_op_images - run_hook - cifmw_setup ansible_playbook_python: /usr/lib/zuul/ansible/8/bin/python ansible_port: 22 ansible_proc_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-661.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=22ac9141-3960-4912-b20e-19fc8a328d40 ansible_processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor ansible_processor_cores: 1 ansible_processor_count: 8 ansible_processor_nproc: 8 ansible_processor_threads_per_core: 1 ansible_processor_vcpus: 8 ansible_product_name: OpenStack Nova ansible_product_serial: NA ansible_product_uuid: NA ansible_product_version: 26.3.1 ansible_python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 25 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 25 - final - 0 ansible_python_interpreter: auto ansible_python_version: 3.9.25 ansible_real_group_id: 1000 ansible_real_user_id: 1000 ansible_role_name: artifacts ansible_role_names: - os_must_gather - env_op_images - artifacts - run_hook - cifmw_setup ansible_run_tags: - all ansible_scp_extra_args: -o PermitLocalCommand=no ansible_selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted ansible_selinux_python_present: true ansible_service_mgr: systemd ansible_sftp_extra_args: -o PermitLocalCommand=no ansible_skip_tags: [] ansible_ssh_common_args: -o PermitLocalCommand=no ansible_ssh_executable: ssh ansible_ssh_extra_args: -o PermitLocalCommand=no ansible_ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBMKcpNrJpAx4CrcANXc7Mn4hy3MVnmG3hOgRXXsREK9Nr1ubSroZOpt4L9Fqy2kwyrYygLJWw/0Ub6eRiHYEsC8= ansible_ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ansible_ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIFnFPB1/0DZCQsBc2ZzNHDFdrc4p6KHouz9T+kXxP6Mv ansible_ssh_host_key_ed25519_public_keytype: ssh-ed25519 ansible_ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQC6HMCq1xxuJz4wWSauIns7H1KBFyS3Tk+19hW3WD1ndZ/bhbUbu/Rs5Ec0vOmp7s7EkHiuKXTatx4MjNzbUn6HsmYAZ8KDmxg1tlwm0YfSXEOBPVYxQj4PBNn4QD4CYpKyS1zS8Ely7Yym8eZ095ZwSz/BgCg/D7Tv5gEKNlMDY+Nh3eHp0yZHUCUd5znPesQynCClZjvXcU2ufx+FkKmwymn+yuVvC/scPXO17zyVxad783DV/4CFZEQD7NCTT0eX/8JTKhIBcNZlSHVmJZmRhdbl6ZKos4vLh3TecvWjEXfNuQL/Kco5I0KsbwWlSnBR0WPyVi06rg+gGruQdT2NEqJy9cGWkTLILsm201OrkG2ctFFqBnNqhSSHFCdHr89m2f5+FIx6vuPKo41YBK4Adz0RWxmR2/gZEIz7eVaPTNVtGS173eNxki861y7wowKHtZfqyo5YrgCJvtHDXprxnPqw6uMoOCOwCjfOV6IgjQfNH/R2CrKf9LRyF2/9pys= ansible_ssh_host_key_rsa_public_keytype: ssh-rsa ansible_swapfree_mb: 0 
ansible_swaptotal_mb: 0 ansible_system: Linux ansible_system_capabilities: - '' ansible_system_capabilities_enforced: 'True' ansible_system_vendor: OpenStack Foundation ansible_uptime_seconds: 74 ansible_user: zuul ansible_user_dir: /home/zuul ansible_user_gecos: '' ansible_user_gid: 1000 ansible_user_id: zuul ansible_user_shell: /bin/bash ansible_user_uid: 1000 ansible_userspace_architecture: x86_64 ansible_userspace_bits: '64' ansible_verbosity: 1 ansible_version: full: 2.15.12 major: 2 minor: 15 revision: 12 string: 2.15.12 ansible_virtualization_role: guest ansible_virtualization_tech_guest: - openstack ansible_virtualization_tech_host: - kvm ansible_virtualization_type: openstack cifmw_architecture_repo: /home/zuul/src/github.com/openstack-k8s-operators/architecture cifmw_architecture_repo_relative: src/github.com/openstack-k8s-operators/architecture cifmw_artifacts_basedir: '{{ cifmw_basedir }}' cifmw_artifacts_crc_host: api.crc.testing cifmw_artifacts_crc_sshkey: ~/.ssh/id_rsa cifmw_artifacts_crc_sshkey_ed25519: ~/.crc/machines/crc/id_ed25519 cifmw_artifacts_crc_user: core cifmw_artifacts_gather_logs: true cifmw_artifacts_mask_logs: true cifmw_basedir: /home/zuul/ci-framework-data cifmw_default_dns_servers: - 1.1.1.1 - 8.8.8.8 cifmw_dlrn_report_result: false cifmw_env_op_images_dir: '{{ cifmw_basedir }}' cifmw_env_op_images_dryrun: false cifmw_env_op_images_file: operator_images.yaml cifmw_install_yamls_defaults: ADOPTED_EXTERNAL_NETWORK: 172.21.1.0/24 ADOPTED_INTERNALAPI_NETWORK: 172.17.1.0/24 ADOPTED_STORAGEMGMT_NETWORK: 172.20.1.0/24 ADOPTED_STORAGE_NETWORK: 172.18.1.0/24 ADOPTED_TENANT_NETWORK: 172.9.1.0/24 ANSIBLEEE: config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_BRANCH: main ANSIBLEEE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-operator-index:latest ANSIBLEEE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/kuttl-test.yaml ANSIBLEEE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/test/kuttl/tests ANSIBLEEE_KUTTL_NAMESPACE: ansibleee-kuttl-tests ANSIBLEEE_REPO: https://github.com/openstack-k8s-operators/openstack-ansibleee-operator ANSIBLEE_COMMIT_HASH: '' BARBICAN: config/samples/barbican_v1beta1_barbican.yaml BARBICAN_BRANCH: main BARBICAN_COMMIT_HASH: '' BARBICAN_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/config/samples/barbican_v1beta1_barbican.yaml BARBICAN_DEPL_IMG: unused BARBICAN_IMG: quay.io/openstack-k8s-operators/barbican-operator-index:latest BARBICAN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/kuttl-test.yaml BARBICAN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/test/kuttl/tests BARBICAN_KUTTL_NAMESPACE: barbican-kuttl-tests BARBICAN_REPO: https://github.com/openstack-k8s-operators/barbican-operator.git BARBICAN_SERVICE_ENABLED: 'true' BARBICAN_SIMPLE_CRYPTO_ENCRYPTION_KEY: sE**********U= BAREMETAL_BRANCH: main BAREMETAL_COMMIT_HASH: '' BAREMETAL_IMG: quay.io/openstack-k8s-operators/openstack-baremetal-operator-index:latest BAREMETAL_OS_CONTAINER_IMG: '' BAREMETAL_OS_IMG: '' BAREMETAL_OS_IMG_TYPE: '' BAREMETAL_REPO: https://github.com/openstack-k8s-operators/openstack-baremetal-operator.git BAREMETAL_TIMEOUT: 20m BASH_IMG: quay.io/openstack-k8s-operators/bash:latest BGP_ASN: '64999' BGP_LEAF_1: 
100.65.4.1 BGP_LEAF_2: 100.64.4.1 BGP_OVN_ROUTING: 'false' BGP_PEER_ASN: '64999' BGP_SOURCE_IP: 172.30.4.2 BGP_SOURCE_IP6: f00d:f00d:f00d:f00d:f00d:f00d:f00d:42 BMAAS_BRIDGE_IPV4_PREFIX: 172.20.1.2/24 BMAAS_BRIDGE_IPV6_PREFIX: fd00:bbbb::2/64 BMAAS_INSTANCE_DISK_SIZE: '20' BMAAS_INSTANCE_MEMORY: '4096' BMAAS_INSTANCE_NAME_PREFIX: crc-bmaas BMAAS_INSTANCE_NET_MODEL: virtio BMAAS_INSTANCE_OS_VARIANT: centos-stream9 BMAAS_INSTANCE_VCPUS: '2' BMAAS_INSTANCE_VIRT_TYPE: kvm BMAAS_IPV4: 'true' BMAAS_IPV6: 'false' BMAAS_LIBVIRT_USER: sushyemu BMAAS_METALLB_ADDRESS_POOL: 172.20.1.64/26 BMAAS_METALLB_POOL_NAME: baremetal BMAAS_NETWORK_IPV4_PREFIX: 172.20.1.1/24 BMAAS_NETWORK_IPV6_PREFIX: fd00:bbbb::1/64 BMAAS_NETWORK_NAME: crc-bmaas BMAAS_NODE_COUNT: '1' BMAAS_OCP_INSTANCE_NAME: crc BMAAS_REDFISH_PASSWORD: password BMAAS_REDFISH_USERNAME: admin BMAAS_ROUTE_LIBVIRT_NETWORKS: crc-bmaas,crc,default BMAAS_SUSHY_EMULATOR_DRIVER: libvirt BMAAS_SUSHY_EMULATOR_IMAGE: quay.io/metal3-io/sushy-tools:latest BMAAS_SUSHY_EMULATOR_NAMESPACE: sushy-emulator BMAAS_SUSHY_EMULATOR_OS_CLIENT_CONFIG_FILE: /etc/openstack/clouds.yaml BMAAS_SUSHY_EMULATOR_OS_CLOUD: openstack BMH_NAMESPACE: openstack BMO_BRANCH: release-0.9 BMO_CLEANUP: 'true' BMO_COMMIT_HASH: '' BMO_IPA_BRANCH: stable/2024.1 BMO_IRONIC_HOST: 192.168.122.10 BMO_PROVISIONING_INTERFACE: '' BMO_REPO: https://github.com/metal3-io/baremetal-operator BMO_SETUP: false BMO_SETUP_ROUTE_REPLACE: 'true' BM_CTLPLANE_INTERFACE: enp1s0 BM_INSTANCE_MEMORY: '8192' BM_INSTANCE_NAME_PREFIX: edpm-compute-baremetal BM_INSTANCE_NAME_SUFFIX: '0' BM_NETWORK_NAME: default BM_NODE_COUNT: '1' BM_ROOT_PASSWORD: '' BM_ROOT_PASSWORD_SECRET: '' CEILOMETER_CENTRAL_DEPL_IMG: unused CEILOMETER_NOTIFICATION_DEPL_IMG: unused CEPH_BRANCH: release-1.15 CEPH_CLIENT: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/toolbox.yaml CEPH_COMMON: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/common.yaml CEPH_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/cluster-test.yaml CEPH_CRDS: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/crds.yaml CEPH_IMG: quay.io/ceph/demo:latest-squid CEPH_OP: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/operator-openshift.yaml CEPH_REPO: https://github.com/rook/rook.git CERTMANAGER_TIMEOUT: 300s CHECKOUT_FROM_OPENSTACK_REF: 'true' CINDER: config/samples/cinder_v1beta1_cinder.yaml CINDERAPI_DEPL_IMG: unused CINDERBKP_DEPL_IMG: unused CINDERSCH_DEPL_IMG: unused CINDERVOL_DEPL_IMG: unused CINDER_BRANCH: main CINDER_COMMIT_HASH: '' CINDER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/config/samples/cinder_v1beta1_cinder.yaml CINDER_IMG: quay.io/openstack-k8s-operators/cinder-operator-index:latest CINDER_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/kuttl-test.yaml CINDER_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/test/kuttl/tests CINDER_KUTTL_NAMESPACE: cinder-kuttl-tests CINDER_REPO: https://github.com/openstack-k8s-operators/cinder-operator.git CLEANUP_DIR_CMD: rm -Rf CRC_BGP_NIC_1_MAC: '52:54:00:11:11:11' CRC_BGP_NIC_2_MAC: '52:54:00:11:11:12' CRC_HTTPS_PROXY: '' CRC_HTTP_PROXY: '' CRC_STORAGE_NAMESPACE: crc-storage CRC_STORAGE_RETRIES: '3' CRC_URL: '''https://developers.redhat.com/content-gateway/rest/mirror/pub/openshift-v4/clients/crc/latest/crc-linux-amd64.tar.xz''' CRC_VERSION: latest 
DATAPLANE_ANSIBLE_SECRET: dataplane-ansible-ssh-private-key-secret DATAPLANE_ANSIBLE_USER: '' DATAPLANE_COMPUTE_IP: 192.168.122.100 DATAPLANE_CONTAINER_PREFIX: openstack DATAPLANE_CONTAINER_TAG: current-podified DATAPLANE_CUSTOM_SERVICE_RUNNER_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest DATAPLANE_DEFAULT_GW: 192.168.122.1 DATAPLANE_EXTRA_NOVA_CONFIG_FILE: /dev/null DATAPLANE_GROWVOLS_ARGS: /=8GB /tmp=1GB /home=1GB /var=100% DATAPLANE_KUSTOMIZE_SCENARIO: preprovisioned DATAPLANE_NETWORKER_IP: 192.168.122.200 DATAPLANE_NETWORK_INTERFACE_NAME: eth0 DATAPLANE_NOVA_NFS_PATH: '' DATAPLANE_NTP_SERVER: pool.ntp.org DATAPLANE_PLAYBOOK: osp.edpm.download_cache DATAPLANE_REGISTRY_URL: quay.io/podified-antelope-centos9 DATAPLANE_RUNNER_IMG: '' DATAPLANE_SERVER_ROLE: compute DATAPLANE_SSHD_ALLOWED_RANGES: '[''192.168.122.0/24'']' DATAPLANE_TIMEOUT: 30m DATAPLANE_TLS_ENABLED: 'true' DATAPLANE_TOTAL_NETWORKER_NODES: '1' DATAPLANE_TOTAL_NODES: '1' DBSERVICE: galera DESIGNATE: config/samples/designate_v1beta1_designate.yaml DESIGNATE_BRANCH: main DESIGNATE_COMMIT_HASH: '' DESIGNATE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/config/samples/designate_v1beta1_designate.yaml DESIGNATE_IMG: quay.io/openstack-k8s-operators/designate-operator-index:latest DESIGNATE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/kuttl-test.yaml DESIGNATE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/test/kuttl/tests DESIGNATE_KUTTL_NAMESPACE: designate-kuttl-tests DESIGNATE_REPO: https://github.com/openstack-k8s-operators/designate-operator.git DNSDATA: config/samples/network_v1beta1_dnsdata.yaml DNSDATA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsdata.yaml DNSMASQ: config/samples/network_v1beta1_dnsmasq.yaml DNSMASQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsmasq.yaml DNS_DEPL_IMG: unused DNS_DOMAIN: localdomain DOWNLOAD_TOOLS_SELECTION: all EDPM_ATTACH_EXTNET: 'true' EDPM_COMPUTE_ADDITIONAL_HOST_ROUTES: '''[]''' EDPM_COMPUTE_ADDITIONAL_NETWORKS: '''[]''' EDPM_COMPUTE_CELLS: '1' EDPM_COMPUTE_CEPH_ENABLED: 'true' EDPM_COMPUTE_CEPH_NOVA: 'true' EDPM_COMPUTE_DHCP_AGENT_ENABLED: 'true' EDPM_COMPUTE_SRIOV_ENABLED: 'true' EDPM_COMPUTE_SUFFIX: '0' EDPM_CONFIGURE_DEFAULT_ROUTE: 'true' EDPM_CONFIGURE_HUGEPAGES: 'false' EDPM_CONFIGURE_NETWORKING: 'true' EDPM_FIRSTBOOT_EXTRA: /tmp/edpm-firstboot-extra EDPM_NETWORKER_SUFFIX: '0' EDPM_TOTAL_NETWORKERS: '1' EDPM_TOTAL_NODES: '1' GALERA_REPLICAS: '' GENERATE_SSH_KEYS: 'true' GIT_CLONE_OPTS: '' GLANCE: config/samples/glance_v1beta1_glance.yaml GLANCEAPI_DEPL_IMG: unused GLANCE_BRANCH: main GLANCE_COMMIT_HASH: '' GLANCE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/config/samples/glance_v1beta1_glance.yaml GLANCE_IMG: quay.io/openstack-k8s-operators/glance-operator-index:latest GLANCE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/kuttl-test.yaml GLANCE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/test/kuttl/tests GLANCE_KUTTL_NAMESPACE: glance-kuttl-tests GLANCE_REPO: https://github.com/openstack-k8s-operators/glance-operator.git HEAT: config/samples/heat_v1beta1_heat.yaml HEATAPI_DEPL_IMG: unused HEATCFNAPI_DEPL_IMG: unused HEATENGINE_DEPL_IMG: unused HEAT_AUTH_ENCRYPTION_KEY: 76**********f0 HEAT_BRANCH: 
main HEAT_COMMIT_HASH: '' HEAT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/config/samples/heat_v1beta1_heat.yaml HEAT_IMG: quay.io/openstack-k8s-operators/heat-operator-index:latest HEAT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/kuttl-test.yaml HEAT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/test/kuttl/tests HEAT_KUTTL_NAMESPACE: heat-kuttl-tests HEAT_REPO: https://github.com/openstack-k8s-operators/heat-operator.git HEAT_SERVICE_ENABLED: 'true' HORIZON: config/samples/horizon_v1beta1_horizon.yaml HORIZON_BRANCH: main HORIZON_COMMIT_HASH: '' HORIZON_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/config/samples/horizon_v1beta1_horizon.yaml HORIZON_DEPL_IMG: unused HORIZON_IMG: quay.io/openstack-k8s-operators/horizon-operator-index:latest HORIZON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/kuttl-test.yaml HORIZON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/test/kuttl/tests HORIZON_KUTTL_NAMESPACE: horizon-kuttl-tests HORIZON_REPO: https://github.com/openstack-k8s-operators/horizon-operator.git INFRA_BRANCH: main INFRA_COMMIT_HASH: '' INFRA_IMG: quay.io/openstack-k8s-operators/infra-operator-index:latest INFRA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/kuttl-test.yaml INFRA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/test/kuttl/tests INFRA_KUTTL_NAMESPACE: infra-kuttl-tests INFRA_REPO: https://github.com/openstack-k8s-operators/infra-operator.git INSTALL_CERT_MANAGER: 'true' INSTALL_NMSTATE: true || false INSTALL_NNCP: true || false INTERNALAPI_HOST_ROUTES: '' IPV6_LAB_IPV4_NETWORK_IPADDRESS: 172.30.0.1/24 IPV6_LAB_IPV6_NETWORK_IPADDRESS: fd00:abcd:abcd:fc00::1/64 IPV6_LAB_LIBVIRT_STORAGE_POOL: default IPV6_LAB_MANAGE_FIREWALLD: 'true' IPV6_LAB_NAT64_HOST_IPV4: 172.30.0.2/24 IPV6_LAB_NAT64_HOST_IPV6: fd00:abcd:abcd:fc00::2/64 IPV6_LAB_NAT64_INSTANCE_NAME: nat64-router IPV6_LAB_NAT64_IPV6_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_NAT64_TAYGA_DYNAMIC_POOL: 192.168.255.0/24 IPV6_LAB_NAT64_TAYGA_IPV4: 192.168.255.1 IPV6_LAB_NAT64_TAYGA_IPV6: fd00:abcd:abcd:fc00::3 IPV6_LAB_NAT64_TAYGA_IPV6_PREFIX: fd00:abcd:abcd:fcff::/96 IPV6_LAB_NAT64_UPDATE_PACKAGES: 'false' IPV6_LAB_NETWORK_NAME: nat64 IPV6_LAB_SNO_CLUSTER_NETWORK: fd00:abcd:0::/48 IPV6_LAB_SNO_HOST_IP: fd00:abcd:abcd:fc00::11 IPV6_LAB_SNO_HOST_PREFIX: '64' IPV6_LAB_SNO_INSTANCE_NAME: sno IPV6_LAB_SNO_MACHINE_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_SNO_OCP_MIRROR_URL: https://mirror.openshift.com/pub/openshift-v4/clients/ocp IPV6_LAB_SNO_OCP_VERSION: latest-4.14 IPV6_LAB_SNO_SERVICE_NETWORK: fd00:abcd:abcd:fc03::/112 IPV6_LAB_SSH_PUB_KEY: /home/zuul/.ssh/id_rsa.pub IPV6_LAB_WORK_DIR: /home/zuul/.ipv6lab IRONIC: config/samples/ironic_v1beta1_ironic.yaml IRONICAPI_DEPL_IMG: unused IRONICCON_DEPL_IMG: unused IRONICINS_DEPL_IMG: unused IRONICNAG_DEPL_IMG: unused IRONICPXE_DEPL_IMG: unused IRONIC_BRANCH: main IRONIC_COMMIT_HASH: '' IRONIC_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/config/samples/ironic_v1beta1_ironic.yaml IRONIC_IMAGE: quay.io/metal3-io/ironic IRONIC_IMAGE_TAG: release-24.1 IRONIC_IMG: quay.io/openstack-k8s-operators/ironic-operator-index:latest IRONIC_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/kuttl-test.yaml IRONIC_KUTTL_DIR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/test/kuttl/tests IRONIC_KUTTL_NAMESPACE: ironic-kuttl-tests IRONIC_REPO: https://github.com/openstack-k8s-operators/ironic-operator.git KEYSTONEAPI: config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_DEPL_IMG: unused KEYSTONE_BRANCH: main KEYSTONE_COMMIT_HASH: '' KEYSTONE_FEDERATION_CLIENT_SECRET: CO**********6f KEYSTONE_FEDERATION_CRYPTO_PASSPHRASE: openstack KEYSTONE_IMG: quay.io/openstack-k8s-operators/keystone-operator-index:latest KEYSTONE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/kuttl-test.yaml KEYSTONE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/test/kuttl/tests KEYSTONE_KUTTL_NAMESPACE: keystone-kuttl-tests KEYSTONE_REPO: https://github.com/openstack-k8s-operators/keystone-operator.git KUBEADMIN_PWD: '12345678' LIBVIRT_SECRET: libvirt-secret LOKI_DEPLOY_MODE: openshift-network LOKI_DEPLOY_NAMESPACE: netobserv LOKI_DEPLOY_SIZE: 1x.demo LOKI_NAMESPACE: openshift-operators-redhat LOKI_OPERATOR_GROUP: openshift-operators-redhat-loki LOKI_SUBSCRIPTION: loki-operator LVMS_CR: '1' MANILA: config/samples/manila_v1beta1_manila.yaml MANILAAPI_DEPL_IMG: unused MANILASCH_DEPL_IMG: unused MANILASHARE_DEPL_IMG: unused MANILA_BRANCH: main MANILA_COMMIT_HASH: '' MANILA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/config/samples/manila_v1beta1_manila.yaml MANILA_IMG: quay.io/openstack-k8s-operators/manila-operator-index:latest MANILA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/kuttl-test.yaml MANILA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/test/kuttl/tests MANILA_KUTTL_NAMESPACE: manila-kuttl-tests MANILA_REPO: https://github.com/openstack-k8s-operators/manila-operator.git MANILA_SERVICE_ENABLED: 'true' MARIADB: config/samples/mariadb_v1beta1_galera.yaml MARIADB_BRANCH: main MARIADB_CHAINSAW_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/config.yaml MARIADB_CHAINSAW_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/tests MARIADB_CHAINSAW_NAMESPACE: mariadb-chainsaw-tests MARIADB_COMMIT_HASH: '' MARIADB_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/config/samples/mariadb_v1beta1_galera.yaml MARIADB_DEPL_IMG: unused MARIADB_IMG: quay.io/openstack-k8s-operators/mariadb-operator-index:latest MARIADB_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/kuttl-test.yaml MARIADB_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/kuttl/tests MARIADB_KUTTL_NAMESPACE: mariadb-kuttl-tests MARIADB_REPO: https://github.com/openstack-k8s-operators/mariadb-operator.git MEMCACHED: config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_DEPL_IMG: unused METADATA_SHARED_SECRET: '12**********42' METALLB_IPV6_POOL: fd00:aaaa::80-fd00:aaaa::90 METALLB_POOL: 192.168.122.80-192.168.122.90 MICROSHIFT: '0' NAMESPACE: openstack NETCONFIG: config/samples/network_v1beta1_netconfig.yaml NETCONFIG_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_netconfig.yaml NETCONFIG_DEPL_IMG: unused NETOBSERV_DEPLOY_NAMESPACE: netobserv NETOBSERV_NAMESPACE: openshift-netobserv-operator NETOBSERV_OPERATOR_GROUP: openshift-netobserv-operator-net NETOBSERV_SUBSCRIPTION: netobserv-operator NETWORK_BGP: 'false' NETWORK_DESIGNATE_ADDRESS_PREFIX: 172.28.0 NETWORK_DESIGNATE_EXT_ADDRESS_PREFIX: 172.50.0 NETWORK_INTERNALAPI_ADDRESS_PREFIX: 172.17.0 NETWORK_ISOLATION: 'true' NETWORK_ISOLATION_INSTANCE_NAME: crc NETWORK_ISOLATION_IPV4: 'true' NETWORK_ISOLATION_IPV4_ADDRESS: 172.16.1.1/24 NETWORK_ISOLATION_IPV4_NAT: 'true' NETWORK_ISOLATION_IPV6: 'false' NETWORK_ISOLATION_IPV6_ADDRESS: fd00:aaaa::1/64 NETWORK_ISOLATION_IP_ADDRESS: 192.168.122.10 NETWORK_ISOLATION_MAC: '52:54:00:11:11:10' NETWORK_ISOLATION_NETWORK_NAME: net-iso NETWORK_ISOLATION_NET_NAME: default NETWORK_ISOLATION_USE_DEFAULT_NETWORK: 'true' NETWORK_MTU: '1500' NETWORK_STORAGEMGMT_ADDRESS_PREFIX: 172.20.0 NETWORK_STORAGE_ADDRESS_PREFIX: 172.18.0 NETWORK_STORAGE_MACVLAN: '' NETWORK_TENANT_ADDRESS_PREFIX: 172.19.0 NETWORK_VLAN_START: '20' NETWORK_VLAN_STEP: '1' NEUTRONAPI: config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_DEPL_IMG: unused NEUTRON_BRANCH: main NEUTRON_COMMIT_HASH: '' NEUTRON_IMG: quay.io/openstack-k8s-operators/neutron-operator-index:latest NEUTRON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/kuttl-test.yaml NEUTRON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/test/kuttl/tests NEUTRON_KUTTL_NAMESPACE: neutron-kuttl-tests NEUTRON_REPO: https://github.com/openstack-k8s-operators/neutron-operator.git NFS_HOME: /home/nfs NMSTATE_NAMESPACE: openshift-nmstate NMSTATE_OPERATOR_GROUP: openshift-nmstate-tn6k8 NMSTATE_SUBSCRIPTION: kubernetes-nmstate-operator NNCP_ADDITIONAL_HOST_ROUTES: '' NNCP_BGP_1_INTERFACE: enp7s0 NNCP_BGP_1_IP_ADDRESS: 100.65.4.2 NNCP_BGP_2_INTERFACE: enp8s0 NNCP_BGP_2_IP_ADDRESS: 100.64.4.2 NNCP_BRIDGE: ospbr NNCP_CLEANUP_TIMEOUT: 120s NNCP_CTLPLANE_IPV6_ADDRESS_PREFIX: 'fd00:aaaa::' NNCP_CTLPLANE_IPV6_ADDRESS_SUFFIX: '10' NNCP_CTLPLANE_IP_ADDRESS_PREFIX: 192.168.122 NNCP_CTLPLANE_IP_ADDRESS_SUFFIX: '10' NNCP_DNS_SERVER: 192.168.122.1 NNCP_DNS_SERVER_IPV6: fd00:aaaa::1 NNCP_GATEWAY: 192.168.122.1 NNCP_GATEWAY_IPV6: fd00:aaaa::1 NNCP_INTERFACE: enp6s0 NNCP_NODES: '' NNCP_TIMEOUT: 240s NOVA: config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_BRANCH: main NOVA_COMMIT_HASH: '' NOVA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/nova-operator/config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_IMG: quay.io/openstack-k8s-operators/nova-operator-index:latest NOVA_REPO: https://github.com/openstack-k8s-operators/nova-operator.git NUMBER_OF_INSTANCES: '1' OCP_NETWORK_NAME: crc OCTAVIA: config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_BRANCH: main OCTAVIA_COMMIT_HASH: '' OCTAVIA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_IMG: quay.io/openstack-k8s-operators/octavia-operator-index:latest OCTAVIA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/kuttl-test.yaml OCTAVIA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/test/kuttl/tests OCTAVIA_KUTTL_NAMESPACE: 
octavia-kuttl-tests OCTAVIA_REPO: https://github.com/openstack-k8s-operators/octavia-operator.git OKD: 'false' OPENSTACK_BRANCH: main OPENSTACK_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-bundle:latest OPENSTACK_COMMIT_HASH: '' OPENSTACK_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_CRDS_DIR: openstack_crds OPENSTACK_CTLPLANE: config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_IMG: quay.io/openstack-k8s-operators/openstack-operator-index:latest OPENSTACK_K8S_BRANCH: main OPENSTACK_K8S_TAG: latest OPENSTACK_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/kuttl-test.yaml OPENSTACK_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/test/kuttl/tests OPENSTACK_KUTTL_NAMESPACE: openstack-kuttl-tests OPENSTACK_NEUTRON_CUSTOM_CONF: '' OPENSTACK_REPO: https://github.com/openstack-k8s-operators/openstack-operator.git OPENSTACK_STORAGE_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-storage-bundle:latest OPERATOR_BASE_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator OPERATOR_CHANNEL: '' OPERATOR_NAMESPACE: openstack-operators OPERATOR_SOURCE: '' OPERATOR_SOURCE_NAMESPACE: '' OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm OVNCONTROLLER: config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_NMAP: 'true' OVNDBS: config/samples/ovn_v1beta1_ovndbcluster.yaml OVNDBS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovndbcluster.yaml OVNNORTHD: config/samples/ovn_v1beta1_ovnnorthd.yaml OVNNORTHD_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovnnorthd.yaml OVN_BRANCH: main OVN_COMMIT_HASH: '' OVN_IMG: quay.io/openstack-k8s-operators/ovn-operator-index:latest OVN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/kuttl-test.yaml OVN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/test/kuttl/tests OVN_KUTTL_NAMESPACE: ovn-kuttl-tests OVN_REPO: https://github.com/openstack-k8s-operators/ovn-operator.git PASSWORD: '12**********78' PLACEMENTAPI: config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_DEPL_IMG: unused PLACEMENT_BRANCH: main PLACEMENT_COMMIT_HASH: '' PLACEMENT_IMG: quay.io/openstack-k8s-operators/placement-operator-index:latest PLACEMENT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/kuttl-test.yaml PLACEMENT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/test/kuttl/tests PLACEMENT_KUTTL_NAMESPACE: placement-kuttl-tests PLACEMENT_REPO: https://github.com/openstack-k8s-operators/placement-operator.git PULL_SECRET: /home/zuul/src/review.rdoproject.org/rdo-jobs/playbooks/data_plane_adoption/pull-secret.txt RABBITMQ: docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_BRANCH: patches RABBITMQ_COMMIT_HASH: '' RABBITMQ_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/rabbitmq-operator/docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_DEPL_IMG: unused RABBITMQ_IMG: quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest RABBITMQ_REPO: https://github.com/openstack-k8s-operators/rabbitmq-cluster-operator.git REDHAT_OPERATORS: 'false' REDIS: config/samples/redis_v1beta1_redis.yaml REDIS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator-redis/config/samples/redis_v1beta1_redis.yaml REDIS_DEPL_IMG: unused RH_REGISTRY_PWD: '' RH_REGISTRY_USER: '' SECRET: os**********et SG_CORE_DEPL_IMG: unused STANDALONE_COMPUTE_DRIVER: libvirt STANDALONE_EXTERNAL_NET_PREFFIX: 172.21.0 STANDALONE_INTERNALAPI_NET_PREFIX: 172.17.0 STANDALONE_STORAGEMGMT_NET_PREFIX: 172.20.0 STANDALONE_STORAGE_NET_PREFIX: 172.18.0 STANDALONE_TENANT_NET_PREFIX: 172.19.0 STORAGEMGMT_HOST_ROUTES: '' STORAGE_CLASS: local-storage STORAGE_HOST_ROUTES: '' SWIFT: config/samples/swift_v1beta1_swift.yaml SWIFT_BRANCH: main SWIFT_COMMIT_HASH: '' SWIFT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/config/samples/swift_v1beta1_swift.yaml SWIFT_IMG: quay.io/openstack-k8s-operators/swift-operator-index:latest SWIFT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/kuttl-test.yaml SWIFT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/test/kuttl/tests SWIFT_KUTTL_NAMESPACE: swift-kuttl-tests SWIFT_REPO: https://github.com/openstack-k8s-operators/swift-operator.git TELEMETRY: config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_BRANCH: main TELEMETRY_COMMIT_HASH: '' TELEMETRY_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_IMG: quay.io/openstack-k8s-operators/telemetry-operator-index:latest TELEMETRY_KUTTL_BASEDIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator TELEMETRY_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/kuttl-test.yaml TELEMETRY_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/test/kuttl/suites TELEMETRY_KUTTL_NAMESPACE: telemetry-kuttl-tests TELEMETRY_KUTTL_RELPATH: test/kuttl/suites TELEMETRY_REPO: https://github.com/openstack-k8s-operators/telemetry-operator.git TENANT_HOST_ROUTES: '' TIMEOUT: 300s TLS_ENABLED: 'false' tripleo_deploy: 'export REGISTRY_USER:' cifmw_install_yamls_environment: BMO_SETUP: false CHECKOUT_FROM_OPENSTACK_REF: 'true' KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm cifmw_installyamls_repos: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls cifmw_installyamls_repos_relative: src/github.com/openstack-k8s-operators/install_yamls cifmw_nolog: true cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_context: default/api-crc-testing:6443/kubeadmin cifmw_openshift_kubeconfig: /home/zuul/.crc/machines/crc/kubeconfig cifmw_openshift_password: '12**********89' cifmw_openshift_setup_skip_internal_registry_tls_verify: true cifmw_openshift_skip_tls_verify: true cifmw_openshift_token: sha256~DRDkWcBbIdKn0u9Nxm-2md9dPVf-gVXLRNv2O8KcEp4 cifmw_openshift_user: kubeadmin cifmw_openstack_k8s_operators_org_url: https://github.com/openstack-k8s-operators cifmw_openstack_namespace: openstack 
cifmw_os_must_gather_additional_namespaces: kuttl,openshift-storage,openshift-marketplace,openshift-operators,sushy-emulator,tobiko cifmw_os_must_gather_dump_db: ALL cifmw_os_must_gather_host_network: false cifmw_os_must_gather_image: quay.io/openstack-k8s-operators/openstack-must-gather:latest cifmw_os_must_gather_image_push: true cifmw_os_must_gather_image_registry: quay.rdoproject.org/openstack-k8s-operators cifmw_os_must_gather_kubeconfig: '{{ ansible_user_dir }}/.kube/config' cifmw_os_must_gather_namespaces: - '{{ operator_namespace }}' - '{{ cifmw_openstack_namespace }}' - baremetal-operator-system - openshift-machine-api - cert-manager - openshift-nmstate - openshift-marketplace - metallb-system - crc-storage cifmw_os_must_gather_output_dir: '{{ cifmw_basedir }}' cifmw_os_must_gather_output_log_dir: '{{ cifmw_os_must_gather_output_dir }}/logs/openstack-must-gather' cifmw_os_must_gather_repo_path: '{{ ansible_user_dir }}/src/github.com/openstack-k8s-operators/openstack-must-gather' cifmw_os_must_gather_timeout: 30m cifmw_os_must_gather_volume_percentage: 80 cifmw_path: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin cifmw_repo: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework cifmw_repo_relative: src/github.com/openstack-k8s-operators/ci-framework cifmw_run_hook_debug: '{{ (ansible_verbosity | int) >= 2 | bool }}' cifmw_status: changed: false failed: false stat: atime: 1769083465.461972 attr_flags: '' attributes: [] block_size: 4096 blocks: 8 charset: binary ctime: 1769083468.740053 dev: 64513 device_type: 0 executable: true exists: true gid: 1000 gr_name: zuul inode: 150999587 isblk: false ischr: false isdir: true isfifo: false isgid: false islnk: false isreg: false issock: false isuid: false mimetype: inode/directory mode: '0755' mtime: 1769083468.740053 nlink: 21 path: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework pw_name: zuul readable: true rgrp: true roth: true rusr: true size: 4096 uid: 1000 version: '3297015793' wgrp: false woth: false writeable: true wusr: true xgrp: true xoth: true xusr: true cifmw_success_flag: changed: false failed: false stat: atime: 1769092060.8322425 attr_flags: '' attributes: [] block_size: 4096 blocks: 0 charset: binary checksum: da39a3ee5e6b4b0d3255bfef95601890afd80709 ctime: 1769092060.8322425 dev: 64513 device_type: 0 executable: false exists: true gid: 1000 gr_name: zuul inode: 4414570 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mimetype: inode/x-empty mode: '0644' mtime: 1769092060.8322425 nlink: 1 path: /home/zuul/cifmw-success pw_name: zuul readable: true rgrp: true roth: true rusr: true size: 0 uid: 1000 version: '3766895722' wgrp: false woth: false writeable: true wusr: true xgrp: false xoth: false xusr: false cifmw_use_crc: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller cloud_domain: ooo.test crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_instance_default_net_config: mtu: '1500' range: 192.168.122.0/24 router_net: '' transparent: true crc_ci_bootstrap_instance_nm_vlan_networks: - key: internal-api value: config_nm: false ip: 172.17.0.100 - key: storage value: config_nm: false ip: 172.18.0.100 - key: storage_mgmt value: config_nm: false ip: 172.20.0.100 - key: tenant value: config_nm: false ip: 172.19.0.100 crc_ci_bootstrap_instance_parent_port_create_yaml: 
admin_state_up: true allowed_address_pairs: [] binding_host_id: null binding_profile: {} binding_vif_details: {} binding_vif_type: null binding_vnic_type: normal created_at: '2026-01-22T11:53:24Z' data_plane_status: null description: '' device_id: '' device_owner: '' device_profile: null dns_assignment: - fqdn: host-192-168-122-100.openstacklocal. hostname: host-192-168-122-100 ip_address: 192.168.122.100 dns_domain: '' dns_name: '' extra_dhcp_opts: [] fixed_ips: - ip_address: 192.168.122.100 subnet_id: 7a5d6965-a3da-4c6f-bc86-42f95e634461 hardware_offload_type: null hints: '' id: 6ee4c47b-3df3-4385-88f3-b108a8f825dd ip_allocation: immediate mac_address: fa:16:3e:b5:53:e5 name: standalone-71c2ce28-24b8-4530-b131-72e04889a182 network_id: b176f5a8-05bf-4260-94f7-c8fdaa6712e1 numa_affinity_policy: null port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 propagate_uplink_status: null qos_network_policy_id: null qos_policy_id: null resource_request: null revision_number: 1 security_group_ids: [] status: DOWN tags: [] trunk_details: null trusted: null updated_at: '2026-01-22T11:53:24Z' crc_ci_bootstrap_network_name: zuul-ci-net-3911aa2e crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 internal-api: ip: 172.17.0.4 storage: ip: 172.18.0.4 storage_mgmt: ip: 172.20.0.4 tenant: ip: 172.19.0.4 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 storage_mgmt: ip: 172.20.0.5 tenant: ip: 172.19.0.5 standalone: networks: default: config_nm: false ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 storage_mgmt: config_nm: false ip: 172.20.0.100 tenant: config_nm: false ip: 172.19.0.100 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 storage_mgmt: range: 172.20.0.0/24 vlan: 23 tenant: range: 172.19.0.0/24 vlan: 22 crc_ci_bootstrap_networks_out: controller: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.11/24 mac: fa:16:3e:b2:1f:bf mtu: '1500' internal-api: connection: ci-private-network-20 iface: eth1.20 ip: 172.17.0.4/24 mac: 52:54:00:91:b6:a0 mtu: '1496' parent_iface: eth1 vlan: 20 storage: connection: ci-private-network-21 iface: eth1.21 ip: 172.18.0.4/24 mac: 52:54:00:8c:4a:57 mtu: '1496' parent_iface: eth1 vlan: 21 storage_mgmt: connection: ci-private-network-23 iface: eth1.23 ip: 172.20.0.4/24 mac: 52:54:00:e6:85:75 mtu: '1496' parent_iface: eth1 vlan: 23 tenant: connection: ci-private-network-22 iface: eth1.22 ip: 172.19.0.4/24 mac: 52:54:00:34:d6:b6 mtu: '1496' parent_iface: eth1 vlan: 22 crc: default: connection: ci-private-network gw: 192.168.122.1 iface: ens7 ip: 192.168.122.10/24 mac: fa:16:3e:ee:8d:ea mtu: '1500' internal-api: connection: ci-private-network-20 iface: ens7.20 ip: 172.17.0.5/24 mac: 52:54:00:98:87:e6 mtu: '1496' parent_iface: ens7 vlan: 20 storage: connection: ci-private-network-21 iface: ens7.21 ip: 172.18.0.5/24 mac: 52:54:00:fe:f9:a5 mtu: '1496' parent_iface: ens7 vlan: 21 storage_mgmt: connection: ci-private-network-23 iface: ens7.23 ip: 172.20.0.5/24 mac: 52:54:00:66:52:20 mtu: '1496' parent_iface: ens7 vlan: 23 tenant: connection: ci-private-network-22 iface: ens7.22 ip: 172.19.0.5/24 mac: 52:54:00:c7:ca:3e mtu: '1496' parent_iface: ens7 vlan: 22 standalone: default: connection: 
ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.100/24 mac: fa:16:3e:b5:53:e5 mtu: '1500' internal-api: iface: eth1.20 ip: 172.17.0.100/24 mac: 52:54:00:87:a9:f6 mtu: '1496' parent_iface: eth1 vlan: 20 storage: iface: eth1.21 ip: 172.18.0.100/24 mac: 52:54:00:4f:71:e3 mtu: '1496' parent_iface: eth1 vlan: 21 storage_mgmt: iface: eth1.23 ip: 172.20.0.100/24 mac: 52:54:00:40:23:1f mtu: '1496' parent_iface: eth1 vlan: 23 tenant: iface: eth1.22 ip: 172.19.0.100/24 mac: 52:54:00:f6:c7:15 mtu: '1496' parent_iface: eth1 vlan: 22 crc_ci_bootstrap_private_net_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2026-01-22T11:51:59Z' description: '' dns_domain: '' id: b176f5a8-05bf-4260-94f7-c8fdaa6712e1 ipv4_address_scope: null ipv6_address_scope: null is_default: false is_vlan_qinq: null is_vlan_transparent: true l2_adjacency: true mtu: 1500 name: zuul-ci-net-3911aa2e port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 provider:network_type: null provider:physical_network: null provider:segmentation_id: null qos_policy_id: null revision_number: 1 router:external: false segments: null shared: false status: ACTIVE subnets: [] tags: [] updated_at: '2026-01-22T11:51:59Z' crc_ci_bootstrap_private_router_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2026-01-22T11:52:05Z' description: '' enable_ndp_proxy: null external_gateway_info: null flavor_id: null id: 68d8267d-56ed-4ee3-aa04-a2f7bd2ffe0b name: zuul-ci-subnet-router-3911aa2e project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 1 routes: [] status: ACTIVE tags: [] tenant_id: 4b633c451ac74233be3721a3635275e5 updated_at: '2026-01-22T11:52:05Z' crc_ci_bootstrap_private_subnet_create_yaml: allocation_pools: - end: 192.168.122.254 start: 192.168.122.2 cidr: 192.168.122.0/24 created_at: '2026-01-22T11:52:03Z' description: '' dns_nameservers: [] dns_publish_fixed_ip: null enable_dhcp: false gateway_ip: 192.168.122.1 host_routes: [] id: 7a5d6965-a3da-4c6f-bc86-42f95e634461 ip_version: 4 ipv6_address_mode: null ipv6_ra_mode: null name: zuul-ci-subnet-3911aa2e network_id: b176f5a8-05bf-4260-94f7-c8fdaa6712e1 project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 0 segment_id: null service_types: [] subnetpool_id: null tags: [] updated_at: '2026-01-22T11:52:03Z' crc_ci_bootstrap_provider_dns: - 199.204.44.24 - 199.204.47.54 crc_ci_bootstrap_router_name: zuul-ci-subnet-router-3911aa2e crc_ci_bootstrap_subnet_name: zuul-ci-subnet-3911aa2e discovered_interpreter_python: /usr/bin/python3 dpa_test_suite: test-minimal enable_barbican: 'false' enable_octavia: 'true' enable_ramdisk: true enable_telemetry: 'true' enable_tls: 'true' environment: - ANSIBLE_LOG_PATH: '{{ ansible_user_dir }}/ci-framework-data/logs/e2e-collect-logs-must-gather.log' gather_subset: - min group_names: - ungrouped groups: all: - controller - crc - standalone computes: [] ocps: - crc rh-subscription: - standalone ungrouped: &id001 - controller zuul_unreachable: [] hostvars: controller: _included_dir: changed: false failed: false stat: atime: 1769083616.899712 attr_flags: '' attributes: [] block_size: 4096 blocks: 0 charset: binary ctime: 1769083606.369452 dev: 64513 device_type: 0 executable: true exists: true gid: 1000 gr_name: zuul inode: 21031368 isblk: false ischr: false isdir: true isfifo: false isgid: false islnk: false isreg: false issock: false isuid: false mimetype: inode/directory mode: '0755' mtime: 1769083606.369452 nlink: 2 
path: /home/zuul/ci-framework-data/artifacts/parameters pw_name: zuul readable: true rgrp: true roth: true rusr: true size: 120 uid: 1000 version: '341120645' wgrp: false woth: false writeable: true wusr: true xgrp: true xoth: true xusr: true _included_file: changed: false failed: false stat: atime: 1769083618.0127394 attr_flags: '' attributes: [] block_size: 4096 blocks: 8 charset: us-ascii checksum: 0f8bc97dff23c5aaddd2afa844d83c2d10d2979b ctime: 1769083605.6694345 dev: 64513 device_type: 0 executable: false exists: true gid: 1000 gr_name: zuul inode: 125913305 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mimetype: text/plain mode: '0600' mtime: 1769083605.48243 nlink: 1 path: /home/zuul/ci-framework-data/artifacts/parameters/openshift-login-params.yml pw_name: zuul readable: true rgrp: false roth: false rusr: true size: 280 uid: 1000 version: '3749842572' wgrp: false woth: false writeable: true wusr: true xgrp: false xoth: false xusr: false _parsed_vars: changed: false content: Y2lmbXdfb3BlbnNoaWZ0X2FwaTogYXBpLmNyYy50ZXN0aW5nOjY0NDMKY2lmbXdfb3BlbnNoaWZ0X2NvbnRleHQ6IGRlZmF1bHQvYXBpLWNyYy10ZXN0aW5nOjY0NDMva3ViZWFkbWluCmNpZm13X29wZW5zaGlmdF9rdWJlY29uZmlnOiAvaG9tZS96dXVsLy5jcmMvbWFjaGluZXMvY3JjL2t1YmVjb25maWcKY2lmbXdfb3BlbnNoaWZ0X3Rva2VuOiBzaGEyNTZ+RFJEa1djQmJJZEtuMHU5TnhtLTJtZDlkUFZmLWdWWExSTnYyTzhLY0VwNApjaWZtd19vcGVuc2hpZnRfdXNlcjoga3ViZWFkbWluCg== encoding: base64 failed: false source: /home/zuul/ci-framework-data/artifacts/parameters/openshift-login-params.yml _tmp_dir: changed: true failed: false gid: 10001 group: zuul mode: '0700' owner: zuul path: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work/tmp/ansible.twkksh2e size: 40 state: directory uid: 10001 _yaml_files: changed: false examined: 4 failed: false files: - atime: 1769083486.596494 ctime: 1769083475.0302083 dev: 64513 gid: 1000 gr_name: zuul inode: 37775555 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mode: '0644' mtime: 1769083474.3421915 nlink: 1 path: /home/zuul/ci-framework-data/artifacts/parameters/zuul-params.yml pw_name: zuul rgrp: true roth: true rusr: true size: 15078 uid: 1000 wgrp: false woth: false wusr: true xgrp: false xoth: false xusr: false - atime: 1769083617.995739 ctime: 1769083606.372452 dev: 64513 gid: 1000 gr_name: zuul inode: 142638110 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mode: '0600' mtime: 1769083606.209448 nlink: 1 path: /home/zuul/ci-framework-data/artifacts/parameters/install-yamls-params.yml pw_name: zuul rgrp: false roth: false rusr: true size: 28122 uid: 1000 wgrp: false woth: false wusr: true xgrp: false xoth: false xusr: false - atime: 1769083567.6534958 ctime: 1769083564.620421 dev: 64513 gid: 1000 gr_name: zuul inode: 96504617 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mode: '0644' mtime: 1769083564.4534168 nlink: 1 path: /home/zuul/ci-framework-data/artifacts/parameters/custom-params.yml pw_name: zuul rgrp: true roth: true rusr: true size: 1252 uid: 1000 wgrp: false woth: false wusr: true xgrp: false xoth: false xusr: false - atime: 1769083618.0127394 ctime: 1769083605.6694345 dev: 64513 gid: 1000 gr_name: zuul inode: 125913305 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mode: '0600' mtime: 1769083605.48243 
nlink: 1 path: /home/zuul/ci-framework-data/artifacts/parameters/openshift-login-params.yml pw_name: zuul rgrp: false roth: false rusr: true size: 280 uid: 1000 wgrp: false woth: false wusr: true xgrp: false xoth: false xusr: false matched: 4 msg: All paths examined skipped_paths: {} adoption_extra_vars: 'supported_volume_backends: [] supported_backup_backends: [] ' ansible_all_ipv4_addresses: - 38.102.83.83 ansible_all_ipv6_addresses: - fe80::f816:3eff:fe4f:9ce2 ansible_apparmor: status: disabled ansible_architecture: x86_64 ansible_bios_date: 04/01/2014 ansible_bios_vendor: SeaBIOS ansible_bios_version: 1.15.0-1 ansible_board_asset_tag: NA ansible_board_name: NA ansible_board_serial: NA ansible_board_vendor: NA ansible_board_version: NA ansible_chassis_asset_tag: NA ansible_chassis_serial: NA ansible_chassis_vendor: QEMU ansible_chassis_version: pc-i440fx-6.2 ansible_check_mode: false ansible_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-661.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=22ac9141-3960-4912-b20e-19fc8a328d40 ansible_config_file: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/post_playbook_4/ansible.cfg ansible_connection: ssh ansible_date_time: date: '2026-01-22' day: '22' epoch: '1769092100' epoch_int: '1769092100' hour: '14' iso8601: '2026-01-22T14:28:20Z' iso8601_basic: 20260122T142820041113 iso8601_basic_short: 20260122T142820 iso8601_micro: '2026-01-22T14:28:20.041113Z' minute: '28' month: '01' second: '20' time: '14:28:20' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Thursday weekday_number: '4' weeknumber: '03' year: '2026' ansible_default_ipv4: address: 38.102.83.83 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: fa:16:3e:4f:9c:e2 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether ansible_default_ipv6: {} ansible_device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 masters: {} uuids: sr0: - 2026-01-22-11-49-38-00 vda1: - 22ac9141-3960-4912-b20e-19fc8a328d40 ansible_devices: sr0: holders: [] host: '' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2026-01-22-11-49-38-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: - 22ac9141-3960-4912-b20e-19fc8a328d40 sectors: '167770079' sectorsize: 512 size: 80.00 GB start: '2048' uuid: 22ac9141-3960-4912-b20e-19fc8a328d40 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '167772160' sectorsize: '512' size: 80.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 ansible_diff_mode: false ansible_distribution: CentOS ansible_distribution_file_parsed: true ansible_distribution_file_path: /etc/centos-release ansible_distribution_file_variety: CentOS ansible_distribution_major_version: '9' ansible_distribution_release: Stream ansible_distribution_version: '9' ansible_dns: nameservers: - 192.168.122.10 - 199.204.44.24 - 199.204.47.54 ansible_domain: '' ansible_effective_group_id: 1000 ansible_effective_user_id: 1000 ansible_env: ANSIBLE_LOG_PATH: 
/home/zuul/ci-framework-data/logs/e2e-collect-logs-must-gather.log BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' DEBUGINFOD_URLS: 'https://debuginfod.centos.org/ ' HOME: /home/zuul LANG: en_US.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: /home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 60544 22 SSH_CONNECTION: 38.102.83.114 60544 38.102.83.83 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '18' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f ansible_eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.83 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::f816:3eff:fe4f:9ce2 prefix: '64' scope: link macaddress: fa:16:3e:4f:9c:e2 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether ansible_facts: _ansible_facts_gathered: true all_ipv4_addresses: - 38.102.83.83 all_ipv6_addresses: - fe80::f816:3eff:fe4f:9ce2 ansible_local: {} apparmor: status: disabled architecture: x86_64 bios_date: 04/01/2014 bios_vendor: SeaBIOS bios_version: 1.15.0-1 board_asset_tag: NA board_name: NA board_serial: NA board_vendor: NA board_version: NA chassis_asset_tag: NA 
chassis_serial: NA chassis_vendor: QEMU chassis_version: pc-i440fx-6.2 cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-661.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=22ac9141-3960-4912-b20e-19fc8a328d40 crc_ci_bootstrap_instance_default_net_config: mtu: '1500' range: 192.168.122.0/24 router_net: '' transparent: true crc_ci_bootstrap_instance_nm_vlan_networks: - key: internal-api value: config_nm: false ip: 172.17.0.100 - key: storage value: config_nm: false ip: 172.18.0.100 - key: storage_mgmt value: config_nm: false ip: 172.20.0.100 - key: tenant value: config_nm: false ip: 172.19.0.100 crc_ci_bootstrap_instance_parent_port_create_yaml: admin_state_up: true allowed_address_pairs: [] binding_host_id: null binding_profile: {} binding_vif_details: {} binding_vif_type: null binding_vnic_type: normal created_at: '2026-01-22T11:53:24Z' data_plane_status: null description: '' device_id: '' device_owner: '' device_profile: null dns_assignment: - fqdn: host-192-168-122-100.openstacklocal. hostname: host-192-168-122-100 ip_address: 192.168.122.100 dns_domain: '' dns_name: '' extra_dhcp_opts: [] fixed_ips: - ip_address: 192.168.122.100 subnet_id: 7a5d6965-a3da-4c6f-bc86-42f95e634461 hardware_offload_type: null hints: '' id: 6ee4c47b-3df3-4385-88f3-b108a8f825dd ip_allocation: immediate mac_address: fa:16:3e:b5:53:e5 name: standalone-71c2ce28-24b8-4530-b131-72e04889a182 network_id: b176f5a8-05bf-4260-94f7-c8fdaa6712e1 numa_affinity_policy: null port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 propagate_uplink_status: null qos_network_policy_id: null qos_policy_id: null resource_request: null revision_number: 1 security_group_ids: [] status: DOWN tags: [] trunk_details: null trusted: null updated_at: '2026-01-22T11:53:24Z' crc_ci_bootstrap_network_name: zuul-ci-net-3911aa2e crc_ci_bootstrap_networks_out: controller: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.11/24 mac: fa:16:3e:b2:1f:bf mtu: '1500' internal-api: connection: ci-private-network-20 iface: eth1.20 ip: 172.17.0.4/24 mac: 52:54:00:91:b6:a0 mtu: '1496' parent_iface: eth1 vlan: 20 storage: connection: ci-private-network-21 iface: eth1.21 ip: 172.18.0.4/24 mac: 52:54:00:8c:4a:57 mtu: '1496' parent_iface: eth1 vlan: 21 storage_mgmt: connection: ci-private-network-23 iface: eth1.23 ip: 172.20.0.4/24 mac: 52:54:00:e6:85:75 mtu: '1496' parent_iface: eth1 vlan: 23 tenant: connection: ci-private-network-22 iface: eth1.22 ip: 172.19.0.4/24 mac: 52:54:00:34:d6:b6 mtu: '1496' parent_iface: eth1 vlan: 22 crc: default: connection: ci-private-network gw: 192.168.122.1 iface: ens7 ip: 192.168.122.10/24 mac: fa:16:3e:ee:8d:ea mtu: '1500' internal-api: connection: ci-private-network-20 iface: ens7.20 ip: 172.17.0.5/24 mac: 52:54:00:98:87:e6 mtu: '1496' parent_iface: ens7 vlan: 20 storage: connection: ci-private-network-21 iface: ens7.21 ip: 172.18.0.5/24 mac: 52:54:00:fe:f9:a5 mtu: '1496' parent_iface: ens7 vlan: 21 storage_mgmt: connection: ci-private-network-23 iface: ens7.23 ip: 172.20.0.5/24 mac: 52:54:00:66:52:20 mtu: '1496' parent_iface: ens7 vlan: 23 tenant: connection: ci-private-network-22 iface: ens7.22 ip: 172.19.0.5/24 mac: 52:54:00:c7:ca:3e mtu: '1496' parent_iface: ens7 vlan: 22 standalone: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.100/24 mac: fa:16:3e:b5:53:e5 mtu: '1500' internal-api: iface: eth1.20 ip: 172.17.0.100/24 mac: 
52:54:00:87:a9:f6 mtu: '1496' parent_iface: eth1 vlan: 20 storage: iface: eth1.21 ip: 172.18.0.100/24 mac: 52:54:00:4f:71:e3 mtu: '1496' parent_iface: eth1 vlan: 21 storage_mgmt: iface: eth1.23 ip: 172.20.0.100/24 mac: 52:54:00:40:23:1f mtu: '1496' parent_iface: eth1 vlan: 23 tenant: iface: eth1.22 ip: 172.19.0.100/24 mac: 52:54:00:f6:c7:15 mtu: '1496' parent_iface: eth1 vlan: 22 crc_ci_bootstrap_private_net_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2026-01-22T11:51:59Z' description: '' dns_domain: '' id: b176f5a8-05bf-4260-94f7-c8fdaa6712e1 ipv4_address_scope: null ipv6_address_scope: null is_default: false is_vlan_qinq: null is_vlan_transparent: true l2_adjacency: true mtu: 1500 name: zuul-ci-net-3911aa2e port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 provider:network_type: null provider:physical_network: null provider:segmentation_id: null qos_policy_id: null revision_number: 1 router:external: false segments: null shared: false status: ACTIVE subnets: [] tags: [] updated_at: '2026-01-22T11:51:59Z' crc_ci_bootstrap_private_router_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2026-01-22T11:52:05Z' description: '' enable_ndp_proxy: null external_gateway_info: null flavor_id: null id: 68d8267d-56ed-4ee3-aa04-a2f7bd2ffe0b name: zuul-ci-subnet-router-3911aa2e project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 1 routes: [] status: ACTIVE tags: [] tenant_id: 4b633c451ac74233be3721a3635275e5 updated_at: '2026-01-22T11:52:05Z' crc_ci_bootstrap_private_subnet_create_yaml: allocation_pools: - end: 192.168.122.254 start: 192.168.122.2 cidr: 192.168.122.0/24 created_at: '2026-01-22T11:52:03Z' description: '' dns_nameservers: [] dns_publish_fixed_ip: null enable_dhcp: false gateway_ip: 192.168.122.1 host_routes: [] id: 7a5d6965-a3da-4c6f-bc86-42f95e634461 ip_version: 4 ipv6_address_mode: null ipv6_ra_mode: null name: zuul-ci-subnet-3911aa2e network_id: b176f5a8-05bf-4260-94f7-c8fdaa6712e1 project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 0 segment_id: null service_types: [] subnetpool_id: null tags: [] updated_at: '2026-01-22T11:52:03Z' crc_ci_bootstrap_provider_dns: - 199.204.44.24 - 199.204.47.54 crc_ci_bootstrap_router_name: zuul-ci-subnet-router-3911aa2e crc_ci_bootstrap_subnet_name: zuul-ci-subnet-3911aa2e date_time: date: '2026-01-22' day: '22' epoch: '1769092100' epoch_int: '1769092100' hour: '14' iso8601: '2026-01-22T14:28:20Z' iso8601_basic: 20260122T142820041113 iso8601_basic_short: 20260122T142820 iso8601_micro: '2026-01-22T14:28:20.041113Z' minute: '28' month: '01' second: '20' time: '14:28:20' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Thursday weekday_number: '4' weeknumber: '03' year: '2026' default_ipv4: address: 38.102.83.83 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: fa:16:3e:4f:9c:e2 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether default_ipv6: {} device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 masters: {} uuids: sr0: - 2026-01-22-11-49-38-00 vda1: - 22ac9141-3960-4912-b20e-19fc8a328d40 devices: sr0: holders: [] host: '' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2026-01-22-11-49-38-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB 
support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: - 22ac9141-3960-4912-b20e-19fc8a328d40 sectors: '167770079' sectorsize: 512 size: 80.00 GB start: '2048' uuid: 22ac9141-3960-4912-b20e-19fc8a328d40 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '167772160' sectorsize: '512' size: 80.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 discovered_interpreter_python: /usr/bin/python3 distribution: CentOS distribution_file_parsed: true distribution_file_path: /etc/centos-release distribution_file_variety: CentOS distribution_major_version: '9' distribution_release: Stream distribution_version: '9' dns: nameservers: - 192.168.122.10 - 199.204.44.24 - 199.204.47.54 domain: '' effective_group_id: 1000 effective_user_id: 1000 env: ANSIBLE_LOG_PATH: /home/zuul/ci-framework-data/logs/e2e-collect-logs-must-gather.log BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' DEBUGINFOD_URLS: 'https://debuginfod.centos.org/ ' HOME: /home/zuul LANG: en_US.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: /home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 60544 22 SSH_CONNECTION: 38.102.83.114 60544 38.102.83.83 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '18' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 
'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.83 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::f816:3eff:fe4f:9ce2 prefix: '64' scope: link macaddress: fa:16:3e:4f:9c:e2 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether fibre_channel_wwn: [] fips: false form_factor: Other fqdn: controller gather_subset: - min hostname: controller hostnqn: nqn.2014-08.org.nvmexpress:uuid:5350774e-8b5e-4dba-80a9-92d405981c1d interfaces: - lo - eth0 is_chroot: false iscsi_iqn: '' kernel: 5.14.0-661.el9.x86_64 kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Jan 16 09:19:22 UTC 2026' lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback loadavg: 15m: 0.05 1m: 0.39 5m: 0.14 locally_reachable_ips: ipv4: - 38.102.83.83 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:fe4f:9ce2 lsb: {} lvm: N/A machine: x86_64 machine_id: 85ac68c10a6e7ae08ceb898dbdca0cb5 memfree_mb: 7084 memory_mb: nocache: free: 7292 used: 387 real: free: 7084 total: 7679 used: 595 swap: cached: 0 free: 0 total: 0 used: 0 memtotal_mb: 7679 module_setup: true mounts: - block_available: 20341226 block_size: 4096 block_total: 20954875 block_used: 613649 device: 
/dev/vda1 fstype: xfs inode_available: 41888619 inode_total: 41942512 inode_used: 53893 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 83317661696 size_total: 85831168000 uuid: 22ac9141-3960-4912-b20e-19fc8a328d40 nodename: controller os_family: RedHat pkg_mgr: dnf proc_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-661.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=22ac9141-3960-4912-b20e-19fc8a328d40 processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor processor_cores: 1 processor_count: 8 processor_nproc: 8 processor_threads_per_core: 1 processor_vcpus: 8 product_name: OpenStack Nova product_serial: NA product_uuid: NA product_version: 26.3.1 python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 25 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 25 - final - 0 python_version: 3.9.25 real_group_id: 1000 real_user_id: 1000 selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted selinux_python_present: true service_mgr: systemd ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBMKcpNrJpAx4CrcANXc7Mn4hy3MVnmG3hOgRXXsREK9Nr1ubSroZOpt4L9Fqy2kwyrYygLJWw/0Ub6eRiHYEsC8= ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIFnFPB1/0DZCQsBc2ZzNHDFdrc4p6KHouz9T+kXxP6Mv ssh_host_key_ed25519_public_keytype: ssh-ed25519 ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQC6HMCq1xxuJz4wWSauIns7H1KBFyS3Tk+19hW3WD1ndZ/bhbUbu/Rs5Ec0vOmp7s7EkHiuKXTatx4MjNzbUn6HsmYAZ8KDmxg1tlwm0YfSXEOBPVYxQj4PBNn4QD4CYpKyS1zS8Ely7Yym8eZ095ZwSz/BgCg/D7Tv5gEKNlMDY+Nh3eHp0yZHUCUd5znPesQynCClZjvXcU2ufx+FkKmwymn+yuVvC/scPXO17zyVxad783DV/4CFZEQD7NCTT0eX/8JTKhIBcNZlSHVmJZmRhdbl6ZKos4vLh3TecvWjEXfNuQL/Kco5I0KsbwWlSnBR0WPyVi06rg+gGruQdT2NEqJy9cGWkTLILsm201OrkG2ctFFqBnNqhSSHFCdHr89m2f5+FIx6vuPKo41YBK4Adz0RWxmR2/gZEIz7eVaPTNVtGS173eNxki861y7wowKHtZfqyo5YrgCJvtHDXprxnPqw6uMoOCOwCjfOV6IgjQfNH/R2CrKf9LRyF2/9pys= ssh_host_key_rsa_public_keytype: ssh-rsa swapfree_mb: 0 swaptotal_mb: 0 system: Linux system_capabilities: - '' system_capabilities_enforced: 'True' system_vendor: OpenStack Foundation uptime_seconds: 74 user_dir: /home/zuul user_gecos: '' user_gid: 1000 user_id: zuul user_shell: /bin/bash user_uid: 1000 userspace_architecture: x86_64 userspace_bits: '64' virtualization_role: guest virtualization_tech_guest: - openstack virtualization_tech_host: - kvm virtualization_type: openstack zuul_change_list: - data-plane-adoption ansible_fibre_channel_wwn: [] ansible_fips: false ansible_forks: 5 ansible_form_factor: Other ansible_fqdn: controller ansible_host: 38.102.83.83 ansible_hostname: controller ansible_hostnqn: nqn.2014-08.org.nvmexpress:uuid:5350774e-8b5e-4dba-80a9-92d405981c1d ansible_interfaces: - lo - eth0 ansible_inventory_sources: - /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/post_playbook_4/inventory.yaml ansible_is_chroot: false ansible_iscsi_iqn: '' ansible_kernel: 5.14.0-661.el9.x86_64 ansible_kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Jan 16 
09:19:22 UTC 2026' ansible_lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback ansible_loadavg: 15m: 0.05 1m: 0.39 5m: 0.14 ansible_local: {} ansible_locally_reachable_ips: ipv4: - 38.102.83.83 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:fe4f:9ce2 ansible_lsb: {} ansible_lvm: N/A ansible_machine: x86_64 ansible_machine_id: 85ac68c10a6e7ae08ceb898dbdca0cb5 ansible_memfree_mb: 7084 ansible_memory_mb: nocache: free: 7292 used: 387 real: free: 7084 total: 7679 used: 595 swap: cached: 0 free: 0 total: 0 used: 0 ansible_memtotal_mb: 7679 ansible_mounts: - block_available: 20341226 block_size: 4096 block_total: 20954875 block_used: 613649 device: /dev/vda1 fstype: xfs inode_available: 41888619 inode_total: 41942512 inode_used: 53893 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 83317661696 size_total: 85831168000 uuid: 22ac9141-3960-4912-b20e-19fc8a328d40 ansible_nodename: controller ansible_os_family: RedHat ansible_pkg_mgr: dnf ansible_playbook_python: /usr/lib/zuul/ansible/8/bin/python ansible_port: 22 ansible_proc_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-661.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=22ac9141-3960-4912-b20e-19fc8a328d40 ansible_processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome 
Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor ansible_processor_cores: 1 ansible_processor_count: 8 ansible_processor_nproc: 8 ansible_processor_threads_per_core: 1 ansible_processor_vcpus: 8 ansible_product_name: OpenStack Nova ansible_product_serial: NA ansible_product_uuid: NA ansible_product_version: 26.3.1 ansible_python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 25 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 25 - final - 0 ansible_python_interpreter: auto ansible_python_version: 3.9.25 ansible_real_group_id: 1000 ansible_real_user_id: 1000 ansible_run_tags: - all ansible_scp_extra_args: -o PermitLocalCommand=no ansible_selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted ansible_selinux_python_present: true ansible_service_mgr: systemd ansible_sftp_extra_args: -o PermitLocalCommand=no ansible_skip_tags: [] ansible_ssh_common_args: -o PermitLocalCommand=no ansible_ssh_executable: ssh ansible_ssh_extra_args: -o PermitLocalCommand=no ansible_ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBMKcpNrJpAx4CrcANXc7Mn4hy3MVnmG3hOgRXXsREK9Nr1ubSroZOpt4L9Fqy2kwyrYygLJWw/0Ub6eRiHYEsC8= ansible_ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ansible_ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIFnFPB1/0DZCQsBc2ZzNHDFdrc4p6KHouz9T+kXxP6Mv ansible_ssh_host_key_ed25519_public_keytype: ssh-ed25519 ansible_ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQC6HMCq1xxuJz4wWSauIns7H1KBFyS3Tk+19hW3WD1ndZ/bhbUbu/Rs5Ec0vOmp7s7EkHiuKXTatx4MjNzbUn6HsmYAZ8KDmxg1tlwm0YfSXEOBPVYxQj4PBNn4QD4CYpKyS1zS8Ely7Yym8eZ095ZwSz/BgCg/D7Tv5gEKNlMDY+Nh3eHp0yZHUCUd5znPesQynCClZjvXcU2ufx+FkKmwymn+yuVvC/scPXO17zyVxad783DV/4CFZEQD7NCTT0eX/8JTKhIBcNZlSHVmJZmRhdbl6ZKos4vLh3TecvWjEXfNuQL/Kco5I0KsbwWlSnBR0WPyVi06rg+gGruQdT2NEqJy9cGWkTLILsm201OrkG2ctFFqBnNqhSSHFCdHr89m2f5+FIx6vuPKo41YBK4Adz0RWxmR2/gZEIz7eVaPTNVtGS173eNxki861y7wowKHtZfqyo5YrgCJvtHDXprxnPqw6uMoOCOwCjfOV6IgjQfNH/R2CrKf9LRyF2/9pys= ansible_ssh_host_key_rsa_public_keytype: ssh-rsa ansible_swapfree_mb: 0 ansible_swaptotal_mb: 0 ansible_system: Linux ansible_system_capabilities: - '' ansible_system_capabilities_enforced: 'True' ansible_system_vendor: OpenStack Foundation ansible_uptime_seconds: 74 ansible_user: zuul ansible_user_dir: /home/zuul ansible_user_gecos: '' ansible_user_gid: 1000 ansible_user_id: zuul ansible_user_shell: /bin/bash ansible_user_uid: 1000 ansible_userspace_architecture: x86_64 ansible_userspace_bits: '64' ansible_verbosity: 1 ansible_version: full: 2.15.12 major: 2 minor: 15 revision: 12 string: 2.15.12 ansible_virtualization_role: guest ansible_virtualization_tech_guest: - openstack ansible_virtualization_tech_host: - kvm ansible_virtualization_type: openstack cifmw_architecture_repo: /home/zuul/src/github.com/openstack-k8s-operators/architecture cifmw_architecture_repo_relative: src/github.com/openstack-k8s-operators/architecture cifmw_artifacts_crc_sshkey: ~/.ssh/id_rsa cifmw_basedir: /home/zuul/ci-framework-data cifmw_default_dns_servers: - 1.1.1.1 - 8.8.8.8 cifmw_dlrn_report_result: false cifmw_install_yamls_defaults: ADOPTED_EXTERNAL_NETWORK: 172.21.1.0/24 ADOPTED_INTERNALAPI_NETWORK: 172.17.1.0/24 ADOPTED_STORAGEMGMT_NETWORK: 172.20.1.0/24 ADOPTED_STORAGE_NETWORK: 172.18.1.0/24 
ADOPTED_TENANT_NETWORK: 172.9.1.0/24 ANSIBLEEE: config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_BRANCH: main ANSIBLEEE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-operator-index:latest ANSIBLEEE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/kuttl-test.yaml ANSIBLEEE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/test/kuttl/tests ANSIBLEEE_KUTTL_NAMESPACE: ansibleee-kuttl-tests ANSIBLEEE_REPO: https://github.com/openstack-k8s-operators/openstack-ansibleee-operator ANSIBLEE_COMMIT_HASH: '' BARBICAN: config/samples/barbican_v1beta1_barbican.yaml BARBICAN_BRANCH: main BARBICAN_COMMIT_HASH: '' BARBICAN_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/config/samples/barbican_v1beta1_barbican.yaml BARBICAN_DEPL_IMG: unused BARBICAN_IMG: quay.io/openstack-k8s-operators/barbican-operator-index:latest BARBICAN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/kuttl-test.yaml BARBICAN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/test/kuttl/tests BARBICAN_KUTTL_NAMESPACE: barbican-kuttl-tests BARBICAN_REPO: https://github.com/openstack-k8s-operators/barbican-operator.git BARBICAN_SERVICE_ENABLED: 'true' BARBICAN_SIMPLE_CRYPTO_ENCRYPTION_KEY: sE**********U= BAREMETAL_BRANCH: main BAREMETAL_COMMIT_HASH: '' BAREMETAL_IMG: quay.io/openstack-k8s-operators/openstack-baremetal-operator-index:latest BAREMETAL_OS_CONTAINER_IMG: '' BAREMETAL_OS_IMG: '' BAREMETAL_OS_IMG_TYPE: '' BAREMETAL_REPO: https://github.com/openstack-k8s-operators/openstack-baremetal-operator.git BAREMETAL_TIMEOUT: 20m BASH_IMG: quay.io/openstack-k8s-operators/bash:latest BGP_ASN: '64999' BGP_LEAF_1: 100.65.4.1 BGP_LEAF_2: 100.64.4.1 BGP_OVN_ROUTING: 'false' BGP_PEER_ASN: '64999' BGP_SOURCE_IP: 172.30.4.2 BGP_SOURCE_IP6: f00d:f00d:f00d:f00d:f00d:f00d:f00d:42 BMAAS_BRIDGE_IPV4_PREFIX: 172.20.1.2/24 BMAAS_BRIDGE_IPV6_PREFIX: fd00:bbbb::2/64 BMAAS_INSTANCE_DISK_SIZE: '20' BMAAS_INSTANCE_MEMORY: '4096' BMAAS_INSTANCE_NAME_PREFIX: crc-bmaas BMAAS_INSTANCE_NET_MODEL: virtio BMAAS_INSTANCE_OS_VARIANT: centos-stream9 BMAAS_INSTANCE_VCPUS: '2' BMAAS_INSTANCE_VIRT_TYPE: kvm BMAAS_IPV4: 'true' BMAAS_IPV6: 'false' BMAAS_LIBVIRT_USER: sushyemu BMAAS_METALLB_ADDRESS_POOL: 172.20.1.64/26 BMAAS_METALLB_POOL_NAME: baremetal BMAAS_NETWORK_IPV4_PREFIX: 172.20.1.1/24 BMAAS_NETWORK_IPV6_PREFIX: fd00:bbbb::1/64 BMAAS_NETWORK_NAME: crc-bmaas BMAAS_NODE_COUNT: '1' BMAAS_OCP_INSTANCE_NAME: crc BMAAS_REDFISH_PASSWORD: password BMAAS_REDFISH_USERNAME: admin BMAAS_ROUTE_LIBVIRT_NETWORKS: crc-bmaas,crc,default BMAAS_SUSHY_EMULATOR_DRIVER: libvirt BMAAS_SUSHY_EMULATOR_IMAGE: quay.io/metal3-io/sushy-tools:latest BMAAS_SUSHY_EMULATOR_NAMESPACE: sushy-emulator BMAAS_SUSHY_EMULATOR_OS_CLIENT_CONFIG_FILE: /etc/openstack/clouds.yaml BMAAS_SUSHY_EMULATOR_OS_CLOUD: openstack BMH_NAMESPACE: openstack BMO_BRANCH: release-0.9 BMO_CLEANUP: 'true' BMO_COMMIT_HASH: '' BMO_IPA_BRANCH: stable/2024.1 BMO_IRONIC_HOST: 192.168.122.10 BMO_PROVISIONING_INTERFACE: '' BMO_REPO: https://github.com/metal3-io/baremetal-operator BMO_SETUP: false BMO_SETUP_ROUTE_REPLACE: 'true' BM_CTLPLANE_INTERFACE: enp1s0 BM_INSTANCE_MEMORY: '8192' BM_INSTANCE_NAME_PREFIX: edpm-compute-baremetal BM_INSTANCE_NAME_SUFFIX: '0' BM_NETWORK_NAME: default 
BM_NODE_COUNT: '1' BM_ROOT_PASSWORD: '' BM_ROOT_PASSWORD_SECRET: '' CEILOMETER_CENTRAL_DEPL_IMG: unused CEILOMETER_NOTIFICATION_DEPL_IMG: unused CEPH_BRANCH: release-1.15 CEPH_CLIENT: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/toolbox.yaml CEPH_COMMON: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/common.yaml CEPH_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/cluster-test.yaml CEPH_CRDS: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/crds.yaml CEPH_IMG: quay.io/ceph/demo:latest-squid CEPH_OP: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/operator-openshift.yaml CEPH_REPO: https://github.com/rook/rook.git CERTMANAGER_TIMEOUT: 300s CHECKOUT_FROM_OPENSTACK_REF: 'true' CINDER: config/samples/cinder_v1beta1_cinder.yaml CINDERAPI_DEPL_IMG: unused CINDERBKP_DEPL_IMG: unused CINDERSCH_DEPL_IMG: unused CINDERVOL_DEPL_IMG: unused CINDER_BRANCH: main CINDER_COMMIT_HASH: '' CINDER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/config/samples/cinder_v1beta1_cinder.yaml CINDER_IMG: quay.io/openstack-k8s-operators/cinder-operator-index:latest CINDER_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/kuttl-test.yaml CINDER_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/test/kuttl/tests CINDER_KUTTL_NAMESPACE: cinder-kuttl-tests CINDER_REPO: https://github.com/openstack-k8s-operators/cinder-operator.git CLEANUP_DIR_CMD: rm -Rf CRC_BGP_NIC_1_MAC: '52:54:00:11:11:11' CRC_BGP_NIC_2_MAC: '52:54:00:11:11:12' CRC_HTTPS_PROXY: '' CRC_HTTP_PROXY: '' CRC_STORAGE_NAMESPACE: crc-storage CRC_STORAGE_RETRIES: '3' CRC_URL: '''https://developers.redhat.com/content-gateway/rest/mirror/pub/openshift-v4/clients/crc/latest/crc-linux-amd64.tar.xz''' CRC_VERSION: latest DATAPLANE_ANSIBLE_SECRET: dataplane-ansible-ssh-private-key-secret DATAPLANE_ANSIBLE_USER: '' DATAPLANE_COMPUTE_IP: 192.168.122.100 DATAPLANE_CONTAINER_PREFIX: openstack DATAPLANE_CONTAINER_TAG: current-podified DATAPLANE_CUSTOM_SERVICE_RUNNER_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest DATAPLANE_DEFAULT_GW: 192.168.122.1 DATAPLANE_EXTRA_NOVA_CONFIG_FILE: /dev/null DATAPLANE_GROWVOLS_ARGS: /=8GB /tmp=1GB /home=1GB /var=100% DATAPLANE_KUSTOMIZE_SCENARIO: preprovisioned DATAPLANE_NETWORKER_IP: 192.168.122.200 DATAPLANE_NETWORK_INTERFACE_NAME: eth0 DATAPLANE_NOVA_NFS_PATH: '' DATAPLANE_NTP_SERVER: pool.ntp.org DATAPLANE_PLAYBOOK: osp.edpm.download_cache DATAPLANE_REGISTRY_URL: quay.io/podified-antelope-centos9 DATAPLANE_RUNNER_IMG: '' DATAPLANE_SERVER_ROLE: compute DATAPLANE_SSHD_ALLOWED_RANGES: '[''192.168.122.0/24'']' DATAPLANE_TIMEOUT: 30m DATAPLANE_TLS_ENABLED: 'true' DATAPLANE_TOTAL_NETWORKER_NODES: '1' DATAPLANE_TOTAL_NODES: '1' DBSERVICE: galera DESIGNATE: config/samples/designate_v1beta1_designate.yaml DESIGNATE_BRANCH: main DESIGNATE_COMMIT_HASH: '' DESIGNATE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/config/samples/designate_v1beta1_designate.yaml DESIGNATE_IMG: quay.io/openstack-k8s-operators/designate-operator-index:latest DESIGNATE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/kuttl-test.yaml DESIGNATE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/test/kuttl/tests DESIGNATE_KUTTL_NAMESPACE: designate-kuttl-tests DESIGNATE_REPO: 
https://github.com/openstack-k8s-operators/designate-operator.git DNSDATA: config/samples/network_v1beta1_dnsdata.yaml DNSDATA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsdata.yaml DNSMASQ: config/samples/network_v1beta1_dnsmasq.yaml DNSMASQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsmasq.yaml DNS_DEPL_IMG: unused DNS_DOMAIN: localdomain DOWNLOAD_TOOLS_SELECTION: all EDPM_ATTACH_EXTNET: 'true' EDPM_COMPUTE_ADDITIONAL_HOST_ROUTES: '''[]''' EDPM_COMPUTE_ADDITIONAL_NETWORKS: '''[]''' EDPM_COMPUTE_CELLS: '1' EDPM_COMPUTE_CEPH_ENABLED: 'true' EDPM_COMPUTE_CEPH_NOVA: 'true' EDPM_COMPUTE_DHCP_AGENT_ENABLED: 'true' EDPM_COMPUTE_SRIOV_ENABLED: 'true' EDPM_COMPUTE_SUFFIX: '0' EDPM_CONFIGURE_DEFAULT_ROUTE: 'true' EDPM_CONFIGURE_HUGEPAGES: 'false' EDPM_CONFIGURE_NETWORKING: 'true' EDPM_FIRSTBOOT_EXTRA: /tmp/edpm-firstboot-extra EDPM_NETWORKER_SUFFIX: '0' EDPM_TOTAL_NETWORKERS: '1' EDPM_TOTAL_NODES: '1' GALERA_REPLICAS: '' GENERATE_SSH_KEYS: 'true' GIT_CLONE_OPTS: '' GLANCE: config/samples/glance_v1beta1_glance.yaml GLANCEAPI_DEPL_IMG: unused GLANCE_BRANCH: main GLANCE_COMMIT_HASH: '' GLANCE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/config/samples/glance_v1beta1_glance.yaml GLANCE_IMG: quay.io/openstack-k8s-operators/glance-operator-index:latest GLANCE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/kuttl-test.yaml GLANCE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/test/kuttl/tests GLANCE_KUTTL_NAMESPACE: glance-kuttl-tests GLANCE_REPO: https://github.com/openstack-k8s-operators/glance-operator.git HEAT: config/samples/heat_v1beta1_heat.yaml HEATAPI_DEPL_IMG: unused HEATCFNAPI_DEPL_IMG: unused HEATENGINE_DEPL_IMG: unused HEAT_AUTH_ENCRYPTION_KEY: 76**********f0 HEAT_BRANCH: main HEAT_COMMIT_HASH: '' HEAT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/config/samples/heat_v1beta1_heat.yaml HEAT_IMG: quay.io/openstack-k8s-operators/heat-operator-index:latest HEAT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/kuttl-test.yaml HEAT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/test/kuttl/tests HEAT_KUTTL_NAMESPACE: heat-kuttl-tests HEAT_REPO: https://github.com/openstack-k8s-operators/heat-operator.git HEAT_SERVICE_ENABLED: 'true' HORIZON: config/samples/horizon_v1beta1_horizon.yaml HORIZON_BRANCH: main HORIZON_COMMIT_HASH: '' HORIZON_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/config/samples/horizon_v1beta1_horizon.yaml HORIZON_DEPL_IMG: unused HORIZON_IMG: quay.io/openstack-k8s-operators/horizon-operator-index:latest HORIZON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/kuttl-test.yaml HORIZON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/test/kuttl/tests HORIZON_KUTTL_NAMESPACE: horizon-kuttl-tests HORIZON_REPO: https://github.com/openstack-k8s-operators/horizon-operator.git INFRA_BRANCH: main INFRA_COMMIT_HASH: '' INFRA_IMG: quay.io/openstack-k8s-operators/infra-operator-index:latest INFRA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/kuttl-test.yaml INFRA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/test/kuttl/tests INFRA_KUTTL_NAMESPACE: infra-kuttl-tests 
INFRA_REPO: https://github.com/openstack-k8s-operators/infra-operator.git INSTALL_CERT_MANAGER: 'true' INSTALL_NMSTATE: true || false INSTALL_NNCP: true || false INTERNALAPI_HOST_ROUTES: '' IPV6_LAB_IPV4_NETWORK_IPADDRESS: 172.30.0.1/24 IPV6_LAB_IPV6_NETWORK_IPADDRESS: fd00:abcd:abcd:fc00::1/64 IPV6_LAB_LIBVIRT_STORAGE_POOL: default IPV6_LAB_MANAGE_FIREWALLD: 'true' IPV6_LAB_NAT64_HOST_IPV4: 172.30.0.2/24 IPV6_LAB_NAT64_HOST_IPV6: fd00:abcd:abcd:fc00::2/64 IPV6_LAB_NAT64_INSTANCE_NAME: nat64-router IPV6_LAB_NAT64_IPV6_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_NAT64_TAYGA_DYNAMIC_POOL: 192.168.255.0/24 IPV6_LAB_NAT64_TAYGA_IPV4: 192.168.255.1 IPV6_LAB_NAT64_TAYGA_IPV6: fd00:abcd:abcd:fc00::3 IPV6_LAB_NAT64_TAYGA_IPV6_PREFIX: fd00:abcd:abcd:fcff::/96 IPV6_LAB_NAT64_UPDATE_PACKAGES: 'false' IPV6_LAB_NETWORK_NAME: nat64 IPV6_LAB_SNO_CLUSTER_NETWORK: fd00:abcd:0::/48 IPV6_LAB_SNO_HOST_IP: fd00:abcd:abcd:fc00::11 IPV6_LAB_SNO_HOST_PREFIX: '64' IPV6_LAB_SNO_INSTANCE_NAME: sno IPV6_LAB_SNO_MACHINE_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_SNO_OCP_MIRROR_URL: https://mirror.openshift.com/pub/openshift-v4/clients/ocp IPV6_LAB_SNO_OCP_VERSION: latest-4.14 IPV6_LAB_SNO_SERVICE_NETWORK: fd00:abcd:abcd:fc03::/112 IPV6_LAB_SSH_PUB_KEY: /home/zuul/.ssh/id_rsa.pub IPV6_LAB_WORK_DIR: /home/zuul/.ipv6lab IRONIC: config/samples/ironic_v1beta1_ironic.yaml IRONICAPI_DEPL_IMG: unused IRONICCON_DEPL_IMG: unused IRONICINS_DEPL_IMG: unused IRONICNAG_DEPL_IMG: unused IRONICPXE_DEPL_IMG: unused IRONIC_BRANCH: main IRONIC_COMMIT_HASH: '' IRONIC_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/config/samples/ironic_v1beta1_ironic.yaml IRONIC_IMAGE: quay.io/metal3-io/ironic IRONIC_IMAGE_TAG: release-24.1 IRONIC_IMG: quay.io/openstack-k8s-operators/ironic-operator-index:latest IRONIC_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/kuttl-test.yaml IRONIC_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/test/kuttl/tests IRONIC_KUTTL_NAMESPACE: ironic-kuttl-tests IRONIC_REPO: https://github.com/openstack-k8s-operators/ironic-operator.git KEYSTONEAPI: config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_DEPL_IMG: unused KEYSTONE_BRANCH: main KEYSTONE_COMMIT_HASH: '' KEYSTONE_FEDERATION_CLIENT_SECRET: CO**********6f KEYSTONE_FEDERATION_CRYPTO_PASSPHRASE: openstack KEYSTONE_IMG: quay.io/openstack-k8s-operators/keystone-operator-index:latest KEYSTONE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/kuttl-test.yaml KEYSTONE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/test/kuttl/tests KEYSTONE_KUTTL_NAMESPACE: keystone-kuttl-tests KEYSTONE_REPO: https://github.com/openstack-k8s-operators/keystone-operator.git KUBEADMIN_PWD: '12345678' LIBVIRT_SECRET: libvirt-secret LOKI_DEPLOY_MODE: openshift-network LOKI_DEPLOY_NAMESPACE: netobserv LOKI_DEPLOY_SIZE: 1x.demo LOKI_NAMESPACE: openshift-operators-redhat LOKI_OPERATOR_GROUP: openshift-operators-redhat-loki LOKI_SUBSCRIPTION: loki-operator LVMS_CR: '1' MANILA: config/samples/manila_v1beta1_manila.yaml MANILAAPI_DEPL_IMG: unused MANILASCH_DEPL_IMG: unused MANILASHARE_DEPL_IMG: unused MANILA_BRANCH: main MANILA_COMMIT_HASH: '' MANILA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/config/samples/manila_v1beta1_manila.yaml 
MANILA_IMG: quay.io/openstack-k8s-operators/manila-operator-index:latest MANILA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/kuttl-test.yaml MANILA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/test/kuttl/tests MANILA_KUTTL_NAMESPACE: manila-kuttl-tests MANILA_REPO: https://github.com/openstack-k8s-operators/manila-operator.git MANILA_SERVICE_ENABLED: 'true' MARIADB: config/samples/mariadb_v1beta1_galera.yaml MARIADB_BRANCH: main MARIADB_CHAINSAW_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/config.yaml MARIADB_CHAINSAW_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/tests MARIADB_CHAINSAW_NAMESPACE: mariadb-chainsaw-tests MARIADB_COMMIT_HASH: '' MARIADB_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/config/samples/mariadb_v1beta1_galera.yaml MARIADB_DEPL_IMG: unused MARIADB_IMG: quay.io/openstack-k8s-operators/mariadb-operator-index:latest MARIADB_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/kuttl-test.yaml MARIADB_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/kuttl/tests MARIADB_KUTTL_NAMESPACE: mariadb-kuttl-tests MARIADB_REPO: https://github.com/openstack-k8s-operators/mariadb-operator.git MEMCACHED: config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_DEPL_IMG: unused METADATA_SHARED_SECRET: '12**********42' METALLB_IPV6_POOL: fd00:aaaa::80-fd00:aaaa::90 METALLB_POOL: 192.168.122.80-192.168.122.90 MICROSHIFT: '0' NAMESPACE: openstack NETCONFIG: config/samples/network_v1beta1_netconfig.yaml NETCONFIG_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_netconfig.yaml NETCONFIG_DEPL_IMG: unused NETOBSERV_DEPLOY_NAMESPACE: netobserv NETOBSERV_NAMESPACE: openshift-netobserv-operator NETOBSERV_OPERATOR_GROUP: openshift-netobserv-operator-net NETOBSERV_SUBSCRIPTION: netobserv-operator NETWORK_BGP: 'false' NETWORK_DESIGNATE_ADDRESS_PREFIX: 172.28.0 NETWORK_DESIGNATE_EXT_ADDRESS_PREFIX: 172.50.0 NETWORK_INTERNALAPI_ADDRESS_PREFIX: 172.17.0 NETWORK_ISOLATION: 'true' NETWORK_ISOLATION_INSTANCE_NAME: crc NETWORK_ISOLATION_IPV4: 'true' NETWORK_ISOLATION_IPV4_ADDRESS: 172.16.1.1/24 NETWORK_ISOLATION_IPV4_NAT: 'true' NETWORK_ISOLATION_IPV6: 'false' NETWORK_ISOLATION_IPV6_ADDRESS: fd00:aaaa::1/64 NETWORK_ISOLATION_IP_ADDRESS: 192.168.122.10 NETWORK_ISOLATION_MAC: '52:54:00:11:11:10' NETWORK_ISOLATION_NETWORK_NAME: net-iso NETWORK_ISOLATION_NET_NAME: default NETWORK_ISOLATION_USE_DEFAULT_NETWORK: 'true' NETWORK_MTU: '1500' NETWORK_STORAGEMGMT_ADDRESS_PREFIX: 172.20.0 NETWORK_STORAGE_ADDRESS_PREFIX: 172.18.0 NETWORK_STORAGE_MACVLAN: '' NETWORK_TENANT_ADDRESS_PREFIX: 172.19.0 NETWORK_VLAN_START: '20' NETWORK_VLAN_STEP: '1' NEUTRONAPI: config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_DEPL_IMG: unused NEUTRON_BRANCH: main NEUTRON_COMMIT_HASH: '' NEUTRON_IMG: quay.io/openstack-k8s-operators/neutron-operator-index:latest NEUTRON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/kuttl-test.yaml NEUTRON_KUTTL_DIR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/test/kuttl/tests NEUTRON_KUTTL_NAMESPACE: neutron-kuttl-tests NEUTRON_REPO: https://github.com/openstack-k8s-operators/neutron-operator.git NFS_HOME: /home/nfs NMSTATE_NAMESPACE: openshift-nmstate NMSTATE_OPERATOR_GROUP: openshift-nmstate-tn6k8 NMSTATE_SUBSCRIPTION: kubernetes-nmstate-operator NNCP_ADDITIONAL_HOST_ROUTES: '' NNCP_BGP_1_INTERFACE: enp7s0 NNCP_BGP_1_IP_ADDRESS: 100.65.4.2 NNCP_BGP_2_INTERFACE: enp8s0 NNCP_BGP_2_IP_ADDRESS: 100.64.4.2 NNCP_BRIDGE: ospbr NNCP_CLEANUP_TIMEOUT: 120s NNCP_CTLPLANE_IPV6_ADDRESS_PREFIX: 'fd00:aaaa::' NNCP_CTLPLANE_IPV6_ADDRESS_SUFFIX: '10' NNCP_CTLPLANE_IP_ADDRESS_PREFIX: 192.168.122 NNCP_CTLPLANE_IP_ADDRESS_SUFFIX: '10' NNCP_DNS_SERVER: 192.168.122.1 NNCP_DNS_SERVER_IPV6: fd00:aaaa::1 NNCP_GATEWAY: 192.168.122.1 NNCP_GATEWAY_IPV6: fd00:aaaa::1 NNCP_INTERFACE: enp6s0 NNCP_NODES: '' NNCP_TIMEOUT: 240s NOVA: config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_BRANCH: main NOVA_COMMIT_HASH: '' NOVA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/nova-operator/config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_IMG: quay.io/openstack-k8s-operators/nova-operator-index:latest NOVA_REPO: https://github.com/openstack-k8s-operators/nova-operator.git NUMBER_OF_INSTANCES: '1' OCP_NETWORK_NAME: crc OCTAVIA: config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_BRANCH: main OCTAVIA_COMMIT_HASH: '' OCTAVIA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_IMG: quay.io/openstack-k8s-operators/octavia-operator-index:latest OCTAVIA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/kuttl-test.yaml OCTAVIA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/test/kuttl/tests OCTAVIA_KUTTL_NAMESPACE: octavia-kuttl-tests OCTAVIA_REPO: https://github.com/openstack-k8s-operators/octavia-operator.git OKD: 'false' OPENSTACK_BRANCH: main OPENSTACK_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-bundle:latest OPENSTACK_COMMIT_HASH: '' OPENSTACK_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_CRDS_DIR: openstack_crds OPENSTACK_CTLPLANE: config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_IMG: quay.io/openstack-k8s-operators/openstack-operator-index:latest OPENSTACK_K8S_BRANCH: main OPENSTACK_K8S_TAG: latest OPENSTACK_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/kuttl-test.yaml OPENSTACK_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/test/kuttl/tests OPENSTACK_KUTTL_NAMESPACE: openstack-kuttl-tests OPENSTACK_NEUTRON_CUSTOM_CONF: '' OPENSTACK_REPO: https://github.com/openstack-k8s-operators/openstack-operator.git OPENSTACK_STORAGE_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-storage-bundle:latest OPERATOR_BASE_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator OPERATOR_CHANNEL: '' OPERATOR_NAMESPACE: openstack-operators OPERATOR_SOURCE: '' OPERATOR_SOURCE_NAMESPACE: '' OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm OVNCONTROLLER: config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_NMAP: 'true' OVNDBS: config/samples/ovn_v1beta1_ovndbcluster.yaml OVNDBS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovndbcluster.yaml OVNNORTHD: config/samples/ovn_v1beta1_ovnnorthd.yaml OVNNORTHD_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovnnorthd.yaml OVN_BRANCH: main OVN_COMMIT_HASH: '' OVN_IMG: quay.io/openstack-k8s-operators/ovn-operator-index:latest OVN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/kuttl-test.yaml OVN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/test/kuttl/tests OVN_KUTTL_NAMESPACE: ovn-kuttl-tests OVN_REPO: https://github.com/openstack-k8s-operators/ovn-operator.git PASSWORD: '12**********78' PLACEMENTAPI: config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_DEPL_IMG: unused PLACEMENT_BRANCH: main PLACEMENT_COMMIT_HASH: '' PLACEMENT_IMG: quay.io/openstack-k8s-operators/placement-operator-index:latest PLACEMENT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/kuttl-test.yaml PLACEMENT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/test/kuttl/tests PLACEMENT_KUTTL_NAMESPACE: placement-kuttl-tests PLACEMENT_REPO: https://github.com/openstack-k8s-operators/placement-operator.git PULL_SECRET: /home/zuul/src/review.rdoproject.org/rdo-jobs/playbooks/data_plane_adoption/pull-secret.txt RABBITMQ: docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_BRANCH: patches RABBITMQ_COMMIT_HASH: '' RABBITMQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rabbitmq-operator/docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_DEPL_IMG: unused RABBITMQ_IMG: quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest RABBITMQ_REPO: https://github.com/openstack-k8s-operators/rabbitmq-cluster-operator.git REDHAT_OPERATORS: 'false' REDIS: config/samples/redis_v1beta1_redis.yaml REDIS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator-redis/config/samples/redis_v1beta1_redis.yaml REDIS_DEPL_IMG: unused RH_REGISTRY_PWD: '' RH_REGISTRY_USER: '' SECRET: os**********et SG_CORE_DEPL_IMG: unused STANDALONE_COMPUTE_DRIVER: libvirt STANDALONE_EXTERNAL_NET_PREFFIX: 172.21.0 STANDALONE_INTERNALAPI_NET_PREFIX: 172.17.0 STANDALONE_STORAGEMGMT_NET_PREFIX: 172.20.0 STANDALONE_STORAGE_NET_PREFIX: 172.18.0 STANDALONE_TENANT_NET_PREFIX: 172.19.0 STORAGEMGMT_HOST_ROUTES: '' STORAGE_CLASS: local-storage STORAGE_HOST_ROUTES: '' SWIFT: config/samples/swift_v1beta1_swift.yaml SWIFT_BRANCH: main SWIFT_COMMIT_HASH: '' SWIFT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/config/samples/swift_v1beta1_swift.yaml SWIFT_IMG: quay.io/openstack-k8s-operators/swift-operator-index:latest SWIFT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/kuttl-test.yaml SWIFT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/test/kuttl/tests SWIFT_KUTTL_NAMESPACE: swift-kuttl-tests SWIFT_REPO: https://github.com/openstack-k8s-operators/swift-operator.git TELEMETRY: config/samples/telemetry_v1beta1_telemetry.yaml 
TELEMETRY_BRANCH: main TELEMETRY_COMMIT_HASH: '' TELEMETRY_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_IMG: quay.io/openstack-k8s-operators/telemetry-operator-index:latest TELEMETRY_KUTTL_BASEDIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator TELEMETRY_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/kuttl-test.yaml TELEMETRY_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/test/kuttl/suites TELEMETRY_KUTTL_NAMESPACE: telemetry-kuttl-tests TELEMETRY_KUTTL_RELPATH: test/kuttl/suites TELEMETRY_REPO: https://github.com/openstack-k8s-operators/telemetry-operator.git TENANT_HOST_ROUTES: '' TIMEOUT: 300s TLS_ENABLED: 'false' tripleo_deploy: 'export REGISTRY_USER:' cifmw_install_yamls_environment: BMO_SETUP: false CHECKOUT_FROM_OPENSTACK_REF: 'true' KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm cifmw_installyamls_repos: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls cifmw_installyamls_repos_relative: src/github.com/openstack-k8s-operators/install_yamls cifmw_nolog: true cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_context: default/api-crc-testing:6443/kubeadmin cifmw_openshift_kubeconfig: /home/zuul/.crc/machines/crc/kubeconfig cifmw_openshift_password: '12**********89' cifmw_openshift_setup_skip_internal_registry_tls_verify: true cifmw_openshift_skip_tls_verify: true cifmw_openshift_token: sha256~DRDkWcBbIdKn0u9Nxm-2md9dPVf-gVXLRNv2O8KcEp4 cifmw_openshift_user: kubeadmin cifmw_openstack_k8s_operators_org_url: https://github.com/openstack-k8s-operators cifmw_openstack_namespace: openstack cifmw_path: /home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin cifmw_repo: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework cifmw_repo_relative: src/github.com/openstack-k8s-operators/ci-framework cifmw_status: changed: false failed: false stat: atime: 1769083465.461972 attr_flags: '' attributes: [] block_size: 4096 blocks: 8 charset: binary ctime: 1769083468.740053 dev: 64513 device_type: 0 executable: true exists: true gid: 1000 gr_name: zuul inode: 150999587 isblk: false ischr: false isdir: true isfifo: false isgid: false islnk: false isreg: false issock: false isuid: false mimetype: inode/directory mode: '0755' mtime: 1769083468.740053 nlink: 21 path: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework pw_name: zuul readable: true rgrp: true roth: true rusr: true size: 4096 uid: 1000 version: '3297015793' wgrp: false woth: false writeable: true wusr: true xgrp: true xoth: true xusr: true cifmw_success_flag: changed: false failed: false stat: atime: 1769092060.8322425 attr_flags: '' attributes: [] block_size: 4096 blocks: 0 charset: binary checksum: da39a3ee5e6b4b0d3255bfef95601890afd80709 ctime: 1769092060.8322425 dev: 64513 device_type: 0 executable: false exists: true gid: 1000 gr_name: zuul inode: 4414570 isblk: false ischr: false isdir: false isfifo: false isgid: false islnk: false isreg: true issock: false isuid: false mimetype: inode/x-empty mode: '0644' mtime: 1769092060.8322425 nlink: 1 path: /home/zuul/cifmw-success pw_name: zuul readable: true rgrp: true roth: true rusr: true size: 0 uid: 1000 version: 
'3766895722' wgrp: false woth: false writeable: true wusr: true xgrp: false xoth: false xusr: false cifmw_use_crc: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller cloud_domain: ooo.test crc_ci_bootstrap_cloud_name: vexxhost crc_ci_bootstrap_instance_default_net_config: mtu: '1500' range: 192.168.122.0/24 router_net: '' transparent: true crc_ci_bootstrap_instance_nm_vlan_networks: - key: internal-api value: config_nm: false ip: 172.17.0.100 - key: storage value: config_nm: false ip: 172.18.0.100 - key: storage_mgmt value: config_nm: false ip: 172.20.0.100 - key: tenant value: config_nm: false ip: 172.19.0.100 crc_ci_bootstrap_instance_parent_port_create_yaml: admin_state_up: true allowed_address_pairs: [] binding_host_id: null binding_profile: {} binding_vif_details: {} binding_vif_type: null binding_vnic_type: normal created_at: '2026-01-22T11:53:24Z' data_plane_status: null description: '' device_id: '' device_owner: '' device_profile: null dns_assignment: - fqdn: host-192-168-122-100.openstacklocal. hostname: host-192-168-122-100 ip_address: 192.168.122.100 dns_domain: '' dns_name: '' extra_dhcp_opts: [] fixed_ips: - ip_address: 192.168.122.100 subnet_id: 7a5d6965-a3da-4c6f-bc86-42f95e634461 hardware_offload_type: null hints: '' id: 6ee4c47b-3df3-4385-88f3-b108a8f825dd ip_allocation: immediate mac_address: fa:16:3e:b5:53:e5 name: standalone-71c2ce28-24b8-4530-b131-72e04889a182 network_id: b176f5a8-05bf-4260-94f7-c8fdaa6712e1 numa_affinity_policy: null port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 propagate_uplink_status: null qos_network_policy_id: null qos_policy_id: null resource_request: null revision_number: 1 security_group_ids: [] status: DOWN tags: [] trunk_details: null trusted: null updated_at: '2026-01-22T11:53:24Z' crc_ci_bootstrap_network_name: zuul-ci-net-3911aa2e crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 internal-api: ip: 172.17.0.4 storage: ip: 172.18.0.4 storage_mgmt: ip: 172.20.0.4 tenant: ip: 172.19.0.4 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 storage_mgmt: ip: 172.20.0.5 tenant: ip: 172.19.0.5 standalone: networks: default: config_nm: false ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 storage_mgmt: config_nm: false ip: 172.20.0.100 tenant: config_nm: false ip: 172.19.0.100 networks: default: mtu: '1500' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 storage_mgmt: range: 172.20.0.0/24 vlan: 23 tenant: range: 172.19.0.0/24 vlan: 22 crc_ci_bootstrap_networks_out: controller: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.11/24 mac: fa:16:3e:b2:1f:bf mtu: '1500' internal-api: connection: ci-private-network-20 iface: eth1.20 ip: 172.17.0.4/24 mac: 52:54:00:91:b6:a0 mtu: '1496' parent_iface: eth1 vlan: 20 storage: connection: ci-private-network-21 iface: eth1.21 ip: 172.18.0.4/24 mac: 52:54:00:8c:4a:57 mtu: '1496' parent_iface: eth1 vlan: 21 storage_mgmt: connection: ci-private-network-23 iface: eth1.23 ip: 172.20.0.4/24 mac: 52:54:00:e6:85:75 mtu: '1496' parent_iface: eth1 vlan: 23 tenant: connection: ci-private-network-22 iface: eth1.22 ip: 172.19.0.4/24 mac: 52:54:00:34:d6:b6 mtu: '1496' parent_iface: eth1 vlan: 22 crc: default: connection: ci-private-network gw: 192.168.122.1 iface: ens7 ip: 192.168.122.10/24 mac: 
fa:16:3e:ee:8d:ea mtu: '1500' internal-api: connection: ci-private-network-20 iface: ens7.20 ip: 172.17.0.5/24 mac: 52:54:00:98:87:e6 mtu: '1496' parent_iface: ens7 vlan: 20 storage: connection: ci-private-network-21 iface: ens7.21 ip: 172.18.0.5/24 mac: 52:54:00:fe:f9:a5 mtu: '1496' parent_iface: ens7 vlan: 21 storage_mgmt: connection: ci-private-network-23 iface: ens7.23 ip: 172.20.0.5/24 mac: 52:54:00:66:52:20 mtu: '1496' parent_iface: ens7 vlan: 23 tenant: connection: ci-private-network-22 iface: ens7.22 ip: 172.19.0.5/24 mac: 52:54:00:c7:ca:3e mtu: '1496' parent_iface: ens7 vlan: 22 standalone: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.100/24 mac: fa:16:3e:b5:53:e5 mtu: '1500' internal-api: iface: eth1.20 ip: 172.17.0.100/24 mac: 52:54:00:87:a9:f6 mtu: '1496' parent_iface: eth1 vlan: 20 storage: iface: eth1.21 ip: 172.18.0.100/24 mac: 52:54:00:4f:71:e3 mtu: '1496' parent_iface: eth1 vlan: 21 storage_mgmt: iface: eth1.23 ip: 172.20.0.100/24 mac: 52:54:00:40:23:1f mtu: '1496' parent_iface: eth1 vlan: 23 tenant: iface: eth1.22 ip: 172.19.0.100/24 mac: 52:54:00:f6:c7:15 mtu: '1496' parent_iface: eth1 vlan: 22 crc_ci_bootstrap_private_net_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2026-01-22T11:51:59Z' description: '' dns_domain: '' id: b176f5a8-05bf-4260-94f7-c8fdaa6712e1 ipv4_address_scope: null ipv6_address_scope: null is_default: false is_vlan_qinq: null is_vlan_transparent: true l2_adjacency: true mtu: 1500 name: zuul-ci-net-3911aa2e port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 provider:network_type: null provider:physical_network: null provider:segmentation_id: null qos_policy_id: null revision_number: 1 router:external: false segments: null shared: false status: ACTIVE subnets: [] tags: [] updated_at: '2026-01-22T11:51:59Z' crc_ci_bootstrap_private_router_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2026-01-22T11:52:05Z' description: '' enable_ndp_proxy: null external_gateway_info: null flavor_id: null id: 68d8267d-56ed-4ee3-aa04-a2f7bd2ffe0b name: zuul-ci-subnet-router-3911aa2e project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 1 routes: [] status: ACTIVE tags: [] tenant_id: 4b633c451ac74233be3721a3635275e5 updated_at: '2026-01-22T11:52:05Z' crc_ci_bootstrap_private_subnet_create_yaml: allocation_pools: - end: 192.168.122.254 start: 192.168.122.2 cidr: 192.168.122.0/24 created_at: '2026-01-22T11:52:03Z' description: '' dns_nameservers: [] dns_publish_fixed_ip: null enable_dhcp: false gateway_ip: 192.168.122.1 host_routes: [] id: 7a5d6965-a3da-4c6f-bc86-42f95e634461 ip_version: 4 ipv6_address_mode: null ipv6_ra_mode: null name: zuul-ci-subnet-3911aa2e network_id: b176f5a8-05bf-4260-94f7-c8fdaa6712e1 project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 0 segment_id: null service_types: [] subnetpool_id: null tags: [] updated_at: '2026-01-22T11:52:03Z' crc_ci_bootstrap_provider_dns: - 199.204.44.24 - 199.204.47.54 crc_ci_bootstrap_router_name: zuul-ci-subnet-router-3911aa2e crc_ci_bootstrap_subnet_name: zuul-ci-subnet-3911aa2e discovered_interpreter_python: /usr/bin/python3 dpa_test_suite: test-minimal enable_barbican: 'false' enable_octavia: 'true' enable_ramdisk: true enable_telemetry: 'true' enable_tls: 'true' gather_subset: - min group_names: - ungrouped groups: all: - controller - crc - standalone computes: [] ocps: - crc rh-subscription: - standalone ungrouped: *id001 
zuul_unreachable: [] inventory_dir: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/post_playbook_4 inventory_file: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/post_playbook_4/inventory.yaml inventory_hostname: controller inventory_hostname_short: controller logfiles_dest_dir: /home/zuul/ci-framework-data/logs/2026-01-22_14-28 module_setup: true nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 8e2c7ff5-ac7b-45b2-a9c5-05e2d4922b4b host_id: bdb78bf25a270582fae0ca49d447ffffc4c7a50a772a0a4c0593588a interface_ip: 38.102.83.83 label: cloud-centos-9-stream-tripleo private_ipv4: 38.102.83.83 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.83 public_ipv6: '' region: RegionOne slot: null omit: __omit_place_holder__10ecd797ec9368ecd51cdd69cf5ce40a70e7da09 operator_namespace: openstack-operators osp_17_ceph_repos: - rhceph-7-tools-for-rhel-9-x86_64-rpms osp_17_repos: - rhel-9-for-x86_64-baseos-eus-rpms - rhel-9-for-x86_64-appstream-eus-rpms - rhel-9-for-x86_64-highavailability-eus-rpms - openstack-17.1-for-rhel-9-x86_64-rpms - fast-datapath-for-rhel-9-x86_64-rpms playbook_dir: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true standalone_gateway: 192.168.122.10 standalone_ip: 192.168.122.100 standalone_private_key: /home/zuul/.ssh/id_rsa standalone_vm_inventory: standalone_vm_inventory unsafe_vars: adoption_extra_vars: 'supported_volume_backends: [] supported_backup_backends: [] ' ansible_connection: ssh ansible_host: 38.102.83.83 ansible_port: 22 ansible_python_interpreter: auto ansible_user: zuul cifmw_artifacts_crc_sshkey: ~/.ssh/id_rsa cifmw_dlrn_report_result: false cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '12**********89' cifmw_openshift_setup_skip_internal_registry_tls_verify: true cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_use_crc: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller cloud_domain: ooo.test crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 internal-api: ip: 172.17.0.4 storage: ip: 172.18.0.4 storage_mgmt: ip: 172.20.0.4 tenant: ip: 172.19.0.4 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 storage_mgmt: ip: 172.20.0.5 tenant: ip: 172.19.0.5 standalone: networks: default: config_nm: false ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 storage_mgmt: config_nm: false ip: 172.20.0.100 tenant: config_nm: false ip: 172.19.0.100 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 storage_mgmt: range: 172.20.0.0/24 vlan: 23 tenant: range: 172.19.0.0/24 vlan: 22 dpa_test_suite: test-minimal enable_barbican: 'false' enable_octavia: 'true' enable_ramdisk: true enable_telemetry: 'true' enable_tls: 'true' nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 8e2c7ff5-ac7b-45b2-a9c5-05e2d4922b4b host_id: 
bdb78bf25a270582fae0ca49d447ffffc4c7a50a772a0a4c0593588a interface_ip: 38.102.83.83 label: cloud-centos-9-stream-tripleo private_ipv4: 38.102.83.83 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.83 public_ipv6: '' region: RegionOne slot: null osp_17_ceph_repos: - rhceph-7-tools-for-rhel-9-x86_64-rpms osp_17_repos: - rhel-9-for-x86_64-baseos-eus-rpms - rhel-9-for-x86_64-appstream-eus-rpms - rhel-9-for-x86_64-highavailability-eus-rpms - openstack-17.1-for-rhel-9-x86_64-rpms - fast-datapath-for-rhel-9-x86_64-rpms push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true standalone_gateway: 192.168.122.10 standalone_ip: 192.168.122.100 standalone_private_key: /home/zuul/.ssh/id_rsa standalone_vm_inventory: standalone_vm_inventory upstream_control_plane_dns: 192.168.122.10 use_ceph: 'false' zuul_log_collection: true upstream_control_plane_dns: 192.168.122.10 use_ceph: 'false' zuul: _inheritance_path: - '' - '' - '' - '' - '' - '' ansible_version: '8' attempts: 1 branch: main build: 3911aa2e57464e5ea41c24d6ab361757 build_refs: - branch: main change: '1202' change_message: "uni04delta-ipv6: No OVN DHCP for baremetal ports\n\nAdds `ovn/disable_ovn_dhcp_for_baremetal_ports` configuration to the uni04delta-ipv6 `config_download.yaml` using `neutron::config::plugin_ml2_config`.\r\n \ \r\nThis disables OVN's built-in DHCP service for baremetal ports, allowing external DHCP to be used instead. Uses the config class approach since the direct puppet parameter was not backported to OSP 17.1.\r\n\r\nJira: [OSPRH-20021](https://issues.redhat.com//browse/OSPRH-20021)" change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption topic: null buildset: ae39f936defb47d8b8026507b61685a3 buildset_refs: - branch: main change: '1202' change_message: "uni04delta-ipv6: No OVN DHCP for baremetal ports\n\nAdds `ovn/disable_ovn_dhcp_for_baremetal_ports` configuration to the uni04delta-ipv6 `config_download.yaml` using `neutron::config::plugin_ml2_config`.\r\n \ \r\nThis disables OVN's built-in DHCP service for baremetal ports, allowing external DHCP to be used instead. Uses the config class approach since the direct puppet parameter was not backported to OSP 17.1.\r\n\r\nJira: [OSPRH-20021](https://issues.redhat.com//browse/OSPRH-20021)" change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption topic: null change: '1202' change_message: "uni04delta-ipv6: No OVN DHCP for baremetal ports\n\nAdds `ovn/disable_ovn_dhcp_for_baremetal_ports` configuration to the uni04delta-ipv6 `config_download.yaml` using `neutron::config::plugin_ml2_config`.\r\n \ \r\nThis disables OVN's built-in DHCP service for baremetal ports, allowing external DHCP to be used instead. 
Uses the config class approach since the direct puppet parameter was not backported to OSP 17.1.\r\n\r\nJira: [OSPRH-20021](https://issues.redhat.com//browse/OSPRH-20021)" change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 child_jobs: [] commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 event_id: 46eb15f0-f788-11f0-870c-f967a8119d20 executor: hostname: ze04.softwarefactory-project.io inventory_file: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/inventory.yaml log_root: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work/logs result_data_file: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work/results.json src_root: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work/src work_root: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work items: - branch: main change: '1202' change_message: "uni04delta-ipv6: No OVN DHCP for baremetal ports\n\nAdds `ovn/disable_ovn_dhcp_for_baremetal_ports` configuration to the uni04delta-ipv6 `config_download.yaml` using `neutron::config::plugin_ml2_config`.\r\n \ \r\nThis disables OVN's built-in DHCP service for baremetal ports, allowing external DHCP to be used instead. Uses the config class approach since the direct puppet parameter was not backported to OSP 17.1.\r\n\r\nJira: [OSPRH-20021](https://issues.redhat.com//browse/OSPRH-20021)" change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption topic: null job: adoption-standalone-to-crc-no-ceph jobtags: [] max_attempts: 1 message: dW5pMDRkZWx0YS1pcHY2OiBObyBPVk4gREhDUCBmb3IgYmFyZW1ldGFsIHBvcnRzCgpBZGRzIGBvdm4vZGlzYWJsZV9vdm5fZGhjcF9mb3JfYmFyZW1ldGFsX3BvcnRzYCBjb25maWd1cmF0aW9uIHRvIHRoZSB1bmkwNGRlbHRhLWlwdjYgYGNvbmZpZ19kb3dubG9hZC55YW1sYCB1c2luZyBgbmV1dHJvbjo6Y29uZmlnOjpwbHVnaW5fbWwyX2NvbmZpZ2AuDQogICAgDQpUaGlzIGRpc2FibGVzIE9WTidzIGJ1aWx0LWluIERIQ1Agc2VydmljZSBmb3IgYmFyZW1ldGFsIHBvcnRzLCBhbGxvd2luZyBleHRlcm5hbCBESENQIHRvIGJlIHVzZWQgaW5zdGVhZC4gVXNlcyB0aGUgY29uZmlnIGNsYXNzIGFwcHJvYWNoIHNpbmNlIHRoZSBkaXJlY3QgcHVwcGV0IHBhcmFtZXRlciB3YXMgbm90IGJhY2twb3J0ZWQgdG8gT1NQIDE3LjEuDQoNCkppcmE6IFtPU1BSSC0yMDAyMV0oaHR0cHM6Ly9pc3N1ZXMucmVkaGF0LmNvbS8vYnJvd3NlL09TUFJILTIwMDIxKQ== patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 pipeline: github-check playbook_context: playbook_projects: trusted/project_0/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 6b8ed36de1a9fb17d1895a907c56e0ebed359010 trusted/project_1/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 1d0e62540c53a110ce48eaedb202bbd70f327c48 trusted/project_2/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d6509b6cfec74740c76a12d94dd9fc8ce1990297 trusted/project_3/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: daa791825fcb1f23b3b87c68747b1dbe292d2d2d untrusted/project_0/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: daa791825fcb1f23b3b87c68747b1dbe292d2d2d 
untrusted/project_1/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 6b8ed36de1a9fb17d1895a907c56e0ebed359010 untrusted/project_2/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 1d0e62540c53a110ce48eaedb202bbd70f327c48 untrusted/project_3/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d6509b6cfec74740c76a12d94dd9fc8ce1990297 playbooks: - path: untrusted/project_3/review.rdoproject.org/rdo-jobs/playbooks/data_plane_adoption/deploy_standalone_run_repo_tests.yaml roles: - checkout: main checkout_description: project override ref link_name: ansible/playbook_0/role_0/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_0/role_0/ci-framework/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_1/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_0/role_1/config/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_2/zuul-jobs link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_0/role_2/zuul-jobs/roles - checkout: master checkout_description: playbook branch link_name: ansible/playbook_0/role_3/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_0/role_3/rdo-jobs/roles post_review: false project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption projects: github.com/crc-org/crc-cloud: canonical_hostname: github.com canonical_name: github.com/crc-org/crc-cloud checkout: main checkout_description: project override ref commit: 42957126d9d9b9d1372615db325b82bd992fa335 name: crc-org/crc-cloud required: true short_name: crc-cloud src_dir: src/github.com/crc-org/crc-cloud github.com/openstack-k8s-operators/architecture: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/architecture checkout: main checkout_description: project override ref commit: 2eaa1ef0f72a4e7ef0a5042b135993cbfc51eacc name: openstack-k8s-operators/architecture required: true short_name: architecture src_dir: src/github.com/openstack-k8s-operators/architecture github.com/openstack-k8s-operators/ci-framework: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main checkout_description: project override ref commit: daa791825fcb1f23b3b87c68747b1dbe292d2d2d name: openstack-k8s-operators/ci-framework required: true short_name: ci-framework src_dir: src/github.com/openstack-k8s-operators/ci-framework github.com/openstack-k8s-operators/data-plane-adoption: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption checkout: main checkout_description: project override ref commit: 7346bd354c161fbd39016de93e1981fb8edfe179 name: openstack-k8s-operators/data-plane-adoption required: true short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption github.com/openstack-k8s-operators/install_yamls: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/install_yamls checkout: main checkout_description: project 
override ref commit: 13897053815e2df424a56208aa288cf95b7283d1 name: openstack-k8s-operators/install_yamls required: true short_name: install_yamls src_dir: src/github.com/openstack-k8s-operators/install_yamls opendev.org/zuul/zuul-jobs: canonical_hostname: opendev.org canonical_name: opendev.org/zuul/zuul-jobs checkout: master checkout_description: project default branch commit: 1d0e62540c53a110ce48eaedb202bbd70f327c48 name: zuul/zuul-jobs required: true short_name: zuul-jobs src_dir: src/opendev.org/zuul/zuul-jobs review.rdoproject.org/config: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/config checkout: master checkout_description: project default branch commit: 6b8ed36de1a9fb17d1895a907c56e0ebed359010 name: config required: true short_name: config src_dir: src/review.rdoproject.org/config review.rdoproject.org/rdo-jobs: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/rdo-jobs checkout: master checkout_description: project default branch commit: d6509b6cfec74740c76a12d94dd9fc8ce1990297 name: rdo-jobs required: true short_name: rdo-jobs src_dir: src/review.rdoproject.org/rdo-jobs ref: refs/pull/1202/head resources: {} tenant: rdoproject.org timeout: 14400 topic: null voting: true zuul_change_list: - data-plane-adoption zuul_execution_branch: main zuul_execution_canonical_name_and_path: github.com/openstack-k8s-operators/ci-framework/ci/playbooks/e2e-collect-logs.yml zuul_execution_phase: post zuul_execution_phase_index: '4' zuul_execution_trusted: 'False' zuul_log_collection: true zuul_success: 'True' zuul_will_retry: 'False' crc: adoption_extra_vars: 'supported_volume_backends: [] supported_backup_backends: [] ' ansible_all_ipv4_addresses: - 192.168.126.11 - 38.102.83.97 ansible_all_ipv6_addresses: - fe80::7ac8:e089:46be:52cb ansible_apparmor: status: disabled ansible_architecture: x86_64 ansible_bios_date: 04/01/2014 ansible_bios_vendor: SeaBIOS ansible_bios_version: 1.15.0-1 ansible_board_asset_tag: NA ansible_board_name: NA ansible_board_serial: NA ansible_board_vendor: NA ansible_board_version: NA ansible_br_int: active: false device: br-int features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' 
tx_lockless: on [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: d6:39:55:2e:22:71 mtu: 1400 promisc: true timestamping: [] type: ether ansible_chassis_asset_tag: NA ansible_chassis_serial: NA ansible_chassis_vendor: QEMU ansible_chassis_version: pc-i440fx-6.2 ansible_check_mode: false ansible_cmdline: BOOT_IMAGE: (hd0,gpt3)/boot/ostree/rhcos-222c9618de9c45c63e246dddef9ff68a7a0a02c06c0f77615561166d9e2254ea/vmlinuz-5.14.0-427.50.2.el9_4.x86_64 boot: UUID=de0497b0-db1b-465a-b278-03db02455c71 cgroup_no_v1: all ignition.platform.id: metal ostree: /ostree/boot.1/rhcos/222c9618de9c45c63e246dddef9ff68a7a0a02c06c0f77615561166d9e2254ea/0 psi: '0' root: UUID=0b076daa-c26a-46d2-b3a6-72a8dbc6e257 rootflags: prjquota rw: true systemd.unified_cgroup_hierarchy: '1' ansible_config_file: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/post_playbook_4/ansible.cfg ansible_connection: ssh ansible_date_time: date: '2026-01-22' day: '22' epoch: '1769082663' epoch_int: '1769082663' hour: '11' iso8601: '2026-01-22T11:51:03Z' iso8601_basic: 20260122T115103382062 iso8601_basic_short: 20260122T115103 iso8601_micro: '2026-01-22T11:51:03.382062Z' minute: '51' month: '01' second: '03' time: '11:51:03' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Thursday weekday_number: '4' weeknumber: '03' year: '2026' ansible_default_ipv4: address: 38.102.83.97 alias: ens3 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: ens3 macaddress: fa:16:3e:cb:ca:73 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether ansible_default_ipv6: {} ansible_device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 vda2: - EFI-SYSTEM vda3: - boot vda4: - root masters: {} uuids: sr0: - 2026-01-22-11-49-36-00 vda2: - 7B77-95E7 vda3: - de0497b0-db1b-465a-b278-03db02455c71 vda4: - 0b076daa-c26a-46d2-b3a6-72a8dbc6e257 ansible_devices: sr0: holders: [] host: 'IDE interface: Intel Corporation 82371SB PIIX3 IDE [Natoma/Triton II]' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2026-01-22-11-49-36-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '0' vendor: QEMU virtual: 1 vda: holders: [] host: 'SCSI storage controller: Red Hat, Inc. 
Virtio block device' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: [] sectors: '2048' sectorsize: 512 size: 1.00 MB start: '2048' uuid: null vda2: holders: [] links: ids: [] labels: - EFI-SYSTEM masters: [] uuids: - 7B77-95E7 sectors: '260096' sectorsize: 512 size: 127.00 MB start: '4096' uuid: 7B77-95E7 vda3: holders: [] links: ids: [] labels: - boot masters: [] uuids: - de0497b0-db1b-465a-b278-03db02455c71 sectors: '786432' sectorsize: 512 size: 384.00 MB start: '264192' uuid: de0497b0-db1b-465a-b278-03db02455c71 vda4: holders: [] links: ids: [] labels: - root masters: [] uuids: - 0b076daa-c26a-46d2-b3a6-72a8dbc6e257 sectors: '166718848' sectorsize: 512 size: 79.50 GB start: '1050624' uuid: 0b076daa-c26a-46d2-b3a6-72a8dbc6e257 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '419430400' sectorsize: '512' size: 200.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 ansible_diff_mode: false ansible_distribution: RedHat ansible_distribution_file_parsed: true ansible_distribution_file_path: /etc/redhat-release ansible_distribution_file_search_string: Red Hat ansible_distribution_file_variety: RedHat ansible_distribution_major_version: '4' ansible_distribution_release: NA ansible_distribution_version: '4.18' ansible_dns: nameservers: - 199.204.44.24 - 199.204.47.54 ansible_domain: '' ansible_effective_group_id: 1000 ansible_effective_user_id: 1000 ansible_ens3: active: true device: ens3 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_lockless: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.97 
broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::7ac8:e089:46be:52cb prefix: '64' scope: link macaddress: fa:16:3e:cb:ca:73 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether ansible_env: BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus HOME: /var/home/core LANG: C.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: core MOTD_SHOWN: pam PATH: /var/home/core/.local/bin:/var/home/core/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /var/home/core SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 44864 22 SSH_CONNECTION: 38.102.83.114 44864 38.102.83.97 22 USER: core XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '2' XDG_SESSION_TYPE: tty _: /usr/bin/python3.9 which_declare: declare -f ansible_eth10: active: true device: eth10 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_lockless: on [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 192.168.126.11 broadcast: 192.168.126.255 netmask: 255.255.255.0 network: 192.168.126.0 prefix: '24' macaddress: ca:7c:95:fb:82:08 mtu: 1500 promisc: false timestamping: [] type: ether ansible_facts: _ansible_facts_gathered: true all_ipv4_addresses: - 192.168.126.11 - 38.102.83.97 all_ipv6_addresses: - fe80::7ac8:e089:46be:52cb ansible_local: {} apparmor: status: disabled architecture: x86_64 bios_date: 04/01/2014 bios_vendor: SeaBIOS bios_version: 1.15.0-1 board_asset_tag: NA board_name: NA board_serial: NA 
board_vendor: NA board_version: NA br_int: active: false device: br-int features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_lockless: on [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: d6:39:55:2e:22:71 mtu: 1400 promisc: true timestamping: [] type: ether chassis_asset_tag: NA chassis_serial: NA chassis_vendor: QEMU chassis_version: pc-i440fx-6.2 cmdline: BOOT_IMAGE: (hd0,gpt3)/boot/ostree/rhcos-222c9618de9c45c63e246dddef9ff68a7a0a02c06c0f77615561166d9e2254ea/vmlinuz-5.14.0-427.50.2.el9_4.x86_64 boot: UUID=de0497b0-db1b-465a-b278-03db02455c71 cgroup_no_v1: all ignition.platform.id: metal ostree: /ostree/boot.1/rhcos/222c9618de9c45c63e246dddef9ff68a7a0a02c06c0f77615561166d9e2254ea/0 psi: '0' root: UUID=0b076daa-c26a-46d2-b3a6-72a8dbc6e257 rootflags: prjquota rw: true systemd.unified_cgroup_hierarchy: '1' date_time: date: '2026-01-22' day: '22' epoch: '1769082663' epoch_int: '1769082663' hour: '11' iso8601: '2026-01-22T11:51:03Z' iso8601_basic: 20260122T115103382062 iso8601_basic_short: 20260122T115103 iso8601_micro: '2026-01-22T11:51:03.382062Z' minute: '51' month: '01' second: '03' time: '11:51:03' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Thursday weekday_number: '4' weeknumber: '03' year: '2026' default_ipv4: address: 38.102.83.97 alias: ens3 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: ens3 macaddress: fa:16:3e:cb:ca:73 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether default_ipv6: {} device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 vda2: - EFI-SYSTEM vda3: - boot vda4: - root masters: {} uuids: sr0: - 2026-01-22-11-49-36-00 vda2: - 7B77-95E7 vda3: - de0497b0-db1b-465a-b278-03db02455c71 vda4: - 0b076daa-c26a-46d2-b3a6-72a8dbc6e257 devices: sr0: holders: [] host: 
'IDE interface: Intel Corporation 82371SB PIIX3 IDE [Natoma/Triton II]' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2026-01-22-11-49-36-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '0' vendor: QEMU virtual: 1 vda: holders: [] host: 'SCSI storage controller: Red Hat, Inc. Virtio block device' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: [] sectors: '2048' sectorsize: 512 size: 1.00 MB start: '2048' uuid: null vda2: holders: [] links: ids: [] labels: - EFI-SYSTEM masters: [] uuids: - 7B77-95E7 sectors: '260096' sectorsize: 512 size: 127.00 MB start: '4096' uuid: 7B77-95E7 vda3: holders: [] links: ids: [] labels: - boot masters: [] uuids: - de0497b0-db1b-465a-b278-03db02455c71 sectors: '786432' sectorsize: 512 size: 384.00 MB start: '264192' uuid: de0497b0-db1b-465a-b278-03db02455c71 vda4: holders: [] links: ids: [] labels: - root masters: [] uuids: - 0b076daa-c26a-46d2-b3a6-72a8dbc6e257 sectors: '166718848' sectorsize: 512 size: 79.50 GB start: '1050624' uuid: 0b076daa-c26a-46d2-b3a6-72a8dbc6e257 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '419430400' sectorsize: '512' size: 200.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 discovered_interpreter_python: /usr/bin/python3.9 distribution: RedHat distribution_file_parsed: true distribution_file_path: /etc/redhat-release distribution_file_search_string: Red Hat distribution_file_variety: RedHat distribution_major_version: '4' distribution_release: NA distribution_version: '4.18' dns: nameservers: - 199.204.44.24 - 199.204.47.54 domain: '' effective_group_id: 1000 effective_user_id: 1000 ens3: active: true device: ens3 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_lockless: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' 
tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.97 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::7ac8:e089:46be:52cb prefix: '64' scope: link macaddress: fa:16:3e:cb:ca:73 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether env: BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus HOME: /var/home/core LANG: C.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: core MOTD_SHOWN: pam PATH: /var/home/core/.local/bin:/var/home/core/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /var/home/core SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 44864 22 SSH_CONNECTION: 38.102.83.114 44864 38.102.83.97 22 USER: core XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '2' XDG_SESSION_TYPE: tty _: /usr/bin/python3.9 which_declare: declare -f eth10: active: true device: eth10 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_lockless: on [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 192.168.126.11 broadcast: 192.168.126.255 netmask: 255.255.255.0 network: 192.168.126.0 prefix: '24' macaddress: ca:7c:95:fb:82:08 mtu: 1500 
promisc: false timestamping: [] type: ether fibre_channel_wwn: [] fips: false form_factor: Other fqdn: crc gather_subset: - all hostname: crc hostnqn: nqn.2014-08.org.nvmexpress:uuid:5868846d-b985-4e9b-a1e6-70725bc6eac4 interfaces: - br-int - ovs-system - lo - eth10 - ovn-k8s-mp0 - ens3 is_chroot: true iscsi_iqn: '' kernel: 5.14.0-427.50.2.el9_4.x86_64 kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Feb 7 09:29:50 EST 2025' lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] netns_local: on [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_lockless: on [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback loadavg: 15m: 0.02 1m: 0.2 5m: 0.07 locally_reachable_ips: ipv4: - 38.102.83.97 - 127.0.0.0/8 - 127.0.0.1 - 192.168.126.11 ipv6: - ::1 - fe80::7ac8:e089:46be:52cb lsb: {} lvm: N/A machine: x86_64 machine_id: 21801e6708c44f15b81395eb736a7cec memfree_mb: 31513 memory_mb: nocache: free: 31716 used: 379 real: free: 31513 total: 32095 used: 582 swap: cached: 0 free: 0 total: 0 used: 0 memtotal_mb: 32095 module_setup: true mounts: - block_available: 15532522 block_size: 4096 block_total: 20823472 block_used: 5290950 device: /dev/vda4 fstype: xfs inode_available: 41561682 inode_total: 41679680 inode_used: 117998 mount: /sysroot options: ro,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota size_available: 63621210112 size_total: 85292941312 uuid: 0b076daa-c26a-46d2-b3a6-72a8dbc6e257 - block_available: 15532522 block_size: 4096 block_total: 20823472 block_used: 5290950 device: /dev/vda4 fstype: xfs inode_available: 
41561682 inode_total: 41679680 inode_used: 117998 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 63621210112 size_total: 85292941312 uuid: 0b076daa-c26a-46d2-b3a6-72a8dbc6e257 - block_available: 15532522 block_size: 4096 block_total: 20823472 block_used: 5290950 device: /dev/vda4 fstype: xfs inode_available: 41561682 inode_total: 41679680 inode_used: 117998 mount: /etc options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 63621210112 size_total: 85292941312 uuid: 0b076daa-c26a-46d2-b3a6-72a8dbc6e257 - block_available: 15532522 block_size: 4096 block_total: 20823472 block_used: 5290950 device: /dev/vda4 fstype: xfs inode_available: 41561682 inode_total: 41679680 inode_used: 117998 mount: /usr options: ro,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 63621210112 size_total: 85292941312 uuid: 0b076daa-c26a-46d2-b3a6-72a8dbc6e257 - block_available: 15532522 block_size: 4096 block_total: 20823472 block_used: 5290950 device: /dev/vda4 fstype: xfs inode_available: 41561682 inode_total: 41679680 inode_used: 117998 mount: /sysroot/ostree/deploy/rhcos/var options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 63621210112 size_total: 85292941312 uuid: 0b076daa-c26a-46d2-b3a6-72a8dbc6e257 - block_available: 15532522 block_size: 4096 block_total: 20823472 block_used: 5290950 device: /dev/vda4 fstype: xfs inode_available: 41561682 inode_total: 41679680 inode_used: 117998 mount: /var options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 63621210112 size_total: 85292941312 uuid: 0b076daa-c26a-46d2-b3a6-72a8dbc6e257 - block_available: 220687 block_size: 1024 block_total: 358271 block_used: 137584 device: /dev/vda3 fstype: ext4 inode_available: 97936 inode_total: 98304 inode_used: 368 mount: /boot options: ro,seclabel,nosuid,nodev,relatime size_available: 225983488 size_total: 366869504 uuid: de0497b0-db1b-465a-b278-03db02455c71 - block_available: 0 block_size: 2048 block_total: 241 block_used: 241 device: /dev/sr0 fstype: iso9660 inode_available: 0 inode_total: 0 inode_used: 0 mount: /tmp/openstack-config-drive options: ro,relatime,nojoliet,check=s,map=n,blocksize=2048 size_available: 0 size_total: 493568 uuid: 2026-01-22-11-49-36-00 nodename: crc os_family: RedHat ovn_k8s_mp0: active: false device: ovn-k8s-mp0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] 
tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_lockless: on [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: 0a:58:0a:d9:00:02 mtu: 1400 promisc: true timestamping: [] type: ether ovs_system: active: false device: ovs-system features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: on [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_lockless: on [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: fa:59:79:6a:4d:91 mtu: 1500 promisc: true timestamping: [] type: ether pkg_mgr: atomic_container proc_cmdline: BOOT_IMAGE: (hd0,gpt3)/boot/ostree/rhcos-222c9618de9c45c63e246dddef9ff68a7a0a02c06c0f77615561166d9e2254ea/vmlinuz-5.14.0-427.50.2.el9_4.x86_64 boot: UUID=de0497b0-db1b-465a-b278-03db02455c71 cgroup_no_v1: all ignition.platform.id: metal ostree: /ostree/boot.1/rhcos/222c9618de9c45c63e246dddef9ff68a7a0a02c06c0f77615561166d9e2254ea/0 psi: '0' root: UUID=0b076daa-c26a-46d2-b3a6-72a8dbc6e257 rootflags: prjquota rw: true systemd.unified_cgroup_hierarchy: '1' processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - 
AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor - '8' - AuthenticAMD - AMD EPYC-Rome Processor - '9' - AuthenticAMD - AMD EPYC-Rome Processor - '10' - AuthenticAMD - AMD EPYC-Rome Processor - '11' - AuthenticAMD - AMD EPYC-Rome Processor processor_cores: 1 processor_count: 12 processor_nproc: 12 processor_threads_per_core: 1 processor_vcpus: 12 product_name: OpenStack Nova product_serial: NA product_uuid: NA product_version: 26.3.1 python: executable: /usr/bin/python3.9 has_sslcontext: true type: cpython version: major: 3 micro: 18 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 18 - final - 0 python_version: 3.9.18 real_group_id: 1000 real_user_id: 1000 selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted selinux_python_present: true service_mgr: systemd ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBM2gL1NPmTDa3suVZVEFlN8tPkayDFQSSTCPBpXJWQc3prsrt6ZyqK7Td0ww3tUUrsgsfAU/zg0wlL8Uy7glFNY= ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIOBf2eYlZCq3ti7hpf/c/0cPvvnQl+7GDtrgSQLW+P8M ssh_host_key_ed25519_public_keytype: ssh-ed25519 ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQCdFRMPX7MELHeXOXX3AiRE+YCx8qkH6fWyHDvxjIPlNjC7fN3C7MUv2UkRbGNwHilJova9iQLM6fZmHa1AcnwQOjgoQZunWkH7gGzqE3oB5NluUPuOiZ90ukh1xDK1SBdpIGsgRnmuoPlV5he8qFF35Y3fohe0WQEt5OQw8TUT9TMtwsyznLAs/0GJ590sN+PHh232WJOB6T7/zdEzmN8i/mPqVOYXem7yawqiMMiOkgBsfKUXI34ejxm2ltpWLk6GjT3cZKhfL2kxBPtR0WY04BZMgelDGxZBOhGHHZDIuqXcQozF29wH6ybFPwQ03yePNDUSz9xYAhPGs5ec6sWDRgXwa1FG/lgPuHw/rzuqilyK+JsBbIdjXvQ4bDG4kZd9C++k0jICLGhwKpflEGKoX+M7BZXkrjocVBLI1WM+Xfqi6lP1qI5HYvXzo70YVdAbs2l4F2LSJwjWQnUWfJgWdceJJXcsiboc5LUNn6AE+27E6CRuV7UMC9EFKO/Ltqc= ssh_host_key_rsa_public_keytype: ssh-rsa swapfree_mb: 0 swaptotal_mb: 0 system: Linux system_capabilities: - '' system_capabilities_enforced: 'True' system_vendor: OpenStack Foundation uptime_seconds: 77 user_dir: /var/home/core user_gecos: CoreOS Admin user_gid: 1000 user_id: core user_shell: /bin/bash user_uid: 1000 userspace_architecture: x86_64 userspace_bits: '64' virtualization_role: guest virtualization_tech_guest: - openstack virtualization_tech_host: - kvm virtualization_type: openstack ansible_fibre_channel_wwn: [] ansible_fips: false ansible_forks: 5 ansible_form_factor: Other ansible_fqdn: crc ansible_host: 38.102.83.97 ansible_hostname: crc ansible_hostnqn: nqn.2014-08.org.nvmexpress:uuid:5868846d-b985-4e9b-a1e6-70725bc6eac4 ansible_interfaces: - br-int - ovs-system - lo - eth10 - ovn-k8s-mp0 - ens3 ansible_inventory_sources: - /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/post_playbook_4/inventory.yaml ansible_is_chroot: true ansible_iscsi_iqn: '' ansible_kernel: 5.14.0-427.50.2.el9_4.x86_64 ansible_kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Feb 7 09:29:50 EST 2025' ansible_lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] netns_local: on 
[fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_lockless: on [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback ansible_loadavg: 15m: 0.02 1m: 0.2 5m: 0.07 ansible_local: {} ansible_locally_reachable_ips: ipv4: - 38.102.83.97 - 127.0.0.0/8 - 127.0.0.1 - 192.168.126.11 ipv6: - ::1 - fe80::7ac8:e089:46be:52cb ansible_lsb: {} ansible_lvm: N/A ansible_machine: x86_64 ansible_machine_id: 21801e6708c44f15b81395eb736a7cec ansible_memfree_mb: 31513 ansible_memory_mb: nocache: free: 31716 used: 379 real: free: 31513 total: 32095 used: 582 swap: cached: 0 free: 0 total: 0 used: 0 ansible_memtotal_mb: 32095 ansible_mounts: - block_available: 15532522 block_size: 4096 block_total: 20823472 block_used: 5290950 device: /dev/vda4 fstype: xfs inode_available: 41561682 inode_total: 41679680 inode_used: 117998 mount: /sysroot options: ro,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota size_available: 63621210112 size_total: 85292941312 uuid: 0b076daa-c26a-46d2-b3a6-72a8dbc6e257 - block_available: 15532522 block_size: 4096 block_total: 20823472 block_used: 5290950 device: /dev/vda4 fstype: xfs inode_available: 41561682 inode_total: 41679680 inode_used: 117998 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 63621210112 size_total: 85292941312 uuid: 0b076daa-c26a-46d2-b3a6-72a8dbc6e257 - block_available: 15532522 block_size: 4096 block_total: 20823472 block_used: 5290950 device: /dev/vda4 fstype: xfs inode_available: 41561682 inode_total: 41679680 inode_used: 117998 mount: /etc options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 63621210112 size_total: 85292941312 uuid: 0b076daa-c26a-46d2-b3a6-72a8dbc6e257 - block_available: 15532522 block_size: 4096 block_total: 20823472 block_used: 5290950 device: /dev/vda4 fstype: xfs inode_available: 41561682 inode_total: 41679680 inode_used: 117998 mount: /usr options: 
ro,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 63621210112 size_total: 85292941312 uuid: 0b076daa-c26a-46d2-b3a6-72a8dbc6e257 - block_available: 15532522 block_size: 4096 block_total: 20823472 block_used: 5290950 device: /dev/vda4 fstype: xfs inode_available: 41561682 inode_total: 41679680 inode_used: 117998 mount: /sysroot/ostree/deploy/rhcos/var options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 63621210112 size_total: 85292941312 uuid: 0b076daa-c26a-46d2-b3a6-72a8dbc6e257 - block_available: 15532522 block_size: 4096 block_total: 20823472 block_used: 5290950 device: /dev/vda4 fstype: xfs inode_available: 41561682 inode_total: 41679680 inode_used: 117998 mount: /var options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,prjquota,bind size_available: 63621210112 size_total: 85292941312 uuid: 0b076daa-c26a-46d2-b3a6-72a8dbc6e257 - block_available: 220687 block_size: 1024 block_total: 358271 block_used: 137584 device: /dev/vda3 fstype: ext4 inode_available: 97936 inode_total: 98304 inode_used: 368 mount: /boot options: ro,seclabel,nosuid,nodev,relatime size_available: 225983488 size_total: 366869504 uuid: de0497b0-db1b-465a-b278-03db02455c71 - block_available: 0 block_size: 2048 block_total: 241 block_used: 241 device: /dev/sr0 fstype: iso9660 inode_available: 0 inode_total: 0 inode_used: 0 mount: /tmp/openstack-config-drive options: ro,relatime,nojoliet,check=s,map=n,blocksize=2048 size_available: 0 size_total: 493568 uuid: 2026-01-22-11-49-36-00 ansible_nodename: crc ansible_os_family: RedHat ansible_ovn_k8s_mp0: active: false device: ovn-k8s-mp0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_lockless: on [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] 
hw_timestamp_filters: [] macaddress: 0a:58:0a:d9:00:02 mtu: 1400 promisc: true timestamping: [] type: ether ansible_ovs_system: active: false device: ovs-system features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: 'on' hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: on [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: off [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: 'on' tx_gre_segmentation: 'on' tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: 'on' tx_ipxip6_segmentation: 'on' tx_lockless: on [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: 'on' tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: 'on' tx_udp_tnl_segmentation: 'on' tx_vlan_offload: 'on' tx_vlan_stag_hw_insert: 'on' vlan_challenged: off [fixed] hw_timestamp_filters: [] macaddress: fa:59:79:6a:4d:91 mtu: 1500 promisc: true timestamping: [] type: ether ansible_pkg_mgr: atomic_container ansible_playbook_python: /usr/lib/zuul/ansible/8/bin/python ansible_port: 22 ansible_proc_cmdline: BOOT_IMAGE: (hd0,gpt3)/boot/ostree/rhcos-222c9618de9c45c63e246dddef9ff68a7a0a02c06c0f77615561166d9e2254ea/vmlinuz-5.14.0-427.50.2.el9_4.x86_64 boot: UUID=de0497b0-db1b-465a-b278-03db02455c71 cgroup_no_v1: all ignition.platform.id: metal ostree: /ostree/boot.1/rhcos/222c9618de9c45c63e246dddef9ff68a7a0a02c06c0f77615561166d9e2254ea/0 psi: '0' root: UUID=0b076daa-c26a-46d2-b3a6-72a8dbc6e257 rootflags: prjquota rw: true systemd.unified_cgroup_hierarchy: '1' ansible_processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor - '8' - AuthenticAMD - AMD EPYC-Rome Processor - '9' - AuthenticAMD - AMD EPYC-Rome Processor - '10' - AuthenticAMD - AMD EPYC-Rome Processor - '11' - AuthenticAMD - AMD EPYC-Rome Processor ansible_processor_cores: 1 ansible_processor_count: 12 ansible_processor_nproc: 12 ansible_processor_threads_per_core: 1 ansible_processor_vcpus: 12 ansible_product_name: OpenStack Nova ansible_product_serial: NA ansible_product_uuid: NA 
ansible_product_version: 26.3.1 ansible_python: executable: /usr/bin/python3.9 has_sslcontext: true type: cpython version: major: 3 micro: 18 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 18 - final - 0 ansible_python_interpreter: auto ansible_python_version: 3.9.18 ansible_real_group_id: 1000 ansible_real_user_id: 1000 ansible_run_tags: - all ansible_scp_extra_args: -o PermitLocalCommand=no ansible_selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted ansible_selinux_python_present: true ansible_service_mgr: systemd ansible_sftp_extra_args: -o PermitLocalCommand=no ansible_skip_tags: [] ansible_ssh_common_args: -o PermitLocalCommand=no ansible_ssh_executable: ssh ansible_ssh_extra_args: -o PermitLocalCommand=no ansible_ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBM2gL1NPmTDa3suVZVEFlN8tPkayDFQSSTCPBpXJWQc3prsrt6ZyqK7Td0ww3tUUrsgsfAU/zg0wlL8Uy7glFNY= ansible_ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ansible_ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIOBf2eYlZCq3ti7hpf/c/0cPvvnQl+7GDtrgSQLW+P8M ansible_ssh_host_key_ed25519_public_keytype: ssh-ed25519 ansible_ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQCdFRMPX7MELHeXOXX3AiRE+YCx8qkH6fWyHDvxjIPlNjC7fN3C7MUv2UkRbGNwHilJova9iQLM6fZmHa1AcnwQOjgoQZunWkH7gGzqE3oB5NluUPuOiZ90ukh1xDK1SBdpIGsgRnmuoPlV5he8qFF35Y3fohe0WQEt5OQw8TUT9TMtwsyznLAs/0GJ590sN+PHh232WJOB6T7/zdEzmN8i/mPqVOYXem7yawqiMMiOkgBsfKUXI34ejxm2ltpWLk6GjT3cZKhfL2kxBPtR0WY04BZMgelDGxZBOhGHHZDIuqXcQozF29wH6ybFPwQ03yePNDUSz9xYAhPGs5ec6sWDRgXwa1FG/lgPuHw/rzuqilyK+JsBbIdjXvQ4bDG4kZd9C++k0jICLGhwKpflEGKoX+M7BZXkrjocVBLI1WM+Xfqi6lP1qI5HYvXzo70YVdAbs2l4F2LSJwjWQnUWfJgWdceJJXcsiboc5LUNn6AE+27E6CRuV7UMC9EFKO/Ltqc= ansible_ssh_host_key_rsa_public_keytype: ssh-rsa ansible_swapfree_mb: 0 ansible_swaptotal_mb: 0 ansible_system: Linux ansible_system_capabilities: - '' ansible_system_capabilities_enforced: 'True' ansible_system_vendor: OpenStack Foundation ansible_uptime_seconds: 77 ansible_user: core ansible_user_dir: /var/home/core ansible_user_gecos: CoreOS Admin ansible_user_gid: 1000 ansible_user_id: core ansible_user_shell: /bin/bash ansible_user_uid: 1000 ansible_userspace_architecture: x86_64 ansible_userspace_bits: '64' ansible_verbosity: 1 ansible_version: full: 2.15.12 major: 2 minor: 15 revision: 12 string: 2.15.12 ansible_virtualization_role: guest ansible_virtualization_tech_guest: - openstack ansible_virtualization_tech_host: - kvm ansible_virtualization_type: openstack cifmw_architecture_repo: /var/home/core/src/github.com/openstack-k8s-operators/architecture cifmw_architecture_repo_relative: src/github.com/openstack-k8s-operators/architecture cifmw_artifacts_crc_sshkey: ~/.ssh/id_rsa cifmw_basedir: /var/home/core/ci-framework-data cifmw_default_dns_servers: - 1.1.1.1 - 8.8.8.8 cifmw_dlrn_report_result: false cifmw_installyamls_repos: /var/home/core/src/github.com/openstack-k8s-operators/install_yamls cifmw_installyamls_repos_relative: src/github.com/openstack-k8s-operators/install_yamls cifmw_nolog: true cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: /var/home/core/.crc/machines/crc/kubeconfig cifmw_openshift_password: '12**********89' cifmw_openshift_setup_skip_internal_registry_tls_verify: true cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_openstack_k8s_operators_org_url: https://github.com/openstack-k8s-operators cifmw_openstack_namespace: openstack cifmw_repo: 
/var/home/core/src/github.com/openstack-k8s-operators/ci-framework cifmw_repo_relative: src/github.com/openstack-k8s-operators/ci-framework cifmw_use_crc: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller cloud_domain: ooo.test crc_ci_bootstrap_cloud_name: vexxhost crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 internal-api: ip: 172.17.0.4 storage: ip: 172.18.0.4 storage_mgmt: ip: 172.20.0.4 tenant: ip: 172.19.0.4 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 storage_mgmt: ip: 172.20.0.5 tenant: ip: 172.19.0.5 standalone: networks: default: config_nm: false ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 storage_mgmt: config_nm: false ip: 172.20.0.100 tenant: config_nm: false ip: 172.19.0.100 networks: default: mtu: '1500' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 storage_mgmt: range: 172.20.0.0/24 vlan: 23 tenant: range: 172.19.0.0/24 vlan: 22 discovered_interpreter_python: /usr/bin/python3.9 dpa_test_suite: test-minimal enable_barbican: 'false' enable_octavia: 'true' enable_ramdisk: true enable_telemetry: 'true' enable_tls: 'true' gather_subset: - all group_names: - ocps groups: all: - controller - crc - standalone computes: [] ocps: - crc rh-subscription: - standalone ungrouped: *id001 zuul_unreachable: [] inventory_dir: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/post_playbook_4 inventory_file: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/post_playbook_4/inventory.yaml inventory_hostname: crc inventory_hostname_short: crc module_setup: true nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 52b2f704-3175-40e6-96da-8c8b45b50226 host_id: d19710e37f7b2620eb9f1bc9cfdfc06732b1f0c31221781941dd4533 interface_ip: 38.102.83.97 label: crc-cloud-ocp-4-18-1-3xl private_ipv4: 38.102.83.97 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.97 public_ipv6: '' region: RegionOne slot: null omit: __omit_place_holder__10ecd797ec9368ecd51cdd69cf5ce40a70e7da09 operator_namespace: openstack-operators osp_17_ceph_repos: - rhceph-7-tools-for-rhel-9-x86_64-rpms osp_17_repos: - rhel-9-for-x86_64-baseos-eus-rpms - rhel-9-for-x86_64-appstream-eus-rpms - rhel-9-for-x86_64-highavailability-eus-rpms - openstack-17.1-for-rhel-9-x86_64-rpms - fast-datapath-for-rhel-9-x86_64-rpms playbook_dir: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true standalone_gateway: 192.168.122.10 standalone_ip: 192.168.122.100 standalone_private_key: /home/zuul/.ssh/id_rsa standalone_vm_inventory: standalone_vm_inventory unsafe_vars: adoption_extra_vars: 'supported_volume_backends: [] supported_backup_backends: [] ' ansible_connection: ssh ansible_host: 38.102.83.97 ansible_port: 22 ansible_python_interpreter: auto ansible_user: core cifmw_artifacts_crc_sshkey: ~/.ssh/id_rsa cifmw_dlrn_report_result: false cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '12**********89' cifmw_openshift_setup_skip_internal_registry_tls_verify: true cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin 
cifmw_use_crc: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller cloud_domain: ooo.test crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 internal-api: ip: 172.17.0.4 storage: ip: 172.18.0.4 storage_mgmt: ip: 172.20.0.4 tenant: ip: 172.19.0.4 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 storage_mgmt: ip: 172.20.0.5 tenant: ip: 172.19.0.5 standalone: networks: default: config_nm: false ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 storage_mgmt: config_nm: false ip: 172.20.0.100 tenant: config_nm: false ip: 172.19.0.100 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 storage_mgmt: range: 172.20.0.0/24 vlan: 23 tenant: range: 172.19.0.0/24 vlan: 22 dpa_test_suite: test-minimal enable_barbican: 'false' enable_octavia: 'true' enable_ramdisk: true enable_telemetry: 'true' enable_tls: 'true' nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 52b2f704-3175-40e6-96da-8c8b45b50226 host_id: d19710e37f7b2620eb9f1bc9cfdfc06732b1f0c31221781941dd4533 interface_ip: 38.102.83.97 label: crc-cloud-ocp-4-18-1-3xl private_ipv4: 38.102.83.97 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.97 public_ipv6: '' region: RegionOne slot: null osp_17_ceph_repos: - rhceph-7-tools-for-rhel-9-x86_64-rpms osp_17_repos: - rhel-9-for-x86_64-baseos-eus-rpms - rhel-9-for-x86_64-appstream-eus-rpms - rhel-9-for-x86_64-highavailability-eus-rpms - openstack-17.1-for-rhel-9-x86_64-rpms - fast-datapath-for-rhel-9-x86_64-rpms push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true standalone_gateway: 192.168.122.10 standalone_ip: 192.168.122.100 standalone_private_key: /home/zuul/.ssh/id_rsa standalone_vm_inventory: standalone_vm_inventory upstream_control_plane_dns: 192.168.122.10 use_ceph: 'false' zuul_log_collection: true upstream_control_plane_dns: 192.168.122.10 use_ceph: 'false' zuul: _inheritance_path: - '' - '' - '' - '' - '' - '' ansible_version: '8' attempts: 1 branch: main build: 3911aa2e57464e5ea41c24d6ab361757 build_refs: - branch: main change: '1202' change_message: "uni04delta-ipv6: No OVN DHCP for baremetal ports\n\nAdds `ovn/disable_ovn_dhcp_for_baremetal_ports` configuration to the uni04delta-ipv6 `config_download.yaml` using `neutron::config::plugin_ml2_config`.\r\n \ \r\nThis disables OVN's built-in DHCP service for baremetal ports, allowing external DHCP to be used instead. 
Uses the config class approach since the direct puppet parameter was not backported to OSP 17.1.\r\n\r\nJira: [OSPRH-20021](https://issues.redhat.com//browse/OSPRH-20021)" change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption topic: null buildset: ae39f936defb47d8b8026507b61685a3 buildset_refs: - branch: main change: '1202' change_message: "uni04delta-ipv6: No OVN DHCP for baremetal ports\n\nAdds `ovn/disable_ovn_dhcp_for_baremetal_ports` configuration to the uni04delta-ipv6 `config_download.yaml` using `neutron::config::plugin_ml2_config`.\r\n \ \r\nThis disables OVN's built-in DHCP service for baremetal ports, allowing external DHCP to be used instead. Uses the config class approach since the direct puppet parameter was not backported to OSP 17.1.\r\n\r\nJira: [OSPRH-20021](https://issues.redhat.com//browse/OSPRH-20021)" change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption topic: null change: '1202' change_message: "uni04delta-ipv6: No OVN DHCP for baremetal ports\n\nAdds `ovn/disable_ovn_dhcp_for_baremetal_ports` configuration to the uni04delta-ipv6 `config_download.yaml` using `neutron::config::plugin_ml2_config`.\r\n \ \r\nThis disables OVN's built-in DHCP service for baremetal ports, allowing external DHCP to be used instead. Uses the config class approach since the direct puppet parameter was not backported to OSP 17.1.\r\n\r\nJira: [OSPRH-20021](https://issues.redhat.com//browse/OSPRH-20021)" change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 child_jobs: [] commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 event_id: 46eb15f0-f788-11f0-870c-f967a8119d20 executor: hostname: ze04.softwarefactory-project.io inventory_file: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/inventory.yaml log_root: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work/logs result_data_file: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work/results.json src_root: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work/src work_root: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work items: - branch: main change: '1202' change_message: "uni04delta-ipv6: No OVN DHCP for baremetal ports\n\nAdds `ovn/disable_ovn_dhcp_for_baremetal_ports` configuration to the uni04delta-ipv6 `config_download.yaml` using `neutron::config::plugin_ml2_config`.\r\n \ \r\nThis disables OVN's built-in DHCP service for baremetal ports, allowing external DHCP to be used instead. 
Uses the config class approach since the direct puppet parameter was not backported to OSP 17.1.\r\n\r\nJira: [OSPRH-20021](https://issues.redhat.com//browse/OSPRH-20021)" change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption topic: null job: adoption-standalone-to-crc-no-ceph jobtags: [] max_attempts: 1 message: dW5pMDRkZWx0YS1pcHY2OiBObyBPVk4gREhDUCBmb3IgYmFyZW1ldGFsIHBvcnRzCgpBZGRzIGBvdm4vZGlzYWJsZV9vdm5fZGhjcF9mb3JfYmFyZW1ldGFsX3BvcnRzYCBjb25maWd1cmF0aW9uIHRvIHRoZSB1bmkwNGRlbHRhLWlwdjYgYGNvbmZpZ19kb3dubG9hZC55YW1sYCB1c2luZyBgbmV1dHJvbjo6Y29uZmlnOjpwbHVnaW5fbWwyX2NvbmZpZ2AuDQogICAgDQpUaGlzIGRpc2FibGVzIE9WTidzIGJ1aWx0LWluIERIQ1Agc2VydmljZSBmb3IgYmFyZW1ldGFsIHBvcnRzLCBhbGxvd2luZyBleHRlcm5hbCBESENQIHRvIGJlIHVzZWQgaW5zdGVhZC4gVXNlcyB0aGUgY29uZmlnIGNsYXNzIGFwcHJvYWNoIHNpbmNlIHRoZSBkaXJlY3QgcHVwcGV0IHBhcmFtZXRlciB3YXMgbm90IGJhY2twb3J0ZWQgdG8gT1NQIDE3LjEuDQoNCkppcmE6IFtPU1BSSC0yMDAyMV0oaHR0cHM6Ly9pc3N1ZXMucmVkaGF0LmNvbS8vYnJvd3NlL09TUFJILTIwMDIxKQ== patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 pipeline: github-check playbook_context: playbook_projects: trusted/project_0/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 6b8ed36de1a9fb17d1895a907c56e0ebed359010 trusted/project_1/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 1d0e62540c53a110ce48eaedb202bbd70f327c48 trusted/project_2/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d6509b6cfec74740c76a12d94dd9fc8ce1990297 trusted/project_3/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: daa791825fcb1f23b3b87c68747b1dbe292d2d2d untrusted/project_0/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: daa791825fcb1f23b3b87c68747b1dbe292d2d2d untrusted/project_1/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 6b8ed36de1a9fb17d1895a907c56e0ebed359010 untrusted/project_2/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 1d0e62540c53a110ce48eaedb202bbd70f327c48 untrusted/project_3/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d6509b6cfec74740c76a12d94dd9fc8ce1990297 playbooks: - path: untrusted/project_3/review.rdoproject.org/rdo-jobs/playbooks/data_plane_adoption/deploy_standalone_run_repo_tests.yaml roles: - checkout: main checkout_description: project override ref link_name: ansible/playbook_0/role_0/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_0/role_0/ci-framework/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_1/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_0/role_1/config/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_2/zuul-jobs link_target: 
untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_0/role_2/zuul-jobs/roles - checkout: master checkout_description: playbook branch link_name: ansible/playbook_0/role_3/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_0/role_3/rdo-jobs/roles post_review: false project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption projects: github.com/crc-org/crc-cloud: canonical_hostname: github.com canonical_name: github.com/crc-org/crc-cloud checkout: main checkout_description: project override ref commit: 42957126d9d9b9d1372615db325b82bd992fa335 name: crc-org/crc-cloud required: true short_name: crc-cloud src_dir: src/github.com/crc-org/crc-cloud github.com/openstack-k8s-operators/architecture: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/architecture checkout: main checkout_description: project override ref commit: 2eaa1ef0f72a4e7ef0a5042b135993cbfc51eacc name: openstack-k8s-operators/architecture required: true short_name: architecture src_dir: src/github.com/openstack-k8s-operators/architecture github.com/openstack-k8s-operators/ci-framework: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main checkout_description: project override ref commit: daa791825fcb1f23b3b87c68747b1dbe292d2d2d name: openstack-k8s-operators/ci-framework required: true short_name: ci-framework src_dir: src/github.com/openstack-k8s-operators/ci-framework github.com/openstack-k8s-operators/data-plane-adoption: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption checkout: main checkout_description: project override ref commit: 7346bd354c161fbd39016de93e1981fb8edfe179 name: openstack-k8s-operators/data-plane-adoption required: true short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption github.com/openstack-k8s-operators/install_yamls: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/install_yamls checkout: main checkout_description: project override ref commit: 13897053815e2df424a56208aa288cf95b7283d1 name: openstack-k8s-operators/install_yamls required: true short_name: install_yamls src_dir: src/github.com/openstack-k8s-operators/install_yamls opendev.org/zuul/zuul-jobs: canonical_hostname: opendev.org canonical_name: opendev.org/zuul/zuul-jobs checkout: master checkout_description: project default branch commit: 1d0e62540c53a110ce48eaedb202bbd70f327c48 name: zuul/zuul-jobs required: true short_name: zuul-jobs src_dir: src/opendev.org/zuul/zuul-jobs review.rdoproject.org/config: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/config checkout: master checkout_description: project default branch commit: 6b8ed36de1a9fb17d1895a907c56e0ebed359010 name: config required: true short_name: config src_dir: src/review.rdoproject.org/config review.rdoproject.org/rdo-jobs: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/rdo-jobs checkout: master checkout_description: project default branch commit: d6509b6cfec74740c76a12d94dd9fc8ce1990297 name: rdo-jobs required: true short_name: rdo-jobs src_dir: src/review.rdoproject.org/rdo-jobs ref: refs/pull/1202/head resources: {} tenant: 
rdoproject.org timeout: 14400 topic: null voting: true zuul_execution_branch: main zuul_execution_canonical_name_and_path: github.com/openstack-k8s-operators/ci-framework/ci/playbooks/e2e-collect-logs.yml zuul_execution_phase: post zuul_execution_phase_index: '4' zuul_execution_trusted: 'False' zuul_log_collection: true zuul_success: 'True' zuul_will_retry: 'False' standalone: adoption_extra_vars: 'supported_volume_backends: [] supported_backup_backends: [] ' ansible_all_ipv4_addresses: - 38.102.83.154 ansible_all_ipv6_addresses: - fe80::f816:3eff:fe0d:6f45 ansible_apparmor: status: disabled ansible_architecture: x86_64 ansible_bios_date: 04/01/2014 ansible_bios_vendor: SeaBIOS ansible_bios_version: 1.15.0-1 ansible_board_asset_tag: NA ansible_board_name: NA ansible_board_serial: NA ansible_board_vendor: NA ansible_board_version: NA ansible_chassis_asset_tag: NA ansible_chassis_serial: NA ansible_chassis_vendor: QEMU ansible_chassis_version: pc-i440fx-6.2 ansible_check_mode: false ansible_cmdline: BOOT_IMAGE: (hd0,gpt3)/vmlinuz-5.14.0-284.11.1.el9_2.x86_64 console: ttyS0,115200n8 crashkernel: 1G-4G:192M,4G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true root: UUID=a3dd82de-ffc6-4652-88b9-80e003b8f20a ansible_config_file: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/post_playbook_4/ansible.cfg ansible_connection: ssh ansible_date_time: date: '2026-01-22' day: '22' epoch: '1769082662' epoch_int: '1769082662' hour: '06' iso8601: '2026-01-22T11:51:02Z' iso8601_basic: 20260122T065102760627 iso8601_basic_short: 20260122T065102 iso8601_micro: '2026-01-22T11:51:02.760627Z' minute: '51' month: '01' second: '02' time: 06:51:02 tz: EST tz_dst: EDT tz_offset: '-0500' weekday: Thursday weekday_number: '4' weeknumber: '03' year: '2026' ansible_default_ipv4: address: 38.102.83.154 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: fa:16:3e:0d:6f:45 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether ansible_default_ipv6: {} ansible_device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 vda3: - boot vda4: - root masters: {} uuids: sr0: - 2026-01-22-11-50-05-00 vda2: - 7B77-95E7 vda3: - b141154b-6a70-437a-a97f-d160c9ba37eb vda4: - a3dd82de-ffc6-4652-88b9-80e003b8f20a ansible_devices: sr0: holders: [] host: 'IDE interface: Intel Corporation 82371SB PIIX3 IDE [Natoma/Triton II]' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2026-01-22-11-50-05-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '0' vendor: QEMU virtual: 1 vda: holders: [] host: 'SCSI storage controller: Red Hat, Inc. 
Virtio block device' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: [] sectors: '2048' sectorsize: 512 size: 1.00 MB start: '2048' uuid: null vda2: holders: [] links: ids: [] labels: [] masters: [] uuids: - 7B77-95E7 sectors: '409600' sectorsize: 512 size: 200.00 MB start: '4096' uuid: 7B77-95E7 vda3: holders: [] links: ids: [] labels: - boot masters: [] uuids: - b141154b-6a70-437a-a97f-d160c9ba37eb sectors: '1024000' sectorsize: 512 size: 500.00 MB start: '413696' uuid: b141154b-6a70-437a-a97f-d160c9ba37eb vda4: holders: [] links: ids: [] labels: - root masters: [] uuids: - a3dd82de-ffc6-4652-88b9-80e003b8f20a sectors: '837423071' sectorsize: 512 size: 399.31 GB start: '1437696' uuid: a3dd82de-ffc6-4652-88b9-80e003b8f20a removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '838860800' sectorsize: '512' size: 400.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 ansible_diff_mode: false ansible_distribution: RedHat ansible_distribution_file_parsed: true ansible_distribution_file_path: /etc/redhat-release ansible_distribution_file_search_string: Red Hat ansible_distribution_file_variety: RedHat ansible_distribution_major_version: '9' ansible_distribution_release: Plow ansible_distribution_version: '9.2' ansible_dns: nameservers: - 199.204.44.24 - 199.204.47.54 search: - novalocal ansible_domain: novalocal ansible_effective_group_id: 1000 ansible_effective_user_id: 1000 ansible_env: BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus HOME: /home/zuul LANG: C.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: /home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 34400 22 SSH_CONNECTION: 38.102.83.114 34400 38.102.83.154 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '1' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f ansible_eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: 
off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_lockless: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.154 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::f816:3eff:fe0d:6f45 prefix: '64' scope: link macaddress: fa:16:3e:0d:6f:45 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether ansible_facts: _ansible_facts_gathered: true all_ipv4_addresses: - 38.102.83.154 all_ipv6_addresses: - fe80::f816:3eff:fe0d:6f45 ansible_local: {} apparmor: status: disabled architecture: x86_64 bios_date: 04/01/2014 bios_vendor: SeaBIOS bios_version: 1.15.0-1 board_asset_tag: NA board_name: NA board_serial: NA board_vendor: NA board_version: NA chassis_asset_tag: NA chassis_serial: NA chassis_vendor: QEMU chassis_version: pc-i440fx-6.2 cmdline: BOOT_IMAGE: (hd0,gpt3)/vmlinuz-5.14.0-284.11.1.el9_2.x86_64 console: ttyS0,115200n8 crashkernel: 1G-4G:192M,4G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true root: UUID=a3dd82de-ffc6-4652-88b9-80e003b8f20a date_time: date: '2026-01-22' day: '22' epoch: '1769082662' epoch_int: '1769082662' hour: '06' iso8601: '2026-01-22T11:51:02Z' iso8601_basic: 20260122T065102760627 iso8601_basic_short: 20260122T065102 iso8601_micro: '2026-01-22T11:51:02.760627Z' minute: '51' month: '01' second: '02' time: 06:51:02 tz: EST tz_dst: EDT tz_offset: '-0500' weekday: Thursday weekday_number: '4' weeknumber: '03' year: '2026' default_ipv4: address: 38.102.83.154 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: fa:16:3e:0d:6f:45 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether default_ipv6: {} device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 vda3: - boot vda4: - root masters: {} uuids: sr0: - 2026-01-22-11-50-05-00 vda2: - 7B77-95E7 vda3: - b141154b-6a70-437a-a97f-d160c9ba37eb vda4: - a3dd82de-ffc6-4652-88b9-80e003b8f20a devices: sr0: holders: [] host: 'IDE interface: Intel Corporation 82371SB PIIX3 IDE [Natoma/Triton II]' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2026-01-22-11-50-05-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '0' vendor: QEMU virtual: 1 vda: holders: [] host: 'SCSI storage controller: Red Hat, Inc. 
Virtio block device' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: [] sectors: '2048' sectorsize: 512 size: 1.00 MB start: '2048' uuid: null vda2: holders: [] links: ids: [] labels: [] masters: [] uuids: - 7B77-95E7 sectors: '409600' sectorsize: 512 size: 200.00 MB start: '4096' uuid: 7B77-95E7 vda3: holders: [] links: ids: [] labels: - boot masters: [] uuids: - b141154b-6a70-437a-a97f-d160c9ba37eb sectors: '1024000' sectorsize: 512 size: 500.00 MB start: '413696' uuid: b141154b-6a70-437a-a97f-d160c9ba37eb vda4: holders: [] links: ids: [] labels: - root masters: [] uuids: - a3dd82de-ffc6-4652-88b9-80e003b8f20a sectors: '837423071' sectorsize: 512 size: 399.31 GB start: '1437696' uuid: a3dd82de-ffc6-4652-88b9-80e003b8f20a removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '838860800' sectorsize: '512' size: 400.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 discovered_interpreter_python: /usr/bin/python3 distribution: RedHat distribution_file_parsed: true distribution_file_path: /etc/redhat-release distribution_file_search_string: Red Hat distribution_file_variety: RedHat distribution_major_version: '9' distribution_release: Plow distribution_version: '9.2' dns: nameservers: - 199.204.44.24 - 199.204.47.54 search: - novalocal domain: novalocal effective_group_id: 1000 effective_user_id: 1000 env: BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus HOME: /home/zuul LANG: C.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: /home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 34400 22 SSH_CONNECTION: 38.102.83.114 34400 38.102.83.154 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '1' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] netns_local: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] 
tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_lockless: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.154 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::f816:3eff:fe0d:6f45 prefix: '64' scope: link macaddress: fa:16:3e:0d:6f:45 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether fibre_channel_wwn: [] fips: false form_factor: Other fqdn: np0005592099.novalocal gather_subset: - all hostname: np0005592099 hostnqn: '' interfaces: - lo - eth0 is_chroot: false iscsi_iqn: '' kernel: 5.14.0-284.11.1.el9_2.x86_64 kernel_version: '#1 SMP PREEMPT_DYNAMIC Wed Apr 12 10:45:03 EDT 2023' lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] netns_local: on [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_lockless: on [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback loadavg: 15m: 0.02 1m: 0.26 5m: 0.08 locally_reachable_ips: ipv4: 
- 38.102.83.154 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:fe0d:6f45 lsb: {} lvm: N/A machine: x86_64 machine_id: 3e33164a5d30c3060156ec878079cfde memfree_mb: 15259 memory_mb: nocache: free: 15471 used: 267 real: free: 15259 total: 15738 used: 479 swap: cached: 0 free: 0 total: 0 used: 0 memtotal_mb: 15738 module_setup: true mounts: - block_available: 103621572 block_size: 4096 block_total: 104675323 block_used: 1053751 device: /dev/vda4 fstype: xfs inode_available: 209319338 inode_total: 209355760 inode_used: 36422 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 424433958912 size_total: 428750123008 uuid: a3dd82de-ffc6-4652-88b9-80e003b8f20a - block_available: 86827 block_size: 4096 block_total: 126632 block_used: 39805 device: /dev/vda3 fstype: xfs inode_available: 255693 inode_total: 256000 inode_used: 307 mount: /boot options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 355643392 size_total: 518684672 uuid: b141154b-6a70-437a-a97f-d160c9ba37eb - block_available: 49363 block_size: 4096 block_total: 51145 block_used: 1782 device: /dev/vda2 fstype: vfat inode_available: 0 inode_total: 0 inode_used: 0 mount: /boot/efi options: rw,relatime,fmask=0077,dmask=0077,codepage=437,iocharset=ascii,shortname=winnt,errors=remount-ro size_available: 202190848 size_total: 209489920 uuid: 7B77-95E7 - block_available: 49363 block_size: 4096 block_total: 51145 block_used: 1782 device: /dev/vda2 fstype: vfat inode_available: 0 inode_total: 0 inode_used: 0 mount: /efi options: rw,relatime,fmask=0077,dmask=0077,codepage=437,iocharset=ascii,shortname=winnt,errors=remount-ro size_available: 202190848 size_total: 209489920 uuid: 7B77-95E7 nodename: np0005592099.novalocal os_family: RedHat pkg_mgr: dnf proc_cmdline: BOOT_IMAGE: (hd0,gpt3)/vmlinuz-5.14.0-284.11.1.el9_2.x86_64 console: - tty0 - ttyS0,115200n8 crashkernel: 1G-4G:192M,4G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true root: UUID=a3dd82de-ffc6-4652-88b9-80e003b8f20a processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor processor_cores: 1 processor_count: 8 processor_nproc: 8 processor_threads_per_core: 1 processor_vcpus: 8 product_name: OpenStack Nova product_serial: NA product_uuid: NA product_version: 26.3.1 python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 16 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 16 - final - 0 python_version: 3.9.16 real_group_id: 1000 real_user_id: 1000 selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted selinux_python_present: true service_mgr: systemd ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBHshBn5HAHz88VAkYPy+zW7Q4qbNj/c4PRsOWDP2eAC6W7z5/Oz3tsmBCpjy9PSp+wJviFjb1CyENfQPPw5Hwsk= ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAINhjauWuVAzrHXvL4tkTApZ4BJedjnOwXPjmga4t5GLF ssh_host_key_ed25519_public_keytype: ssh-ed25519 ssh_host_key_rsa_public: 
AAAAB3NzaC1yc2EAAAADAQABAAABgQDXskHoYw0f8mBNYy1A0QJnoyrIF/GzFRqR0Pb0w28iN+QGXRtakj3Y5pxKAOmoRhWMa++QaLtn89GSd4w5mOLcc9nGZ809fAh/U+IYXjn9+ujU0tgEoU5eVTiOE8ePEoPAOmvqsusxzQkhs+sSONEnqXBSDwUK7I6gcOpp1sWG3gak3nJMF4rH2Xf9QZ3QNpxarxvZPiClmaFXL7lRg00rqnQsUY5BzReYn2vqdocuDuaGI749basPgV6JbcfGqwJd3XG3//6YOA5FvhwTx69p9qw+fi82Yoe8g/nT4qlFiDB03vnbw3Rj2knZKJOlG0P8h0EptUzp62NJY1TLSlBZduf+TbcioUqXNDxUqiugVpAOjA9Jo43LdF/16DGQ4TzHD+5ue4AZhTDKVWyz18nmKivN9flssNP5yONLEIrG/glmQpzo6FzDNTSIEliF3DbgG79mRZnBzb8sLSAccZq4aMPGtUzzdS53YAA3JOVzfs5qG2yICtl0D6HUNQmtcxs= ssh_host_key_rsa_public_keytype: ssh-rsa swapfree_mb: 0 swaptotal_mb: 0 system: Linux system_capabilities: - '' system_capabilities_enforced: 'True' system_vendor: OpenStack Foundation uptime_seconds: 52 user_dir: /home/zuul user_gecos: '' user_gid: 1000 user_id: zuul user_shell: /bin/bash user_uid: 1000 userspace_architecture: x86_64 userspace_bits: '64' virtualization_role: guest virtualization_tech_guest: - openstack virtualization_tech_host: - kvm virtualization_type: openstack ansible_fibre_channel_wwn: [] ansible_fips: false ansible_forks: 5 ansible_form_factor: Other ansible_fqdn: np0005592099.novalocal ansible_host: 38.102.83.154 ansible_hostname: np0005592099 ansible_hostnqn: '' ansible_interfaces: - lo - eth0 ansible_inventory_sources: - /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/post_playbook_4/inventory.yaml ansible_is_chroot: false ansible_iscsi_iqn: '' ansible_kernel: 5.14.0-284.11.1.el9_2.x86_64 ansible_kernel_version: '#1 SMP PREEMPT_DYNAMIC Wed Apr 12 10:45:03 EDT 2023' ansible_lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] fcoe_mtu: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] netns_local: on [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_lockless: on [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] 
hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback ansible_loadavg: 15m: 0.02 1m: 0.26 5m: 0.08 ansible_local: {} ansible_locally_reachable_ips: ipv4: - 38.102.83.154 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:fe0d:6f45 ansible_lsb: {} ansible_lvm: N/A ansible_machine: x86_64 ansible_machine_id: 3e33164a5d30c3060156ec878079cfde ansible_memfree_mb: 15259 ansible_memory_mb: nocache: free: 15471 used: 267 real: free: 15259 total: 15738 used: 479 swap: cached: 0 free: 0 total: 0 used: 0 ansible_memtotal_mb: 15738 ansible_mounts: - block_available: 103621572 block_size: 4096 block_total: 104675323 block_used: 1053751 device: /dev/vda4 fstype: xfs inode_available: 209319338 inode_total: 209355760 inode_used: 36422 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 424433958912 size_total: 428750123008 uuid: a3dd82de-ffc6-4652-88b9-80e003b8f20a - block_available: 86827 block_size: 4096 block_total: 126632 block_used: 39805 device: /dev/vda3 fstype: xfs inode_available: 255693 inode_total: 256000 inode_used: 307 mount: /boot options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 355643392 size_total: 518684672 uuid: b141154b-6a70-437a-a97f-d160c9ba37eb - block_available: 49363 block_size: 4096 block_total: 51145 block_used: 1782 device: /dev/vda2 fstype: vfat inode_available: 0 inode_total: 0 inode_used: 0 mount: /boot/efi options: rw,relatime,fmask=0077,dmask=0077,codepage=437,iocharset=ascii,shortname=winnt,errors=remount-ro size_available: 202190848 size_total: 209489920 uuid: 7B77-95E7 - block_available: 49363 block_size: 4096 block_total: 51145 block_used: 1782 device: /dev/vda2 fstype: vfat inode_available: 0 inode_total: 0 inode_used: 0 mount: /efi options: rw,relatime,fmask=0077,dmask=0077,codepage=437,iocharset=ascii,shortname=winnt,errors=remount-ro size_available: 202190848 size_total: 209489920 uuid: 7B77-95E7 ansible_nodename: np0005592099.novalocal ansible_os_family: RedHat ansible_pkg_mgr: dnf ansible_playbook_python: /usr/lib/zuul/ansible/8/bin/python ansible_port: 22 ansible_proc_cmdline: BOOT_IMAGE: (hd0,gpt3)/vmlinuz-5.14.0-284.11.1.el9_2.x86_64 console: - tty0 - ttyS0,115200n8 crashkernel: 1G-4G:192M,4G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true root: UUID=a3dd82de-ffc6-4652-88b9-80e003b8f20a ansible_processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor ansible_processor_cores: 1 ansible_processor_count: 8 ansible_processor_nproc: 8 ansible_processor_threads_per_core: 1 ansible_processor_vcpus: 8 ansible_product_name: OpenStack Nova ansible_product_serial: NA ansible_product_uuid: NA ansible_product_version: 26.3.1 ansible_python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 16 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 16 - final - 0 ansible_python_interpreter: auto ansible_python_version: 3.9.16 ansible_real_group_id: 1000 ansible_real_user_id: 1000 ansible_run_tags: - all ansible_scp_extra_args: -o 
PermitLocalCommand=no ansible_selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted ansible_selinux_python_present: true ansible_service_mgr: systemd ansible_sftp_extra_args: -o PermitLocalCommand=no ansible_skip_tags: [] ansible_ssh_common_args: -o PermitLocalCommand=no ansible_ssh_executable: ssh ansible_ssh_extra_args: -o PermitLocalCommand=no ansible_ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBHshBn5HAHz88VAkYPy+zW7Q4qbNj/c4PRsOWDP2eAC6W7z5/Oz3tsmBCpjy9PSp+wJviFjb1CyENfQPPw5Hwsk= ansible_ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ansible_ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAINhjauWuVAzrHXvL4tkTApZ4BJedjnOwXPjmga4t5GLF ansible_ssh_host_key_ed25519_public_keytype: ssh-ed25519 ansible_ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQDXskHoYw0f8mBNYy1A0QJnoyrIF/GzFRqR0Pb0w28iN+QGXRtakj3Y5pxKAOmoRhWMa++QaLtn89GSd4w5mOLcc9nGZ809fAh/U+IYXjn9+ujU0tgEoU5eVTiOE8ePEoPAOmvqsusxzQkhs+sSONEnqXBSDwUK7I6gcOpp1sWG3gak3nJMF4rH2Xf9QZ3QNpxarxvZPiClmaFXL7lRg00rqnQsUY5BzReYn2vqdocuDuaGI749basPgV6JbcfGqwJd3XG3//6YOA5FvhwTx69p9qw+fi82Yoe8g/nT4qlFiDB03vnbw3Rj2knZKJOlG0P8h0EptUzp62NJY1TLSlBZduf+TbcioUqXNDxUqiugVpAOjA9Jo43LdF/16DGQ4TzHD+5ue4AZhTDKVWyz18nmKivN9flssNP5yONLEIrG/glmQpzo6FzDNTSIEliF3DbgG79mRZnBzb8sLSAccZq4aMPGtUzzdS53YAA3JOVzfs5qG2yICtl0D6HUNQmtcxs= ansible_ssh_host_key_rsa_public_keytype: ssh-rsa ansible_swapfree_mb: 0 ansible_swaptotal_mb: 0 ansible_system: Linux ansible_system_capabilities: - '' ansible_system_capabilities_enforced: 'True' ansible_system_vendor: OpenStack Foundation ansible_uptime_seconds: 52 ansible_user: zuul ansible_user_dir: /home/zuul ansible_user_gecos: '' ansible_user_gid: 1000 ansible_user_id: zuul ansible_user_shell: /bin/bash ansible_user_uid: 1000 ansible_userspace_architecture: x86_64 ansible_userspace_bits: '64' ansible_verbosity: 1 ansible_version: full: 2.15.12 major: 2 minor: 15 revision: 12 string: 2.15.12 ansible_virtualization_role: guest ansible_virtualization_tech_guest: - openstack ansible_virtualization_tech_host: - kvm ansible_virtualization_type: openstack cifmw_architecture_repo: /home/zuul/src/github.com/openstack-k8s-operators/architecture cifmw_architecture_repo_relative: src/github.com/openstack-k8s-operators/architecture cifmw_artifacts_crc_sshkey: ~/.ssh/id_rsa cifmw_basedir: /home/zuul/ci-framework-data cifmw_default_dns_servers: - 1.1.1.1 - 8.8.8.8 cifmw_dlrn_report_result: false cifmw_installyamls_repos: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls cifmw_installyamls_repos_relative: src/github.com/openstack-k8s-operators/install_yamls cifmw_nolog: true cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: /home/zuul/.crc/machines/crc/kubeconfig cifmw_openshift_password: '12**********89' cifmw_openshift_setup_skip_internal_registry_tls_verify: true cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_openstack_k8s_operators_org_url: https://github.com/openstack-k8s-operators cifmw_openstack_namespace: openstack cifmw_repo: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework cifmw_repo_relative: src/github.com/openstack-k8s-operators/ci-framework cifmw_use_crc: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller cloud_domain: ooo.test crc_ci_bootstrap_cloud_name: vexxhost crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 internal-api: ip: 172.17.0.4 storage: ip: 172.18.0.4 storage_mgmt: ip: 172.20.0.4 tenant: ip: 
172.19.0.4 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 storage_mgmt: ip: 172.20.0.5 tenant: ip: 172.19.0.5 standalone: networks: default: config_nm: false ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 storage_mgmt: config_nm: false ip: 172.20.0.100 tenant: config_nm: false ip: 172.19.0.100 networks: default: mtu: '1500' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 storage_mgmt: range: 172.20.0.0/24 vlan: 23 tenant: range: 172.19.0.0/24 vlan: 22 discovered_interpreter_python: /usr/bin/python3 dpa_test_suite: test-minimal enable_barbican: 'false' enable_octavia: 'true' enable_ramdisk: true enable_telemetry: 'true' enable_tls: 'true' gather_subset: - all group_names: - rh-subscription groups: all: - controller - crc - standalone computes: [] ocps: - crc rh-subscription: - standalone ungrouped: *id001 zuul_unreachable: [] inventory_dir: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/post_playbook_4 inventory_file: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/post_playbook_4/inventory.yaml inventory_hostname: standalone inventory_hostname_short: standalone module_setup: true nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 71c2ce28-24b8-4530-b131-72e04889a182 host_id: 13ae1ffea0b266ee75d72a572609f55d1fe724f11576cedb6317b057 interface_ip: 38.102.83.154 label: cloud-rhel-9-2-tripleo private_ipv4: 38.102.83.154 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.154 public_ipv6: '' region: RegionOne slot: null omit: __omit_place_holder__10ecd797ec9368ecd51cdd69cf5ce40a70e7da09 operator_namespace: openstack-operators osp_17_ceph_repos: - rhceph-7-tools-for-rhel-9-x86_64-rpms osp_17_repos: - rhel-9-for-x86_64-baseos-eus-rpms - rhel-9-for-x86_64-appstream-eus-rpms - rhel-9-for-x86_64-highavailability-eus-rpms - openstack-17.1-for-rhel-9-x86_64-rpms - fast-datapath-for-rhel-9-x86_64-rpms playbook_dir: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true standalone_gateway: 192.168.122.10 standalone_ip: 192.168.122.100 standalone_private_key: /home/zuul/.ssh/id_rsa standalone_vm_inventory: standalone_vm_inventory unsafe_vars: adoption_extra_vars: 'supported_volume_backends: [] supported_backup_backends: [] ' ansible_connection: ssh ansible_host: 38.102.83.154 ansible_port: 22 ansible_python_interpreter: auto ansible_user: zuul cifmw_artifacts_crc_sshkey: ~/.ssh/id_rsa cifmw_dlrn_report_result: false cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '12**********89' cifmw_openshift_setup_skip_internal_registry_tls_verify: true cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_use_crc: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller cloud_domain: ooo.test crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 internal-api: ip: 172.17.0.4 storage: ip: 172.18.0.4 storage_mgmt: ip: 172.20.0.4 tenant: ip: 172.19.0.4 crc: networks: default: ip: 192.168.122.10 internal-api: 
ip: 172.17.0.5 storage: ip: 172.18.0.5 storage_mgmt: ip: 172.20.0.5 tenant: ip: 172.19.0.5 standalone: networks: default: config_nm: false ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 storage_mgmt: config_nm: false ip: 172.20.0.100 tenant: config_nm: false ip: 172.19.0.100 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 storage_mgmt: range: 172.20.0.0/24 vlan: 23 tenant: range: 172.19.0.0/24 vlan: 22 dpa_test_suite: test-minimal enable_barbican: 'false' enable_octavia: 'true' enable_ramdisk: true enable_telemetry: 'true' enable_tls: 'true' nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 71c2ce28-24b8-4530-b131-72e04889a182 host_id: 13ae1ffea0b266ee75d72a572609f55d1fe724f11576cedb6317b057 interface_ip: 38.102.83.154 label: cloud-rhel-9-2-tripleo private_ipv4: 38.102.83.154 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.154 public_ipv6: '' region: RegionOne slot: null osp_17_ceph_repos: - rhceph-7-tools-for-rhel-9-x86_64-rpms osp_17_repos: - rhel-9-for-x86_64-baseos-eus-rpms - rhel-9-for-x86_64-appstream-eus-rpms - rhel-9-for-x86_64-highavailability-eus-rpms - openstack-17.1-for-rhel-9-x86_64-rpms - fast-datapath-for-rhel-9-x86_64-rpms push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true standalone_gateway: 192.168.122.10 standalone_ip: 192.168.122.100 standalone_private_key: /home/zuul/.ssh/id_rsa standalone_vm_inventory: standalone_vm_inventory upstream_control_plane_dns: 192.168.122.10 use_ceph: 'false' zuul_log_collection: true upstream_control_plane_dns: 192.168.122.10 use_ceph: 'false' zuul: _inheritance_path: - '' - '' - '' - '' - '' - '' ansible_version: '8' attempts: 1 branch: main build: 3911aa2e57464e5ea41c24d6ab361757 build_refs: - branch: main change: '1202' change_message: "uni04delta-ipv6: No OVN DHCP for baremetal ports\n\nAdds `ovn/disable_ovn_dhcp_for_baremetal_ports` configuration to the uni04delta-ipv6 `config_download.yaml` using `neutron::config::plugin_ml2_config`.\r\n \ \r\nThis disables OVN's built-in DHCP service for baremetal ports, allowing external DHCP to be used instead. Uses the config class approach since the direct puppet parameter was not backported to OSP 17.1.\r\n\r\nJira: [OSPRH-20021](https://issues.redhat.com//browse/OSPRH-20021)" change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption topic: null buildset: ae39f936defb47d8b8026507b61685a3 buildset_refs: - branch: main change: '1202' change_message: "uni04delta-ipv6: No OVN DHCP for baremetal ports\n\nAdds `ovn/disable_ovn_dhcp_for_baremetal_ports` configuration to the uni04delta-ipv6 `config_download.yaml` using `neutron::config::plugin_ml2_config`.\r\n \ \r\nThis disables OVN's built-in DHCP service for baremetal ports, allowing external DHCP to be used instead. 
Uses the config class approach since the direct puppet parameter was not backported to OSP 17.1.\r\n\r\nJira: [OSPRH-20021](https://issues.redhat.com//browse/OSPRH-20021)" change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption topic: null change: '1202' change_message: "uni04delta-ipv6: No OVN DHCP for baremetal ports\n\nAdds `ovn/disable_ovn_dhcp_for_baremetal_ports` configuration to the uni04delta-ipv6 `config_download.yaml` using `neutron::config::plugin_ml2_config`.\r\n \ \r\nThis disables OVN's built-in DHCP service for baremetal ports, allowing external DHCP to be used instead. Uses the config class approach since the direct puppet parameter was not backported to OSP 17.1.\r\n\r\nJira: [OSPRH-20021](https://issues.redhat.com//browse/OSPRH-20021)" change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 child_jobs: [] commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 event_id: 46eb15f0-f788-11f0-870c-f967a8119d20 executor: hostname: ze04.softwarefactory-project.io inventory_file: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/inventory.yaml log_root: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work/logs result_data_file: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work/results.json src_root: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work/src work_root: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work items: - branch: main change: '1202' change_message: "uni04delta-ipv6: No OVN DHCP for baremetal ports\n\nAdds `ovn/disable_ovn_dhcp_for_baremetal_ports` configuration to the uni04delta-ipv6 `config_download.yaml` using `neutron::config::plugin_ml2_config`.\r\n \ \r\nThis disables OVN's built-in DHCP service for baremetal ports, allowing external DHCP to be used instead. 
Uses the config class approach since the direct puppet parameter was not backported to OSP 17.1.\r\n\r\nJira: [OSPRH-20021](https://issues.redhat.com//browse/OSPRH-20021)" change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption topic: null job: adoption-standalone-to-crc-no-ceph jobtags: [] max_attempts: 1 message: dW5pMDRkZWx0YS1pcHY2OiBObyBPVk4gREhDUCBmb3IgYmFyZW1ldGFsIHBvcnRzCgpBZGRzIGBvdm4vZGlzYWJsZV9vdm5fZGhjcF9mb3JfYmFyZW1ldGFsX3BvcnRzYCBjb25maWd1cmF0aW9uIHRvIHRoZSB1bmkwNGRlbHRhLWlwdjYgYGNvbmZpZ19kb3dubG9hZC55YW1sYCB1c2luZyBgbmV1dHJvbjo6Y29uZmlnOjpwbHVnaW5fbWwyX2NvbmZpZ2AuDQogICAgDQpUaGlzIGRpc2FibGVzIE9WTidzIGJ1aWx0LWluIERIQ1Agc2VydmljZSBmb3IgYmFyZW1ldGFsIHBvcnRzLCBhbGxvd2luZyBleHRlcm5hbCBESENQIHRvIGJlIHVzZWQgaW5zdGVhZC4gVXNlcyB0aGUgY29uZmlnIGNsYXNzIGFwcHJvYWNoIHNpbmNlIHRoZSBkaXJlY3QgcHVwcGV0IHBhcmFtZXRlciB3YXMgbm90IGJhY2twb3J0ZWQgdG8gT1NQIDE3LjEuDQoNCkppcmE6IFtPU1BSSC0yMDAyMV0oaHR0cHM6Ly9pc3N1ZXMucmVkaGF0LmNvbS8vYnJvd3NlL09TUFJILTIwMDIxKQ== patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 pipeline: github-check playbook_context: playbook_projects: trusted/project_0/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 6b8ed36de1a9fb17d1895a907c56e0ebed359010 trusted/project_1/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 1d0e62540c53a110ce48eaedb202bbd70f327c48 trusted/project_2/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d6509b6cfec74740c76a12d94dd9fc8ce1990297 trusted/project_3/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: daa791825fcb1f23b3b87c68747b1dbe292d2d2d untrusted/project_0/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: daa791825fcb1f23b3b87c68747b1dbe292d2d2d untrusted/project_1/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 6b8ed36de1a9fb17d1895a907c56e0ebed359010 untrusted/project_2/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 1d0e62540c53a110ce48eaedb202bbd70f327c48 untrusted/project_3/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d6509b6cfec74740c76a12d94dd9fc8ce1990297 playbooks: - path: untrusted/project_3/review.rdoproject.org/rdo-jobs/playbooks/data_plane_adoption/deploy_standalone_run_repo_tests.yaml roles: - checkout: main checkout_description: project override ref link_name: ansible/playbook_0/role_0/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_0/role_0/ci-framework/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_1/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_0/role_1/config/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_2/zuul-jobs link_target: 
untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_0/role_2/zuul-jobs/roles - checkout: master checkout_description: playbook branch link_name: ansible/playbook_0/role_3/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_0/role_3/rdo-jobs/roles post_review: false project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption projects: github.com/crc-org/crc-cloud: canonical_hostname: github.com canonical_name: github.com/crc-org/crc-cloud checkout: main checkout_description: project override ref commit: 42957126d9d9b9d1372615db325b82bd992fa335 name: crc-org/crc-cloud required: true short_name: crc-cloud src_dir: src/github.com/crc-org/crc-cloud github.com/openstack-k8s-operators/architecture: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/architecture checkout: main checkout_description: project override ref commit: 2eaa1ef0f72a4e7ef0a5042b135993cbfc51eacc name: openstack-k8s-operators/architecture required: true short_name: architecture src_dir: src/github.com/openstack-k8s-operators/architecture github.com/openstack-k8s-operators/ci-framework: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main checkout_description: project override ref commit: daa791825fcb1f23b3b87c68747b1dbe292d2d2d name: openstack-k8s-operators/ci-framework required: true short_name: ci-framework src_dir: src/github.com/openstack-k8s-operators/ci-framework github.com/openstack-k8s-operators/data-plane-adoption: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption checkout: main checkout_description: project override ref commit: 7346bd354c161fbd39016de93e1981fb8edfe179 name: openstack-k8s-operators/data-plane-adoption required: true short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption github.com/openstack-k8s-operators/install_yamls: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/install_yamls checkout: main checkout_description: project override ref commit: 13897053815e2df424a56208aa288cf95b7283d1 name: openstack-k8s-operators/install_yamls required: true short_name: install_yamls src_dir: src/github.com/openstack-k8s-operators/install_yamls opendev.org/zuul/zuul-jobs: canonical_hostname: opendev.org canonical_name: opendev.org/zuul/zuul-jobs checkout: master checkout_description: project default branch commit: 1d0e62540c53a110ce48eaedb202bbd70f327c48 name: zuul/zuul-jobs required: true short_name: zuul-jobs src_dir: src/opendev.org/zuul/zuul-jobs review.rdoproject.org/config: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/config checkout: master checkout_description: project default branch commit: 6b8ed36de1a9fb17d1895a907c56e0ebed359010 name: config required: true short_name: config src_dir: src/review.rdoproject.org/config review.rdoproject.org/rdo-jobs: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/rdo-jobs checkout: master checkout_description: project default branch commit: d6509b6cfec74740c76a12d94dd9fc8ce1990297 name: rdo-jobs required: true short_name: rdo-jobs src_dir: src/review.rdoproject.org/rdo-jobs ref: refs/pull/1202/head resources: {} tenant: 
rdoproject.org timeout: 14400 topic: null voting: true zuul_execution_branch: main zuul_execution_canonical_name_and_path: github.com/openstack-k8s-operators/ci-framework/ci/playbooks/e2e-collect-logs.yml zuul_execution_phase: post zuul_execution_phase_index: '4' zuul_execution_trusted: 'False' zuul_log_collection: true zuul_success: 'True' zuul_will_retry: 'False' inventory_dir: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/post_playbook_4 inventory_file: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/post_playbook_4/inventory.yaml inventory_hostname: controller inventory_hostname_short: controller logfiles_dest_dir: /home/zuul/ci-framework-data/logs/2026-01-22_14-28 module_setup: true nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 8e2c7ff5-ac7b-45b2-a9c5-05e2d4922b4b host_id: bdb78bf25a270582fae0ca49d447ffffc4c7a50a772a0a4c0593588a interface_ip: 38.102.83.83 label: cloud-centos-9-stream-tripleo private_ipv4: 38.102.83.83 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.83 public_ipv6: '' region: RegionOne slot: null omit: __omit_place_holder__10ecd797ec9368ecd51cdd69cf5ce40a70e7da09 operator_namespace: '{{ cifmw_install_yamls_defaults[''OPERATOR_NAMESPACE''] | default(''openstack-operators'') }}' osp_17_ceph_repos: - rhceph-7-tools-for-rhel-9-x86_64-rpms osp_17_repos: - rhel-9-for-x86_64-baseos-eus-rpms - rhel-9-for-x86_64-appstream-eus-rpms - rhel-9-for-x86_64-highavailability-eus-rpms - openstack-17.1-for-rhel-9-x86_64-rpms - fast-datapath-for-rhel-9-x86_64-rpms play_hosts: *id002 playbook_dir: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/ci/playbooks push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true role_name: artifacts role_names: *id003 role_path: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/untrusted/project_0/github.com/openstack-k8s-operators/ci-framework/roles/artifacts role_uuid: fa163e3b-3c83-c0d1-27ab-00000000002f standalone_gateway: 192.168.122.10 standalone_ip: 192.168.122.100 standalone_private_key: /home/zuul/.ssh/id_rsa standalone_vm_inventory: standalone_vm_inventory unsafe_vars: adoption_extra_vars: 'supported_volume_backends: [] supported_backup_backends: [] ' ansible_connection: ssh ansible_host: 38.102.83.83 ansible_port: 22 ansible_python_interpreter: auto ansible_user: zuul cifmw_artifacts_crc_sshkey: ~/.ssh/id_rsa cifmw_dlrn_report_result: false cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '12**********89' cifmw_openshift_setup_skip_internal_registry_tls_verify: true cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_use_crc: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller cloud_domain: ooo.test crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 internal-api: ip: 172.17.0.4 storage: ip: 172.18.0.4 storage_mgmt: ip: 172.20.0.4 tenant: ip: 172.19.0.4 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 storage_mgmt: ip: 172.20.0.5 tenant: ip: 172.19.0.5 standalone: networks: default: config_nm: false ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 storage_mgmt: 
config_nm: false ip: 172.20.0.100 tenant: config_nm: false ip: 172.19.0.100 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 storage_mgmt: range: 172.20.0.0/24 vlan: 23 tenant: range: 172.19.0.0/24 vlan: 22 dpa_test_suite: test-minimal enable_barbican: 'false' enable_octavia: 'true' enable_ramdisk: true enable_telemetry: 'true' enable_tls: 'true' nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 8e2c7ff5-ac7b-45b2-a9c5-05e2d4922b4b host_id: bdb78bf25a270582fae0ca49d447ffffc4c7a50a772a0a4c0593588a interface_ip: 38.102.83.83 label: cloud-centos-9-stream-tripleo private_ipv4: 38.102.83.83 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.83 public_ipv6: '' region: RegionOne slot: null osp_17_ceph_repos: - rhceph-7-tools-for-rhel-9-x86_64-rpms osp_17_repos: - rhel-9-for-x86_64-baseos-eus-rpms - rhel-9-for-x86_64-appstream-eus-rpms - rhel-9-for-x86_64-highavailability-eus-rpms - openstack-17.1-for-rhel-9-x86_64-rpms - fast-datapath-for-rhel-9-x86_64-rpms push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true standalone_gateway: 192.168.122.10 standalone_ip: 192.168.122.100 standalone_private_key: /home/zuul/.ssh/id_rsa standalone_vm_inventory: standalone_vm_inventory upstream_control_plane_dns: 192.168.122.10 use_ceph: 'false' zuul_log_collection: true upstream_control_plane_dns: 192.168.122.10 use_ceph: 'false' zuul: _inheritance_path: - '' - '' - '' - '' - '' - '' ansible_version: '8' attempts: 1 branch: main build: 3911aa2e57464e5ea41c24d6ab361757 build_refs: - branch: main change: '1202' change_message: "uni04delta-ipv6: No OVN DHCP for baremetal ports\n\nAdds `ovn/disable_ovn_dhcp_for_baremetal_ports` configuration to the uni04delta-ipv6 `config_download.yaml` using `neutron::config::plugin_ml2_config`.\r\n \ \r\nThis disables OVN's built-in DHCP service for baremetal ports, allowing external DHCP to be used instead. Uses the config class approach since the direct puppet parameter was not backported to OSP 17.1.\r\n\r\nJira: [OSPRH-20021](https://issues.redhat.com//browse/OSPRH-20021)" change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption topic: null buildset: ae39f936defb47d8b8026507b61685a3 buildset_refs: - branch: main change: '1202' change_message: "uni04delta-ipv6: No OVN DHCP for baremetal ports\n\nAdds `ovn/disable_ovn_dhcp_for_baremetal_ports` configuration to the uni04delta-ipv6 `config_download.yaml` using `neutron::config::plugin_ml2_config`.\r\n \ \r\nThis disables OVN's built-in DHCP service for baremetal ports, allowing external DHCP to be used instead. 
Uses the config class approach since the direct puppet parameter was not backported to OSP 17.1.\r\n\r\nJira: [OSPRH-20021](https://issues.redhat.com//browse/OSPRH-20021)" change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption topic: null change: '1202' change_message: "uni04delta-ipv6: No OVN DHCP for baremetal ports\n\nAdds `ovn/disable_ovn_dhcp_for_baremetal_ports` configuration to the uni04delta-ipv6 `config_download.yaml` using `neutron::config::plugin_ml2_config`.\r\n \ \r\nThis disables OVN's built-in DHCP service for baremetal ports, allowing external DHCP to be used instead. Uses the config class approach since the direct puppet parameter was not backported to OSP 17.1.\r\n\r\nJira: [OSPRH-20021](https://issues.redhat.com//browse/OSPRH-20021)" change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 child_jobs: [] commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 event_id: 46eb15f0-f788-11f0-870c-f967a8119d20 executor: hostname: ze04.softwarefactory-project.io inventory_file: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/inventory.yaml log_root: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work/logs result_data_file: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work/results.json src_root: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work/src work_root: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work items: - branch: main change: '1202' change_message: "uni04delta-ipv6: No OVN DHCP for baremetal ports\n\nAdds `ovn/disable_ovn_dhcp_for_baremetal_ports` configuration to the uni04delta-ipv6 `config_download.yaml` using `neutron::config::plugin_ml2_config`.\r\n \ \r\nThis disables OVN's built-in DHCP service for baremetal ports, allowing external DHCP to be used instead. 
Uses the config class approach since the direct puppet parameter was not backported to OSP 17.1.\r\n\r\nJira: [OSPRH-20021](https://issues.redhat.com//browse/OSPRH-20021)" change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption topic: null job: adoption-standalone-to-crc-no-ceph jobtags: [] max_attempts: 1 message: dW5pMDRkZWx0YS1pcHY2OiBObyBPVk4gREhDUCBmb3IgYmFyZW1ldGFsIHBvcnRzCgpBZGRzIGBvdm4vZGlzYWJsZV9vdm5fZGhjcF9mb3JfYmFyZW1ldGFsX3BvcnRzYCBjb25maWd1cmF0aW9uIHRvIHRoZSB1bmkwNGRlbHRhLWlwdjYgYGNvbmZpZ19kb3dubG9hZC55YW1sYCB1c2luZyBgbmV1dHJvbjo6Y29uZmlnOjpwbHVnaW5fbWwyX2NvbmZpZ2AuDQogICAgDQpUaGlzIGRpc2FibGVzIE9WTidzIGJ1aWx0LWluIERIQ1Agc2VydmljZSBmb3IgYmFyZW1ldGFsIHBvcnRzLCBhbGxvd2luZyBleHRlcm5hbCBESENQIHRvIGJlIHVzZWQgaW5zdGVhZC4gVXNlcyB0aGUgY29uZmlnIGNsYXNzIGFwcHJvYWNoIHNpbmNlIHRoZSBkaXJlY3QgcHVwcGV0IHBhcmFtZXRlciB3YXMgbm90IGJhY2twb3J0ZWQgdG8gT1NQIDE3LjEuDQoNCkppcmE6IFtPU1BSSC0yMDAyMV0oaHR0cHM6Ly9pc3N1ZXMucmVkaGF0LmNvbS8vYnJvd3NlL09TUFJILTIwMDIxKQ== patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 pipeline: github-check playbook_context: playbook_projects: trusted/project_0/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 6b8ed36de1a9fb17d1895a907c56e0ebed359010 trusted/project_1/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 1d0e62540c53a110ce48eaedb202bbd70f327c48 trusted/project_2/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d6509b6cfec74740c76a12d94dd9fc8ce1990297 trusted/project_3/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: daa791825fcb1f23b3b87c68747b1dbe292d2d2d untrusted/project_0/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: daa791825fcb1f23b3b87c68747b1dbe292d2d2d untrusted/project_1/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 6b8ed36de1a9fb17d1895a907c56e0ebed359010 untrusted/project_2/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 1d0e62540c53a110ce48eaedb202bbd70f327c48 untrusted/project_3/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d6509b6cfec74740c76a12d94dd9fc8ce1990297 playbooks: - path: untrusted/project_3/review.rdoproject.org/rdo-jobs/playbooks/data_plane_adoption/deploy_standalone_run_repo_tests.yaml roles: - checkout: main checkout_description: project override ref link_name: ansible/playbook_0/role_0/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_0/role_0/ci-framework/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_1/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_0/role_1/config/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_2/zuul-jobs link_target: 
untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_0/role_2/zuul-jobs/roles - checkout: master checkout_description: playbook branch link_name: ansible/playbook_0/role_3/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_0/role_3/rdo-jobs/roles post_review: false project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption projects: github.com/crc-org/crc-cloud: canonical_hostname: github.com canonical_name: github.com/crc-org/crc-cloud checkout: main checkout_description: project override ref commit: 42957126d9d9b9d1372615db325b82bd992fa335 name: crc-org/crc-cloud required: true short_name: crc-cloud src_dir: src/github.com/crc-org/crc-cloud github.com/openstack-k8s-operators/architecture: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/architecture checkout: main checkout_description: project override ref commit: 2eaa1ef0f72a4e7ef0a5042b135993cbfc51eacc name: openstack-k8s-operators/architecture required: true short_name: architecture src_dir: src/github.com/openstack-k8s-operators/architecture github.com/openstack-k8s-operators/ci-framework: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main checkout_description: project override ref commit: daa791825fcb1f23b3b87c68747b1dbe292d2d2d name: openstack-k8s-operators/ci-framework required: true short_name: ci-framework src_dir: src/github.com/openstack-k8s-operators/ci-framework github.com/openstack-k8s-operators/data-plane-adoption: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption checkout: main checkout_description: project override ref commit: 7346bd354c161fbd39016de93e1981fb8edfe179 name: openstack-k8s-operators/data-plane-adoption required: true short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption github.com/openstack-k8s-operators/install_yamls: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/install_yamls checkout: main checkout_description: project override ref commit: 13897053815e2df424a56208aa288cf95b7283d1 name: openstack-k8s-operators/install_yamls required: true short_name: install_yamls src_dir: src/github.com/openstack-k8s-operators/install_yamls opendev.org/zuul/zuul-jobs: canonical_hostname: opendev.org canonical_name: opendev.org/zuul/zuul-jobs checkout: master checkout_description: project default branch commit: 1d0e62540c53a110ce48eaedb202bbd70f327c48 name: zuul/zuul-jobs required: true short_name: zuul-jobs src_dir: src/opendev.org/zuul/zuul-jobs review.rdoproject.org/config: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/config checkout: master checkout_description: project default branch commit: 6b8ed36de1a9fb17d1895a907c56e0ebed359010 name: config required: true short_name: config src_dir: src/review.rdoproject.org/config review.rdoproject.org/rdo-jobs: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/rdo-jobs checkout: master checkout_description: project default branch commit: d6509b6cfec74740c76a12d94dd9fc8ce1990297 name: rdo-jobs required: true short_name: rdo-jobs src_dir: src/review.rdoproject.org/rdo-jobs ref: refs/pull/1202/head resources: {} tenant: 
rdoproject.org timeout: 14400 topic: null voting: true zuul_change_list: - data-plane-adoption zuul_execution_branch: main zuul_execution_canonical_name_and_path: github.com/openstack-k8s-operators/ci-framework/ci/playbooks/e2e-collect-logs.yml zuul_execution_phase: post zuul_execution_phase_index: '4' zuul_execution_trusted: 'False' zuul_log_collection: true zuul_success: 'True' zuul_will_retry: 'False' home/zuul/zuul-output/logs/ci-framework-data/artifacts/zuul_inventory.yml0000644000175000017500000010713515134437263026221 0ustar zuulzuulall: children: computes: hosts: {} ocps: hosts: crc: null rh-subscription: hosts: standalone: null zuul_unreachable: hosts: {} hosts: controller: adoption_extra_vars: 'supported_volume_backends: [] supported_backup_backends: [] ' ansible_connection: ssh ansible_host: 38.102.83.83 ansible_port: 22 ansible_python_interpreter: auto ansible_user: zuul cifmw_artifacts_crc_sshkey: ~/.ssh/id_rsa cifmw_dlrn_report_result: false cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '12**********89' cifmw_openshift_setup_skip_internal_registry_tls_verify: true cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_use_crc: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller cloud_domain: ooo.test crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 internal-api: ip: 172.17.0.4 storage: ip: 172.18.0.4 storage_mgmt: ip: 172.20.0.4 tenant: ip: 172.19.0.4 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 storage_mgmt: ip: 172.20.0.5 tenant: ip: 172.19.0.5 standalone: networks: default: config_nm: false ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 storage_mgmt: config_nm: false ip: 172.20.0.100 tenant: config_nm: false ip: 172.19.0.100 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 storage_mgmt: range: 172.20.0.0/24 vlan: 23 tenant: range: 172.19.0.0/24 vlan: 22 dpa_test_suite: test-minimal enable_barbican: 'false' enable_octavia: 'true' enable_ramdisk: true enable_telemetry: 'true' enable_tls: 'true' nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 8e2c7ff5-ac7b-45b2-a9c5-05e2d4922b4b host_id: bdb78bf25a270582fae0ca49d447ffffc4c7a50a772a0a4c0593588a interface_ip: 38.102.83.83 label: cloud-centos-9-stream-tripleo private_ipv4: 38.102.83.83 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.83 public_ipv6: '' region: RegionOne slot: null osp_17_ceph_repos: - rhceph-7-tools-for-rhel-9-x86_64-rpms osp_17_repos: - rhel-9-for-x86_64-baseos-eus-rpms - rhel-9-for-x86_64-appstream-eus-rpms - rhel-9-for-x86_64-highavailability-eus-rpms - openstack-17.1-for-rhel-9-x86_64-rpms - fast-datapath-for-rhel-9-x86_64-rpms push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true standalone_gateway: 192.168.122.10 standalone_ip: 192.168.122.100 standalone_private_key: /home/zuul/.ssh/id_rsa standalone_vm_inventory: standalone_vm_inventory upstream_control_plane_dns: 192.168.122.10 use_ceph: 'false' zuul_log_collection: true crc: 
adoption_extra_vars: 'supported_volume_backends: [] supported_backup_backends: [] ' ansible_connection: ssh ansible_host: 38.102.83.97 ansible_port: 22 ansible_python_interpreter: auto ansible_user: core cifmw_artifacts_crc_sshkey: ~/.ssh/id_rsa cifmw_dlrn_report_result: false cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '12**********89' cifmw_openshift_setup_skip_internal_registry_tls_verify: true cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_use_crc: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller cloud_domain: ooo.test crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 internal-api: ip: 172.17.0.4 storage: ip: 172.18.0.4 storage_mgmt: ip: 172.20.0.4 tenant: ip: 172.19.0.4 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 storage_mgmt: ip: 172.20.0.5 tenant: ip: 172.19.0.5 standalone: networks: default: config_nm: false ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 storage_mgmt: config_nm: false ip: 172.20.0.100 tenant: config_nm: false ip: 172.19.0.100 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 storage_mgmt: range: 172.20.0.0/24 vlan: 23 tenant: range: 172.19.0.0/24 vlan: 22 dpa_test_suite: test-minimal enable_barbican: 'false' enable_octavia: 'true' enable_ramdisk: true enable_telemetry: 'true' enable_tls: 'true' nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 52b2f704-3175-40e6-96da-8c8b45b50226 host_id: d19710e37f7b2620eb9f1bc9cfdfc06732b1f0c31221781941dd4533 interface_ip: 38.102.83.97 label: crc-cloud-ocp-4-18-1-3xl private_ipv4: 38.102.83.97 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.97 public_ipv6: '' region: RegionOne slot: null osp_17_ceph_repos: - rhceph-7-tools-for-rhel-9-x86_64-rpms osp_17_repos: - rhel-9-for-x86_64-baseos-eus-rpms - rhel-9-for-x86_64-appstream-eus-rpms - rhel-9-for-x86_64-highavailability-eus-rpms - openstack-17.1-for-rhel-9-x86_64-rpms - fast-datapath-for-rhel-9-x86_64-rpms push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true standalone_gateway: 192.168.122.10 standalone_ip: 192.168.122.100 standalone_private_key: /home/zuul/.ssh/id_rsa standalone_vm_inventory: standalone_vm_inventory upstream_control_plane_dns: 192.168.122.10 use_ceph: 'false' zuul_log_collection: true localhost: ansible_connection: local standalone: adoption_extra_vars: 'supported_volume_backends: [] supported_backup_backends: [] ' ansible_connection: ssh ansible_host: 38.102.83.154 ansible_port: 22 ansible_python_interpreter: auto ansible_user: zuul cifmw_artifacts_crc_sshkey: ~/.ssh/id_rsa cifmw_dlrn_report_result: false cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '12**********89' cifmw_openshift_setup_skip_internal_registry_tls_verify: true cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_use_crc: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller cloud_domain: 
ooo.test crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 internal-api: ip: 172.17.0.4 storage: ip: 172.18.0.4 storage_mgmt: ip: 172.20.0.4 tenant: ip: 172.19.0.4 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 storage_mgmt: ip: 172.20.0.5 tenant: ip: 172.19.0.5 standalone: networks: default: config_nm: false ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 storage_mgmt: config_nm: false ip: 172.20.0.100 tenant: config_nm: false ip: 172.19.0.100 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 storage_mgmt: range: 172.20.0.0/24 vlan: 23 tenant: range: 172.19.0.0/24 vlan: 22 dpa_test_suite: test-minimal enable_barbican: 'false' enable_octavia: 'true' enable_ramdisk: true enable_telemetry: 'true' enable_tls: 'true' nodepool: az: nova cloud: vexxhost-nodepool-tripleo external_id: 71c2ce28-24b8-4530-b131-72e04889a182 host_id: 13ae1ffea0b266ee75d72a572609f55d1fe724f11576cedb6317b057 interface_ip: 38.102.83.154 label: cloud-rhel-9-2-tripleo private_ipv4: 38.102.83.154 private_ipv6: null provider: vexxhost-nodepool-tripleo public_ipv4: 38.102.83.154 public_ipv6: '' region: RegionOne slot: null osp_17_ceph_repos: - rhceph-7-tools-for-rhel-9-x86_64-rpms osp_17_repos: - rhel-9-for-x86_64-baseos-eus-rpms - rhel-9-for-x86_64-appstream-eus-rpms - rhel-9-for-x86_64-highavailability-eus-rpms - openstack-17.1-for-rhel-9-x86_64-rpms - fast-datapath-for-rhel-9-x86_64-rpms push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true standalone_gateway: 192.168.122.10 standalone_ip: 192.168.122.100 standalone_private_key: /home/zuul/.ssh/id_rsa standalone_vm_inventory: standalone_vm_inventory upstream_control_plane_dns: 192.168.122.10 use_ceph: 'false' zuul_log_collection: true vars: adoption_extra_vars: 'supported_volume_backends: [] supported_backup_backends: [] ' cifmw_artifacts_crc_sshkey: ~/.ssh/id_rsa cifmw_dlrn_report_result: false cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '12**********89' cifmw_openshift_setup_skip_internal_registry_tls_verify: true cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_use_crc: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller cloud_domain: ooo.test crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 internal-api: ip: 172.17.0.4 storage: ip: 172.18.0.4 storage_mgmt: ip: 172.20.0.4 tenant: ip: 172.19.0.4 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 storage_mgmt: ip: 172.20.0.5 tenant: ip: 172.19.0.5 standalone: networks: default: config_nm: false ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 storage_mgmt: config_nm: false ip: 172.20.0.100 tenant: config_nm: false ip: 172.19.0.100 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: 
true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 storage_mgmt: range: 172.20.0.0/24 vlan: 23 tenant: range: 172.19.0.0/24 vlan: 22 dpa_test_suite: test-minimal enable_barbican: 'false' enable_octavia: 'true' enable_ramdisk: true enable_telemetry: 'true' enable_tls: 'true' osp_17_ceph_repos: - rhceph-7-tools-for-rhel-9-x86_64-rpms osp_17_repos: - rhel-9-for-x86_64-baseos-eus-rpms - rhel-9-for-x86_64-appstream-eus-rpms - rhel-9-for-x86_64-highavailability-eus-rpms - openstack-17.1-for-rhel-9-x86_64-rpms - fast-datapath-for-rhel-9-x86_64-rpms push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true standalone_gateway: 192.168.122.10 standalone_ip: 192.168.122.100 standalone_private_key: /home/zuul/.ssh/id_rsa standalone_vm_inventory: standalone_vm_inventory upstream_control_plane_dns: 192.168.122.10 use_ceph: 'false' zuul: _inheritance_path: - '' - '' - '' - '' - '' - '' ansible_version: '8' attempts: 1 branch: main build: 3911aa2e57464e5ea41c24d6ab361757 build_refs: - branch: main change: '1202' change_message: "uni04delta-ipv6: No OVN DHCP for baremetal ports\n\nAdds `ovn/disable_ovn_dhcp_for_baremetal_ports` configuration to the uni04delta-ipv6 `config_download.yaml` using `neutron::config::plugin_ml2_config`.\r\n \ \r\nThis disables OVN's built-in DHCP service for baremetal ports, allowing external DHCP to be used instead. Uses the config class approach since the direct puppet parameter was not backported to OSP 17.1.\r\n\r\nJira: [OSPRH-20021](https://issues.redhat.com//browse/OSPRH-20021)" change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption topic: null buildset: ae39f936defb47d8b8026507b61685a3 buildset_refs: - branch: main change: '1202' change_message: "uni04delta-ipv6: No OVN DHCP for baremetal ports\n\nAdds `ovn/disable_ovn_dhcp_for_baremetal_ports` configuration to the uni04delta-ipv6 `config_download.yaml` using `neutron::config::plugin_ml2_config`.\r\n \ \r\nThis disables OVN's built-in DHCP service for baremetal ports, allowing external DHCP to be used instead. Uses the config class approach since the direct puppet parameter was not backported to OSP 17.1.\r\n\r\nJira: [OSPRH-20021](https://issues.redhat.com//browse/OSPRH-20021)" change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption topic: null change: '1202' change_message: "uni04delta-ipv6: No OVN DHCP for baremetal ports\n\nAdds `ovn/disable_ovn_dhcp_for_baremetal_ports` configuration to the uni04delta-ipv6 `config_download.yaml` using `neutron::config::plugin_ml2_config`.\r\n \ \r\nThis disables OVN's built-in DHCP service for baremetal ports, allowing external DHCP to be used instead. 
Uses the config class approach since the direct puppet parameter was not backported to OSP 17.1.\r\n\r\nJira: [OSPRH-20021](https://issues.redhat.com//browse/OSPRH-20021)" change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 child_jobs: [] commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 event_id: 46eb15f0-f788-11f0-870c-f967a8119d20 executor: hostname: ze04.softwarefactory-project.io inventory_file: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/inventory.yaml log_root: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work/logs result_data_file: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work/results.json src_root: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work/src work_root: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work items: - branch: main change: '1202' change_message: "uni04delta-ipv6: No OVN DHCP for baremetal ports\n\nAdds `ovn/disable_ovn_dhcp_for_baremetal_ports` configuration to the uni04delta-ipv6 `config_download.yaml` using `neutron::config::plugin_ml2_config`.\r\n \ \r\nThis disables OVN's built-in DHCP service for baremetal ports, allowing external DHCP to be used instead. Uses the config class approach since the direct puppet parameter was not backported to OSP 17.1.\r\n\r\nJira: [OSPRH-20021](https://issues.redhat.com//browse/OSPRH-20021)" change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption topic: null job: adoption-standalone-to-crc-no-ceph jobtags: [] max_attempts: 1 message: dW5pMDRkZWx0YS1pcHY2OiBObyBPVk4gREhDUCBmb3IgYmFyZW1ldGFsIHBvcnRzCgpBZGRzIGBvdm4vZGlzYWJsZV9vdm5fZGhjcF9mb3JfYmFyZW1ldGFsX3BvcnRzYCBjb25maWd1cmF0aW9uIHRvIHRoZSB1bmkwNGRlbHRhLWlwdjYgYGNvbmZpZ19kb3dubG9hZC55YW1sYCB1c2luZyBgbmV1dHJvbjo6Y29uZmlnOjpwbHVnaW5fbWwyX2NvbmZpZ2AuDQogICAgDQpUaGlzIGRpc2FibGVzIE9WTidzIGJ1aWx0LWluIERIQ1Agc2VydmljZSBmb3IgYmFyZW1ldGFsIHBvcnRzLCBhbGxvd2luZyBleHRlcm5hbCBESENQIHRvIGJlIHVzZWQgaW5zdGVhZC4gVXNlcyB0aGUgY29uZmlnIGNsYXNzIGFwcHJvYWNoIHNpbmNlIHRoZSBkaXJlY3QgcHVwcGV0IHBhcmFtZXRlciB3YXMgbm90IGJhY2twb3J0ZWQgdG8gT1NQIDE3LjEuDQoNCkppcmE6IFtPU1BSSC0yMDAyMV0oaHR0cHM6Ly9pc3N1ZXMucmVkaGF0LmNvbS8vYnJvd3NlL09TUFJILTIwMDIxKQ== patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 pipeline: github-check playbook_context: playbook_projects: trusted/project_0/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 6b8ed36de1a9fb17d1895a907c56e0ebed359010 trusted/project_1/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 1d0e62540c53a110ce48eaedb202bbd70f327c48 trusted/project_2/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d6509b6cfec74740c76a12d94dd9fc8ce1990297 trusted/project_3/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: daa791825fcb1f23b3b87c68747b1dbe292d2d2d untrusted/project_0/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: daa791825fcb1f23b3b87c68747b1dbe292d2d2d 
untrusted/project_1/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 6b8ed36de1a9fb17d1895a907c56e0ebed359010 untrusted/project_2/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 1d0e62540c53a110ce48eaedb202bbd70f327c48 untrusted/project_3/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d6509b6cfec74740c76a12d94dd9fc8ce1990297 playbooks: - path: untrusted/project_3/review.rdoproject.org/rdo-jobs/playbooks/data_plane_adoption/deploy_standalone_run_repo_tests.yaml roles: - checkout: main checkout_description: project override ref link_name: ansible/playbook_0/role_0/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_0/role_0/ci-framework/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_1/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_0/role_1/config/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_2/zuul-jobs link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_0/role_2/zuul-jobs/roles - checkout: master checkout_description: playbook branch link_name: ansible/playbook_0/role_3/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_0/role_3/rdo-jobs/roles post_review: false project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption projects: github.com/crc-org/crc-cloud: canonical_hostname: github.com canonical_name: github.com/crc-org/crc-cloud checkout: main checkout_description: project override ref commit: 42957126d9d9b9d1372615db325b82bd992fa335 name: crc-org/crc-cloud required: true short_name: crc-cloud src_dir: src/github.com/crc-org/crc-cloud github.com/openstack-k8s-operators/architecture: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/architecture checkout: main checkout_description: project override ref commit: 2eaa1ef0f72a4e7ef0a5042b135993cbfc51eacc name: openstack-k8s-operators/architecture required: true short_name: architecture src_dir: src/github.com/openstack-k8s-operators/architecture github.com/openstack-k8s-operators/ci-framework: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main checkout_description: project override ref commit: daa791825fcb1f23b3b87c68747b1dbe292d2d2d name: openstack-k8s-operators/ci-framework required: true short_name: ci-framework src_dir: src/github.com/openstack-k8s-operators/ci-framework github.com/openstack-k8s-operators/data-plane-adoption: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption checkout: main checkout_description: project override ref commit: 7346bd354c161fbd39016de93e1981fb8edfe179 name: openstack-k8s-operators/data-plane-adoption required: true short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption github.com/openstack-k8s-operators/install_yamls: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/install_yamls checkout: main checkout_description: project 
override ref commit: 13897053815e2df424a56208aa288cf95b7283d1 name: openstack-k8s-operators/install_yamls required: true short_name: install_yamls src_dir: src/github.com/openstack-k8s-operators/install_yamls opendev.org/zuul/zuul-jobs: canonical_hostname: opendev.org canonical_name: opendev.org/zuul/zuul-jobs checkout: master checkout_description: project default branch commit: 1d0e62540c53a110ce48eaedb202bbd70f327c48 name: zuul/zuul-jobs required: true short_name: zuul-jobs src_dir: src/opendev.org/zuul/zuul-jobs review.rdoproject.org/config: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/config checkout: master checkout_description: project default branch commit: 6b8ed36de1a9fb17d1895a907c56e0ebed359010 name: config required: true short_name: config src_dir: src/review.rdoproject.org/config review.rdoproject.org/rdo-jobs: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/rdo-jobs checkout: master checkout_description: project default branch commit: d6509b6cfec74740c76a12d94dd9fc8ce1990297 name: rdo-jobs required: true short_name: rdo-jobs src_dir: src/review.rdoproject.org/rdo-jobs ref: refs/pull/1202/head resources: {} tenant: rdoproject.org timeout: 14400 topic: null voting: true zuul_log_collection: true home/zuul/zuul-output/logs/ci-framework-data/artifacts/resolv.conf0000644000175000017500000000015215134432007024521 0ustar zuulzuul# Generated by NetworkManager nameserver 192.168.122.10 nameserver 199.204.44.24 nameserver 199.204.47.54 home/zuul/zuul-output/logs/ci-framework-data/artifacts/hosts0000644000175000017500000000023715134432007023427 0ustar zuulzuul127.0.0.1 localhost localhost.localdomain localhost4 localhost4.localdomain4 ::1 localhost localhost.localdomain localhost6 localhost6.localdomain6 home/zuul/zuul-output/logs/ci-framework-data/artifacts/ip-network.txt0000644000175000017500000000703415134432007025206 0ustar zuulzuuldefault via 38.102.83.1 dev eth0 proto dhcp src 38.102.83.83 metric 100 38.102.83.0/24 dev eth0 proto kernel scope link src 38.102.83.83 metric 100 169.254.169.254 via 38.102.83.126 dev eth0 proto dhcp src 38.102.83.83 metric 100 172.17.0.0/24 dev eth1.20 proto kernel scope link src 172.17.0.4 metric 402 172.18.0.0/24 dev eth1.21 proto kernel scope link src 172.18.0.4 metric 400 172.19.0.0/24 dev eth1.22 proto kernel scope link src 172.19.0.4 metric 401 172.20.0.0/24 dev eth1.23 proto kernel scope link src 172.20.0.4 metric 403 192.168.122.0/24 dev eth1 proto kernel scope link src 192.168.122.11 metric 101 0: from all lookup local 32766: from all lookup main 32767: from all lookup default [ { "ifindex": 1, "ifname": "lo", "flags": [ "LOOPBACK","UP","LOWER_UP" ], "mtu": 65536, "qdisc": "noqueue", "operstate": "UNKNOWN", "linkmode": "DEFAULT", "group": "default", "txqlen": 1000, "link_type": "loopback", "address": "00:00:00:00:00:00", "broadcast": "00:00:00:00:00:00" },{ "ifindex": 2, "ifname": "eth0", "flags": [ "BROADCAST","MULTICAST","UP","LOWER_UP" ], "mtu": 1500, "qdisc": "fq_codel", "operstate": "UP", "linkmode": "DEFAULT", "group": "default", "txqlen": 1000, "link_type": "ether", "address": "fa:16:3e:4f:9c:e2", "broadcast": "ff:ff:ff:ff:ff:ff", "altnames": [ "enp0s3","ens3" ] },{ "ifindex": 3, "ifname": "eth1", "flags": [ "BROADCAST","MULTICAST","UP","LOWER_UP" ], "mtu": 1500, "qdisc": "fq_codel", "operstate": "UP", "linkmode": "DEFAULT", "group": "default", "txqlen": 1000, "link_type": "ether", "address": "fa:16:3e:b2:1f:bf", "broadcast": "ff:ff:ff:ff:ff:ff", "altnames": [ 
"enp0s7","ens7" ] },{ "ifindex": 4, "link": "eth1", "ifname": "eth1.21", "flags": [ "BROADCAST","MULTICAST","UP","LOWER_UP" ], "mtu": 1496, "qdisc": "noqueue", "operstate": "UP", "linkmode": "DEFAULT", "group": "default", "txqlen": 1000, "link_type": "ether", "address": "52:54:00:8c:4a:57", "broadcast": "ff:ff:ff:ff:ff:ff" },{ "ifindex": 5, "link": "eth1", "ifname": "eth1.22", "flags": [ "BROADCAST","MULTICAST","UP","LOWER_UP" ], "mtu": 1496, "qdisc": "noqueue", "operstate": "UP", "linkmode": "DEFAULT", "group": "default", "txqlen": 1000, "link_type": "ether", "address": "52:54:00:34:d6:b6", "broadcast": "ff:ff:ff:ff:ff:ff" },{ "ifindex": 6, "link": "eth1", "ifname": "eth1.20", "flags": [ "BROADCAST","MULTICAST","UP","LOWER_UP" ], "mtu": 1496, "qdisc": "noqueue", "operstate": "UP", "linkmode": "DEFAULT", "group": "default", "txqlen": 1000, "link_type": "ether", "address": "52:54:00:91:b6:a0", "broadcast": "ff:ff:ff:ff:ff:ff" },{ "ifindex": 7, "link": "eth1", "ifname": "eth1.23", "flags": [ "BROADCAST","MULTICAST","UP","LOWER_UP" ], "mtu": 1496, "qdisc": "noqueue", "operstate": "UP", "linkmode": "DEFAULT", "group": "default", "txqlen": 1000, "link_type": "ether", "address": "52:54:00:e6:85:75", "broadcast": "ff:ff:ff:ff:ff:ff" } ] home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/0000755000175000017500000000000015134411250023463 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/0000755000175000017500000000000015134411250027516 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/0000755000175000017500000000000015134437263030657 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_all.yml0000644000175000017500000000134515134411253033141 0ustar zuulzuul--- - name: Debug make_all_env when: make_all_env is defined ansible.builtin.debug: var: make_all_env - name: Debug make_all_params when: make_all_params is defined ansible.builtin.debug: var: make_all_params - name: Run all retries: "{{ make_all_retries | default(omit) }}" delay: "{{ make_all_delay | default(omit) }}" until: "{{ make_all_until | default(true) }}" register: "make_all_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make all" dry_run: "{{ make_all_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_all_env|default({})), **(make_all_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_help.yml0000644000175000017500000000137515134411253033324 0ustar zuulzuul--- - name: Debug make_help_env when: make_help_env is defined ansible.builtin.debug: var: make_help_env - name: Debug make_help_params when: make_help_params is defined ansible.builtin.debug: var: make_help_params - name: Run help retries: "{{ make_help_retries | default(omit) }}" delay: "{{ make_help_delay | default(omit) }}" until: "{{ make_help_until | default(true) }}" register: "make_help_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make help" dry_run: "{{ make_help_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_help_env|default({})), **(make_help_params|default({}))) }}" ././@LongLink0000644000000000000000000000015000000000000011577 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cleanup.0000644000175000017500000000144115134411253033273 0ustar zuulzuul--- - name: Debug make_cleanup_env when: make_cleanup_env is defined ansible.builtin.debug: var: make_cleanup_env - name: Debug make_cleanup_params when: make_cleanup_params is defined ansible.builtin.debug: var: make_cleanup_params - name: Run cleanup retries: "{{ make_cleanup_retries | default(omit) }}" delay: "{{ make_cleanup_delay | default(omit) }}" until: "{{ make_cleanup_until | default(true) }}" register: "make_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make cleanup" dry_run: "{{ make_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cleanup_env|default({})), **(make_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_deploy_c0000644000175000017500000000161215134411253033364 0ustar zuulzuul--- - name: Debug make_deploy_cleanup_env when: make_deploy_cleanup_env is defined ansible.builtin.debug: var: make_deploy_cleanup_env - name: Debug make_deploy_cleanup_params when: make_deploy_cleanup_params is defined ansible.builtin.debug: var: make_deploy_cleanup_params - name: Run deploy_cleanup retries: "{{ make_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_deploy_cleanup_delay | default(omit) }}" until: "{{ make_deploy_cleanup_until | default(true) }}" register: "make_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make deploy_cleanup" dry_run: "{{ make_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_deploy_cleanup_env|default({})), **(make_deploy_cleanup_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_wait.yml0000644000175000017500000000136415134411253033336 0ustar zuulzuul--- - name: Debug make_wait_env when: make_wait_env is defined ansible.builtin.debug: var: make_wait_env - name: Debug make_wait_params when: make_wait_params is defined ansible.builtin.debug: var: make_wait_params - name: Run wait retries: "{{ make_wait_retries | default(omit) }}" delay: "{{ make_wait_delay | default(omit) }}" until: "{{ make_wait_until | default(true) }}" register: "make_wait_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make wait" dry_run: "{{ make_wait_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_wait_env|default({})), **(make_wait_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_storage.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_stor0000644000175000017500000000153515134411253033410 0ustar zuulzuul--- - name: Debug make_crc_storage_env when: make_crc_storage_env is defined 
ansible.builtin.debug: var: make_crc_storage_env - name: Debug make_crc_storage_params when: make_crc_storage_params is defined ansible.builtin.debug: var: make_crc_storage_params - name: Run crc_storage retries: "{{ make_crc_storage_retries | default(omit) }}" delay: "{{ make_crc_storage_delay | default(omit) }}" until: "{{ make_crc_storage_until | default(true) }}" register: "make_crc_storage_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_storage" dry_run: "{{ make_crc_storage_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_storage_env|default({})), **(make_crc_storage_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_storage_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_stor0000644000175000017500000000172515134411253033411 0ustar zuulzuul--- - name: Debug make_crc_storage_cleanup_env when: make_crc_storage_cleanup_env is defined ansible.builtin.debug: var: make_crc_storage_cleanup_env - name: Debug make_crc_storage_cleanup_params when: make_crc_storage_cleanup_params is defined ansible.builtin.debug: var: make_crc_storage_cleanup_params - name: Run crc_storage_cleanup retries: "{{ make_crc_storage_cleanup_retries | default(omit) }}" delay: "{{ make_crc_storage_cleanup_delay | default(omit) }}" until: "{{ make_crc_storage_cleanup_until | default(true) }}" register: "make_crc_storage_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_storage_cleanup" dry_run: "{{ make_crc_storage_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_storage_cleanup_env|default({})), **(make_crc_storage_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_storage_release.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_stor0000644000175000017500000000172515134411253033411 0ustar zuulzuul--- - name: Debug make_crc_storage_release_env when: make_crc_storage_release_env is defined ansible.builtin.debug: var: make_crc_storage_release_env - name: Debug make_crc_storage_release_params when: make_crc_storage_release_params is defined ansible.builtin.debug: var: make_crc_storage_release_params - name: Run crc_storage_release retries: "{{ make_crc_storage_release_retries | default(omit) }}" delay: "{{ make_crc_storage_release_delay | default(omit) }}" until: "{{ make_crc_storage_release_until | default(true) }}" register: "make_crc_storage_release_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_storage_release" dry_run: "{{ make_crc_storage_release_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_storage_release_env|default({})), **(make_crc_storage_release_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_storage_with_retries.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_stor0000644000175000017500000000204015134411253033400 0ustar zuulzuul--- - name: Debug make_crc_storage_with_retries_env when: make_crc_storage_with_retries_env is defined ansible.builtin.debug: var: make_crc_storage_with_retries_env - name: Debug make_crc_storage_with_retries_params when: make_crc_storage_with_retries_params is defined ansible.builtin.debug: var: make_crc_storage_with_retries_params - name: Run crc_storage_with_retries retries: "{{ make_crc_storage_with_retries_retries | default(omit) }}" delay: "{{ make_crc_storage_with_retries_delay | default(omit) }}" until: "{{ make_crc_storage_with_retries_until | default(true) }}" register: "make_crc_storage_with_retries_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_storage_with_retries" dry_run: "{{ make_crc_storage_with_retries_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_storage_with_retries_env|default({})), **(make_crc_storage_with_retries_params|default({}))) }}" ././@LongLink0000644000000000000000000000020100000000000011574 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_storage_cleanup_with_retries.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_stor0000644000175000017500000000223015134411253033401 0ustar zuulzuul--- - name: Debug make_crc_storage_cleanup_with_retries_env when: make_crc_storage_cleanup_with_retries_env is defined ansible.builtin.debug: var: make_crc_storage_cleanup_with_retries_env - name: Debug make_crc_storage_cleanup_with_retries_params when: make_crc_storage_cleanup_with_retries_params is defined ansible.builtin.debug: var: make_crc_storage_cleanup_with_retries_params - name: Run crc_storage_cleanup_with_retries retries: "{{ make_crc_storage_cleanup_with_retries_retries | default(omit) }}" delay: "{{ make_crc_storage_cleanup_with_retries_delay | default(omit) }}" until: "{{ make_crc_storage_cleanup_with_retries_until | default(true) }}" register: "make_crc_storage_cleanup_with_retries_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_storage_cleanup_with_retries" dry_run: "{{ make_crc_storage_cleanup_with_retries_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_storage_cleanup_with_retries_env|default({})), **(make_crc_storage_cleanup_with_retries_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_operator_namespace.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_operator0000644000175000017500000000170615134411253033425 0ustar zuulzuul--- - name: Debug make_operator_namespace_env when: make_operator_namespace_env is defined ansible.builtin.debug: var: make_operator_namespace_env - name: Debug make_operator_namespace_params when: make_operator_namespace_params is defined ansible.builtin.debug: var: make_operator_namespace_params - name: Run operator_namespace retries: "{{ make_operator_namespace_retries | default(omit) 
}}" delay: "{{ make_operator_namespace_delay | default(omit) }}" until: "{{ make_operator_namespace_until | default(true) }}" register: "make_operator_namespace_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make operator_namespace" dry_run: "{{ make_operator_namespace_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_operator_namespace_env|default({})), **(make_operator_namespace_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_namespace.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_namespac0000644000175000017500000000147715134411253033366 0ustar zuulzuul--- - name: Debug make_namespace_env when: make_namespace_env is defined ansible.builtin.debug: var: make_namespace_env - name: Debug make_namespace_params when: make_namespace_params is defined ansible.builtin.debug: var: make_namespace_params - name: Run namespace retries: "{{ make_namespace_retries | default(omit) }}" delay: "{{ make_namespace_delay | default(omit) }}" until: "{{ make_namespace_until | default(true) }}" register: "make_namespace_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make namespace" dry_run: "{{ make_namespace_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_namespace_env|default({})), **(make_namespace_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_namespace_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_namespac0000644000175000017500000000166715134411253033367 0ustar zuulzuul--- - name: Debug make_namespace_cleanup_env when: make_namespace_cleanup_env is defined ansible.builtin.debug: var: make_namespace_cleanup_env - name: Debug make_namespace_cleanup_params when: make_namespace_cleanup_params is defined ansible.builtin.debug: var: make_namespace_cleanup_params - name: Run namespace_cleanup retries: "{{ make_namespace_cleanup_retries | default(omit) }}" delay: "{{ make_namespace_cleanup_delay | default(omit) }}" until: "{{ make_namespace_cleanup_until | default(true) }}" register: "make_namespace_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make namespace_cleanup" dry_run: "{{ make_namespace_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_namespace_cleanup_env|default({})), **(make_namespace_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000014600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_input.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_input.ym0000644000175000017500000000140315134411253033347 0ustar zuulzuul--- - name: Debug make_input_env when: make_input_env is defined ansible.builtin.debug: var: make_input_env - name: Debug make_input_params when: make_input_params is defined ansible.builtin.debug: var: make_input_params - name: Run input retries: "{{ make_input_retries | default(omit) 
}}" delay: "{{ make_input_delay | default(omit) }}" until: "{{ make_input_until | default(true) }}" register: "make_input_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make input" dry_run: "{{ make_input_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_input_env|default({})), **(make_input_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_input_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_input_cl0000644000175000017500000000157315134411253033411 0ustar zuulzuul--- - name: Debug make_input_cleanup_env when: make_input_cleanup_env is defined ansible.builtin.debug: var: make_input_cleanup_env - name: Debug make_input_cleanup_params when: make_input_cleanup_params is defined ansible.builtin.debug: var: make_input_cleanup_params - name: Run input_cleanup retries: "{{ make_input_cleanup_retries | default(omit) }}" delay: "{{ make_input_cleanup_delay | default(omit) }}" until: "{{ make_input_cleanup_until | default(true) }}" register: "make_input_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make input_cleanup" dry_run: "{{ make_input_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_input_cleanup_env|default({})), **(make_input_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_bmo_setup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_bmo_0000644000175000017500000000157315134411253033337 0ustar zuulzuul--- - name: Debug make_crc_bmo_setup_env when: make_crc_bmo_setup_env is defined ansible.builtin.debug: var: make_crc_bmo_setup_env - name: Debug make_crc_bmo_setup_params when: make_crc_bmo_setup_params is defined ansible.builtin.debug: var: make_crc_bmo_setup_params - name: Run crc_bmo_setup retries: "{{ make_crc_bmo_setup_retries | default(omit) }}" delay: "{{ make_crc_bmo_setup_delay | default(omit) }}" until: "{{ make_crc_bmo_setup_until | default(true) }}" register: "make_crc_bmo_setup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_bmo_setup" dry_run: "{{ make_crc_bmo_setup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_bmo_setup_env|default({})), **(make_crc_bmo_setup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_bmo_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_bmo_0000644000175000017500000000163115134411253033332 0ustar zuulzuul--- - name: Debug make_crc_bmo_cleanup_env when: make_crc_bmo_cleanup_env is defined ansible.builtin.debug: var: make_crc_bmo_cleanup_env - name: Debug make_crc_bmo_cleanup_params when: make_crc_bmo_cleanup_params is defined ansible.builtin.debug: var: make_crc_bmo_cleanup_params - name: Run crc_bmo_cleanup retries: "{{ make_crc_bmo_cleanup_retries | default(omit) 
}}" delay: "{{ make_crc_bmo_cleanup_delay | default(omit) }}" until: "{{ make_crc_bmo_cleanup_until | default(true) }}" register: "make_crc_bmo_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make crc_bmo_cleanup" dry_run: "{{ make_crc_bmo_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_bmo_cleanup_env|default({})), **(make_crc_bmo_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000161215134411253033402 0ustar zuulzuul--- - name: Debug make_openstack_prep_env when: make_openstack_prep_env is defined ansible.builtin.debug: var: make_openstack_prep_env - name: Debug make_openstack_prep_params when: make_openstack_prep_params is defined ansible.builtin.debug: var: make_openstack_prep_params - name: Run openstack_prep retries: "{{ make_openstack_prep_retries | default(omit) }}" delay: "{{ make_openstack_prep_delay | default(omit) }}" until: "{{ make_openstack_prep_until | default(true) }}" register: "make_openstack_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_prep" dry_run: "{{ make_openstack_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_prep_env|default({})), **(make_openstack_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000147715134411253033413 0ustar zuulzuul--- - name: Debug make_openstack_env when: make_openstack_env is defined ansible.builtin.debug: var: make_openstack_env - name: Debug make_openstack_params when: make_openstack_params is defined ansible.builtin.debug: var: make_openstack_params - name: Run openstack retries: "{{ make_openstack_retries | default(omit) }}" delay: "{{ make_openstack_delay | default(omit) }}" until: "{{ make_openstack_until | default(true) }}" register: "make_openstack_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack" dry_run: "{{ make_openstack_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_env|default({})), **(make_openstack_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_wait.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000161215134411253033402 0ustar zuulzuul--- - name: Debug make_openstack_wait_env when: make_openstack_wait_env is defined ansible.builtin.debug: var: make_openstack_wait_env - name: Debug make_openstack_wait_params when: make_openstack_wait_params is defined ansible.builtin.debug: var: make_openstack_wait_params - name: Run openstack_wait retries: "{{ make_openstack_wait_retries | 
default(omit) }}" delay: "{{ make_openstack_wait_delay | default(omit) }}" until: "{{ make_openstack_wait_until | default(true) }}" register: "make_openstack_wait_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_wait" dry_run: "{{ make_openstack_wait_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_wait_env|default({})), **(make_openstack_wait_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_init.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000161215134411253033402 0ustar zuulzuul--- - name: Debug make_openstack_init_env when: make_openstack_init_env is defined ansible.builtin.debug: var: make_openstack_init_env - name: Debug make_openstack_init_params when: make_openstack_init_params is defined ansible.builtin.debug: var: make_openstack_init_params - name: Run openstack_init retries: "{{ make_openstack_init_retries | default(omit) }}" delay: "{{ make_openstack_init_delay | default(omit) }}" until: "{{ make_openstack_init_until | default(true) }}" register: "make_openstack_init_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_init" dry_run: "{{ make_openstack_init_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_init_env|default({})), **(make_openstack_init_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000166715134411253033414 0ustar zuulzuul--- - name: Debug make_openstack_cleanup_env when: make_openstack_cleanup_env is defined ansible.builtin.debug: var: make_openstack_cleanup_env - name: Debug make_openstack_cleanup_params when: make_openstack_cleanup_params is defined ansible.builtin.debug: var: make_openstack_cleanup_params - name: Run openstack_cleanup retries: "{{ make_openstack_cleanup_retries | default(omit) }}" delay: "{{ make_openstack_cleanup_delay | default(omit) }}" until: "{{ make_openstack_cleanup_until | default(true) }}" register: "make_openstack_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_cleanup" dry_run: "{{ make_openstack_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_cleanup_env|default({})), **(make_openstack_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_repo.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000161215134411253033402 0ustar zuulzuul--- - name: Debug make_openstack_repo_env when: make_openstack_repo_env is defined ansible.builtin.debug: var: make_openstack_repo_env - name: Debug make_openstack_repo_params when: make_openstack_repo_params is 
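Because most targets above come with a matching *_cleanup file, a caller could pair the two in a block/always so the teardown still runs when the install step fails; a hypothetical sketch reusing two of the archived files (the task names are illustrative):

- name: Run the openstack target and always clean up afterwards (hypothetical)
  block:
    - name: Run make openstack
      ansible.builtin.include_role:
        name: install_yamls_makes
        tasks_from: make_openstack.yml
  always:
    - name: Run make openstack_cleanup
      ansible.builtin.include_role:
        name: install_yamls_makes
        tasks_from: make_openstack_cleanup.yml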
home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_deploy_prep.yml
---
- name: Debug make_openstack_deploy_prep_env
  when: make_openstack_deploy_prep_env is defined
  ansible.builtin.debug:
    var: make_openstack_deploy_prep_env
- name: Debug make_openstack_deploy_prep_params
  when: make_openstack_deploy_prep_params is defined
  ansible.builtin.debug:
    var: make_openstack_deploy_prep_params
- name: Run openstack_deploy_prep
  retries: "{{ make_openstack_deploy_prep_retries | default(omit) }}"
  delay: "{{ make_openstack_deploy_prep_delay | default(omit) }}"
  until: "{{ make_openstack_deploy_prep_until | default(true) }}"
  register: "make_openstack_deploy_prep_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make openstack_deploy_prep"
    dry_run: "{{ make_openstack_deploy_prep_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_openstack_deploy_prep_env|default({})), **(make_openstack_deploy_prep_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_deploy.yml
---
- name: Debug make_openstack_deploy_env
  when: make_openstack_deploy_env is defined
  ansible.builtin.debug:
    var: make_openstack_deploy_env
- name: Debug make_openstack_deploy_params
  when: make_openstack_deploy_params is defined
  ansible.builtin.debug:
    var: make_openstack_deploy_params
- name: Run openstack_deploy
  retries: "{{ make_openstack_deploy_retries | default(omit) }}"
  delay: "{{ make_openstack_deploy_delay | default(omit) }}"
  until: "{{ make_openstack_deploy_until | default(true) }}"
  register: "make_openstack_deploy_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make openstack_deploy"
    dry_run: "{{ make_openstack_deploy_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_openstack_deploy_env|default({})), **(make_openstack_deploy_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_wait_deploy.yml
---
- name: Debug make_openstack_wait_deploy_env
  when: make_openstack_wait_deploy_env is defined
  ansible.builtin.debug:
    var: make_openstack_wait_deploy_env
- name: Debug make_openstack_wait_deploy_params
  when: make_openstack_wait_deploy_params is defined
  ansible.builtin.debug:
    var: make_openstack_wait_deploy_params
- name: Run openstack_wait_deploy
  retries: "{{ make_openstack_wait_deploy_retries | default(omit) }}"
  delay: "{{ make_openstack_wait_deploy_delay | default(omit) }}"
  until: "{{ make_openstack_wait_deploy_until | default(true) }}"
  register: "make_openstack_wait_deploy_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make openstack_wait_deploy"
    dry_run: "{{ make_openstack_wait_deploy_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_openstack_wait_deploy_env|default({})), **(make_openstack_wait_deploy_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_deploy_cleanup.yml
---
- name: Debug make_openstack_deploy_cleanup_env
  when: make_openstack_deploy_cleanup_env is defined
  ansible.builtin.debug:
    var: make_openstack_deploy_cleanup_env
- name: Debug make_openstack_deploy_cleanup_params
  when: make_openstack_deploy_cleanup_params is defined
  ansible.builtin.debug:
    var: make_openstack_deploy_cleanup_params
- name: Run openstack_deploy_cleanup
  retries: "{{ make_openstack_deploy_cleanup_retries | default(omit) }}"
  delay: "{{ make_openstack_deploy_cleanup_delay | default(omit) }}"
  until: "{{ make_openstack_deploy_cleanup_until | default(true) }}"
  register: "make_openstack_deploy_cleanup_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make openstack_deploy_cleanup"
    dry_run: "{{ make_openstack_deploy_cleanup_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_openstack_deploy_cleanup_env|default({})), **(make_openstack_deploy_cleanup_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_update_run.yml
---
- name: Debug make_openstack_update_run_env
  when: make_openstack_update_run_env is defined
  ansible.builtin.debug:
    var: make_openstack_update_run_env
- name: Debug make_openstack_update_run_params
  when: make_openstack_update_run_params is defined
  ansible.builtin.debug:
    var: make_openstack_update_run_params
- name: Run openstack_update_run
  retries: "{{ make_openstack_update_run_retries | default(omit) }}"
  delay: "{{ make_openstack_update_run_delay | default(omit) }}"
  until: "{{ make_openstack_update_run_until | default(true) }}"
  register: "make_openstack_update_run_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make openstack_update_run"
    dry_run: "{{ make_openstack_update_run_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_openstack_update_run_env|default({})), **(make_openstack_update_run_params|default({}))) }}"
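In every file above, extra_args is built with the document's own expression dict((X_env|default({})), **(X_params|default({}))), i.e. the per-target _env and _params dictionaries are merged, with _params keys winning on conflict under standard Python dict semantics. A purely illustrative debug task (the variable values are made up) showing the merge for the openstack_deploy target:

- name: Show the merged extra_args for openstack_deploy (illustration only)
  vars:
    make_openstack_deploy_env:
      NAMESPACE: openstack
      TIMEOUT: 300s
    make_openstack_deploy_params:
      TIMEOUT: 600s   # overrides the _env value in the merged dictionary
  ansible.builtin.debug:
    msg: "{{ dict((make_openstack_deploy_env | default({})), **(make_openstack_deploy_params | default({}))) }}"

With these sample values the printed result would be NAMESPACE=openstack, TIMEOUT=600s.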
"make_openstack_update_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_update_run" dry_run: "{{ make_openstack_update_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_update_run_env|default({})), **(make_openstack_update_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_update_services.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_update_s0000644000175000017500000000163115134411253033373 0ustar zuulzuul--- - name: Debug make_update_services_env when: make_update_services_env is defined ansible.builtin.debug: var: make_update_services_env - name: Debug make_update_services_params when: make_update_services_params is defined ansible.builtin.debug: var: make_update_services_params - name: Run update_services retries: "{{ make_update_services_retries | default(omit) }}" delay: "{{ make_update_services_delay | default(omit) }}" until: "{{ make_update_services_until | default(true) }}" register: "make_update_services_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make update_services" dry_run: "{{ make_update_services_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_update_services_env|default({})), **(make_update_services_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_update_system.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_update_s0000644000175000017500000000157315134411253033400 0ustar zuulzuul--- - name: Debug make_update_system_env when: make_update_system_env is defined ansible.builtin.debug: var: make_update_system_env - name: Debug make_update_system_params when: make_update_system_params is defined ansible.builtin.debug: var: make_update_system_params - name: Run update_system retries: "{{ make_update_system_retries | default(omit) }}" delay: "{{ make_update_system_delay | default(omit) }}" until: "{{ make_update_system_until | default(true) }}" register: "make_update_system_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make update_system" dry_run: "{{ make_update_system_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_update_system_env|default({})), **(make_update_system_params|default({}))) }}" ././@LongLink0000644000000000000000000000017000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_patch_version.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstac0000644000175000017500000000202115134411253033375 0ustar zuulzuul--- - name: Debug make_openstack_patch_version_env when: make_openstack_patch_version_env is defined ansible.builtin.debug: var: make_openstack_patch_version_env - name: Debug make_openstack_patch_version_params when: make_openstack_patch_version_params is defined ansible.builtin.debug: var: make_openstack_patch_version_params - name: Run 
openstack_patch_version retries: "{{ make_openstack_patch_version_retries | default(omit) }}" delay: "{{ make_openstack_patch_version_delay | default(omit) }}" until: "{{ make_openstack_patch_version_until | default(true) }}" register: "make_openstack_patch_version_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_patch_version" dry_run: "{{ make_openstack_patch_version_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_patch_version_env|default({})), **(make_openstack_patch_version_params|default({}))) }}" ././@LongLink0000644000000000000000000000017200000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_generate_keys.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_dep0000644000175000017500000000205715134411253033347 0ustar zuulzuul--- - name: Debug make_edpm_deploy_generate_keys_env when: make_edpm_deploy_generate_keys_env is defined ansible.builtin.debug: var: make_edpm_deploy_generate_keys_env - name: Debug make_edpm_deploy_generate_keys_params when: make_edpm_deploy_generate_keys_params is defined ansible.builtin.debug: var: make_edpm_deploy_generate_keys_params - name: Run edpm_deploy_generate_keys retries: "{{ make_edpm_deploy_generate_keys_retries | default(omit) }}" delay: "{{ make_edpm_deploy_generate_keys_delay | default(omit) }}" until: "{{ make_edpm_deploy_generate_keys_until | default(true) }}" register: "make_edpm_deploy_generate_keys_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_deploy_generate_keys" dry_run: "{{ make_edpm_deploy_generate_keys_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_deploy_generate_keys_env|default({})), **(make_edpm_deploy_generate_keys_params|default({}))) }}" ././@LongLink0000644000000000000000000000020000000000000011573 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_patch_ansible_runner_image.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_pat0000644000175000017500000000221115134411253033353 0ustar zuulzuul--- - name: Debug make_edpm_patch_ansible_runner_image_env when: make_edpm_patch_ansible_runner_image_env is defined ansible.builtin.debug: var: make_edpm_patch_ansible_runner_image_env - name: Debug make_edpm_patch_ansible_runner_image_params when: make_edpm_patch_ansible_runner_image_params is defined ansible.builtin.debug: var: make_edpm_patch_ansible_runner_image_params - name: Run edpm_patch_ansible_runner_image retries: "{{ make_edpm_patch_ansible_runner_image_retries | default(omit) }}" delay: "{{ make_edpm_patch_ansible_runner_image_delay | default(omit) }}" until: "{{ make_edpm_patch_ansible_runner_image_until | default(true) }}" register: "make_edpm_patch_ansible_runner_image_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_patch_ansible_runner_image" dry_run: "{{ make_edpm_patch_ansible_runner_image_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_patch_ansible_runner_image_env|default({})), **(make_edpm_patch_ansible_runner_image_params|default({}))) }}" 
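Each generated target exposes the same retry knobs (the per-target _retries, _delay and _until variables, defaulting to omit/omit/true). A hedged sketch of what an override might look like as extra vars, assuming the registered ci_script result exposes an rc field the way shell-style results usually do (the variable names are real, the values and the rc assumption are illustrative):

# Hypothetical extra-vars: poll "make openstack_wait" up to 30 times, 20 seconds apart.
make_openstack_wait_retries: 30
make_openstack_wait_delay: 20
make_openstack_wait_until: "make_openstack_wait_status.rc == 0"   # assumes the registered result carries rc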
home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_prep.yml
---
- name: Debug make_edpm_deploy_prep_env
  when: make_edpm_deploy_prep_env is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_prep_env
- name: Debug make_edpm_deploy_prep_params
  when: make_edpm_deploy_prep_params is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_prep_params
- name: Run edpm_deploy_prep
  retries: "{{ make_edpm_deploy_prep_retries | default(omit) }}"
  delay: "{{ make_edpm_deploy_prep_delay | default(omit) }}"
  until: "{{ make_edpm_deploy_prep_until | default(true) }}"
  register: "make_edpm_deploy_prep_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make edpm_deploy_prep"
    dry_run: "{{ make_edpm_deploy_prep_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_edpm_deploy_prep_env|default({})), **(make_edpm_deploy_prep_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_cleanup.yml
---
- name: Debug make_edpm_deploy_cleanup_env
  when: make_edpm_deploy_cleanup_env is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_cleanup_env
- name: Debug make_edpm_deploy_cleanup_params
  when: make_edpm_deploy_cleanup_params is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_cleanup_params
- name: Run edpm_deploy_cleanup
  retries: "{{ make_edpm_deploy_cleanup_retries | default(omit) }}"
  delay: "{{ make_edpm_deploy_cleanup_delay | default(omit) }}"
  until: "{{ make_edpm_deploy_cleanup_until | default(true) }}"
  register: "make_edpm_deploy_cleanup_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make edpm_deploy_cleanup"
    dry_run: "{{ make_edpm_deploy_cleanup_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_edpm_deploy_cleanup_env|default({})), **(make_edpm_deploy_cleanup_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy.yml
---
- name: Debug make_edpm_deploy_env
  when: make_edpm_deploy_env is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_env
- name: Debug make_edpm_deploy_params
  when: make_edpm_deploy_params is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_params
- name: Run edpm_deploy
  retries: "{{ make_edpm_deploy_retries | default(omit) }}"
  delay: "{{ make_edpm_deploy_delay | default(omit) }}"
  until: "{{ make_edpm_deploy_until | default(true) }}"
  register: "make_edpm_deploy_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make edpm_deploy"
    dry_run: "{{ make_edpm_deploy_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_edpm_deploy_env|default({})), **(make_edpm_deploy_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_baremetal_prep.yml
---
- name: Debug make_edpm_deploy_baremetal_prep_env
  when: make_edpm_deploy_baremetal_prep_env is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_baremetal_prep_env
- name: Debug make_edpm_deploy_baremetal_prep_params
  when: make_edpm_deploy_baremetal_prep_params is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_baremetal_prep_params
- name: Run edpm_deploy_baremetal_prep
  retries: "{{ make_edpm_deploy_baremetal_prep_retries | default(omit) }}"
  delay: "{{ make_edpm_deploy_baremetal_prep_delay | default(omit) }}"
  until: "{{ make_edpm_deploy_baremetal_prep_until | default(true) }}"
  register: "make_edpm_deploy_baremetal_prep_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make edpm_deploy_baremetal_prep"
    dry_run: "{{ make_edpm_deploy_baremetal_prep_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_edpm_deploy_baremetal_prep_env|default({})), **(make_edpm_deploy_baremetal_prep_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_baremetal.yml
---
- name: Debug make_edpm_deploy_baremetal_env
  when: make_edpm_deploy_baremetal_env is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_baremetal_env
- name: Debug make_edpm_deploy_baremetal_params
  when: make_edpm_deploy_baremetal_params is defined
  ansible.builtin.debug:
    var: make_edpm_deploy_baremetal_params
- name: Run edpm_deploy_baremetal
  retries: "{{ make_edpm_deploy_baremetal_retries | default(omit) }}"
  delay: "{{ make_edpm_deploy_baremetal_delay | default(omit) }}"
  until: "{{ make_edpm_deploy_baremetal_until | default(true) }}"
  register: "make_edpm_deploy_baremetal_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make edpm_deploy_baremetal"
    dry_run: "{{ make_edpm_deploy_baremetal_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_edpm_deploy_baremetal_env|default({})), **(make_edpm_deploy_baremetal_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_wait_deploy_baremetal.yml
---
- name: Debug make_edpm_wait_deploy_baremetal_env
  when: make_edpm_wait_deploy_baremetal_env is defined
  ansible.builtin.debug:
    var: make_edpm_wait_deploy_baremetal_env
- name: Debug make_edpm_wait_deploy_baremetal_params
  when: make_edpm_wait_deploy_baremetal_params is defined
  ansible.builtin.debug:
    var: make_edpm_wait_deploy_baremetal_params
- name: Run edpm_wait_deploy_baremetal
  retries: "{{ make_edpm_wait_deploy_baremetal_retries | default(omit) }}"
  delay: "{{ make_edpm_wait_deploy_baremetal_delay | default(omit) }}"
  until: "{{ make_edpm_wait_deploy_baremetal_until | default(true) }}"
  register: "make_edpm_wait_deploy_baremetal_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make edpm_wait_deploy_baremetal"
    dry_run: "{{ make_edpm_wait_deploy_baremetal_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_edpm_wait_deploy_baremetal_env|default({})), **(make_edpm_wait_deploy_baremetal_params|default({}))) }}"
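The per-target dry-run switch is forwarded to cifmw.general.ci_script unchanged; toggling it for the edpm_deploy target could look like the single line below (what dry_run does internally is up to the ci_script module, so this is only an assumed illustration):

make_edpm_deploy_dryrun: true   # presumably makes ci_script log "make edpm_deploy" instead of executing it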
home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_wait_deploy.yml
---
- name: Debug make_edpm_wait_deploy_env
  when: make_edpm_wait_deploy_env is defined
  ansible.builtin.debug:
    var: make_edpm_wait_deploy_env
- name: Debug make_edpm_wait_deploy_params
  when: make_edpm_wait_deploy_params is defined
  ansible.builtin.debug:
    var: make_edpm_wait_deploy_params
- name: Run edpm_wait_deploy
  retries: "{{ make_edpm_wait_deploy_retries | default(omit) }}"
  delay: "{{ make_edpm_wait_deploy_delay | default(omit) }}"
  until: "{{ make_edpm_wait_deploy_until | default(true) }}"
  register: "make_edpm_wait_deploy_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make edpm_wait_deploy"
    dry_run: "{{ make_edpm_wait_deploy_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_edpm_wait_deploy_env|default({})), **(make_edpm_wait_deploy_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_register_dns.yml
---
- name: Debug make_edpm_register_dns_env
  when: make_edpm_register_dns_env is defined
  ansible.builtin.debug:
    var: make_edpm_register_dns_env
- name: Debug make_edpm_register_dns_params
  when: make_edpm_register_dns_params is defined
  ansible.builtin.debug:
    var: make_edpm_register_dns_params
- name: Run edpm_register_dns
  retries: "{{ make_edpm_register_dns_retries | default(omit) }}"
  delay: "{{ make_edpm_register_dns_delay | default(omit) }}"
  until: "{{ make_edpm_register_dns_until | default(true) }}"
  register: "make_edpm_register_dns_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make edpm_register_dns"
    dry_run: "{{ make_edpm_register_dns_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_edpm_register_dns_env|default({})), **(make_edpm_register_dns_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_nova_discover_hosts.yml
---
- name: Debug make_edpm_nova_discover_hosts_env
  when: make_edpm_nova_discover_hosts_env is defined
  ansible.builtin.debug:
    var: make_edpm_nova_discover_hosts_env
- name: Debug make_edpm_nova_discover_hosts_params
  when: make_edpm_nova_discover_hosts_params is defined
  ansible.builtin.debug:
    var: make_edpm_nova_discover_hosts_params
- name: Run edpm_nova_discover_hosts
  retries: "{{ make_edpm_nova_discover_hosts_retries | default(omit) }}"
  delay: "{{ make_edpm_nova_discover_hosts_delay | default(omit) }}"
  until: "{{ make_edpm_nova_discover_hosts_until | default(true) }}"
  register: "make_edpm_nova_discover_hosts_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make edpm_nova_discover_hosts"
    dry_run: "{{ make_edpm_nova_discover_hosts_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_edpm_nova_discover_hosts_env|default({})), **(make_edpm_nova_discover_hosts_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_crds.yml
---
- name: Debug make_openstack_crds_env
  when: make_openstack_crds_env is defined
  ansible.builtin.debug:
    var: make_openstack_crds_env
- name: Debug make_openstack_crds_params
  when: make_openstack_crds_params is defined
  ansible.builtin.debug:
    var: make_openstack_crds_params
- name: Run openstack_crds
  retries: "{{ make_openstack_crds_retries | default(omit) }}"
  delay: "{{ make_openstack_crds_delay | default(omit) }}"
  until: "{{ make_openstack_crds_until | default(true) }}"
  register: "make_openstack_crds_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls"
    script: "make openstack_crds"
    dry_run: "{{ make_openstack_crds_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_openstack_crds_env|default({})), **(make_openstack_crds_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_openstack_crds_cleanup.yml
--- - name: Debug make_openstack_crds_cleanup_env when: make_openstack_crds_cleanup_env is defined ansible.builtin.debug: var: make_openstack_crds_cleanup_env - name: Debug make_openstack_crds_cleanup_params when: make_openstack_crds_cleanup_params is defined ansible.builtin.debug: var: make_openstack_crds_cleanup_params - name: Run openstack_crds_cleanup retries: "{{ make_openstack_crds_cleanup_retries | default(omit) }}" delay: "{{ make_openstack_crds_cleanup_delay | default(omit) }}" until: "{{ make_openstack_crds_cleanup_until | default(true) }}" register: "make_openstack_crds_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts"
chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make openstack_crds_cleanup" dry_run: "{{ make_openstack_crds_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_openstack_crds_cleanup_env|default({})), **(make_openstack_crds_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000017300000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_networker_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_dep0000644000175000017500000000207615134411253033350 0ustar zuulzuul--- - name: Debug make_edpm_deploy_networker_prep_env when: make_edpm_deploy_networker_prep_env is defined ansible.builtin.debug: var: make_edpm_deploy_networker_prep_env - name: Debug make_edpm_deploy_networker_prep_params when: make_edpm_deploy_networker_prep_params is defined ansible.builtin.debug: var: make_edpm_deploy_networker_prep_params - name: Run edpm_deploy_networker_prep retries: "{{ make_edpm_deploy_networker_prep_retries | default(omit) }}" delay: "{{ make_edpm_deploy_networker_prep_delay | default(omit) }}" until: "{{ make_edpm_deploy_networker_prep_until | default(true) }}" register: "make_edpm_deploy_networker_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_deploy_networker_prep" dry_run: "{{ make_edpm_deploy_networker_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_deploy_networker_prep_env|default({})), **(make_edpm_deploy_networker_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000017600000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_networker_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_dep0000644000175000017500000000215315134411253033344 0ustar zuulzuul--- - name: Debug make_edpm_deploy_networker_cleanup_env when: make_edpm_deploy_networker_cleanup_env is defined ansible.builtin.debug: var: make_edpm_deploy_networker_cleanup_env - name: Debug make_edpm_deploy_networker_cleanup_params when: make_edpm_deploy_networker_cleanup_params is defined ansible.builtin.debug: var: make_edpm_deploy_networker_cleanup_params - name: Run edpm_deploy_networker_cleanup retries: "{{ make_edpm_deploy_networker_cleanup_retries | default(omit) }}" delay: "{{ make_edpm_deploy_networker_cleanup_delay | default(omit) }}" until: "{{ make_edpm_deploy_networker_cleanup_until | default(true) }}" register: "make_edpm_deploy_networker_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_deploy_networker_cleanup" dry_run: "{{ make_edpm_deploy_networker_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_deploy_networker_cleanup_env|default({})), **(make_edpm_deploy_networker_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_networker.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_dep0000644000175000017500000000176315134411253033352 0ustar zuulzuul--- - name: Debug 
make_edpm_deploy_networker_env when: make_edpm_deploy_networker_env is defined ansible.builtin.debug: var: make_edpm_deploy_networker_env - name: Debug make_edpm_deploy_networker_params when: make_edpm_deploy_networker_params is defined ansible.builtin.debug: var: make_edpm_deploy_networker_params - name: Run edpm_deploy_networker retries: "{{ make_edpm_deploy_networker_retries | default(omit) }}" delay: "{{ make_edpm_deploy_networker_delay | default(omit) }}" until: "{{ make_edpm_deploy_networker_until | default(true) }}" register: "make_edpm_deploy_networker_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make edpm_deploy_networker" dry_run: "{{ make_edpm_deploy_networker_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_deploy_networker_env|default({})), **(make_edpm_deploy_networker_params|default({}))) }}" ././@LongLink0000644000000000000000000000015300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra_pr0000644000175000017500000000151615134411253033371 0ustar zuulzuul--- - name: Debug make_infra_prep_env when: make_infra_prep_env is defined ansible.builtin.debug: var: make_infra_prep_env - name: Debug make_infra_prep_params when: make_infra_prep_params is defined ansible.builtin.debug: var: make_infra_prep_params - name: Run infra_prep retries: "{{ make_infra_prep_retries | default(omit) }}" delay: "{{ make_infra_prep_delay | default(omit) }}" until: "{{ make_infra_prep_until | default(true) }}" register: "make_infra_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make infra_prep" dry_run: "{{ make_infra_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_infra_prep_env|default({})), **(make_infra_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000014600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra.ym0000644000175000017500000000140315134411253033307 0ustar zuulzuul--- - name: Debug make_infra_env when: make_infra_env is defined ansible.builtin.debug: var: make_infra_env - name: Debug make_infra_params when: make_infra_params is defined ansible.builtin.debug: var: make_infra_params - name: Run infra retries: "{{ make_infra_retries | default(omit) }}" delay: "{{ make_infra_delay | default(omit) }}" until: "{{ make_infra_until | default(true) }}" register: "make_infra_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make infra" dry_run: "{{ make_infra_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_infra_env|default({})), **(make_infra_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_infra_cl0000644000175000017500000000157315134411253033351 0ustar zuulzuul--- - name: Debug 
make_infra_cleanup_env when: make_infra_cleanup_env is defined ansible.builtin.debug: var: make_infra_cleanup_env - name: Debug make_infra_cleanup_params when: make_infra_cleanup_params is defined ansible.builtin.debug: var: make_infra_cleanup_params - name: Run infra_cleanup retries: "{{ make_infra_cleanup_retries | default(omit) }}" delay: "{{ make_infra_cleanup_delay | default(omit) }}" until: "{{ make_infra_cleanup_until | default(true) }}" register: "make_infra_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make infra_cleanup" dry_run: "{{ make_infra_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_infra_cleanup_env|default({})), **(make_infra_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_dns_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_dns_depl0000644000175000017500000000163115134411253033357 0ustar zuulzuul--- - name: Debug make_dns_deploy_prep_env when: make_dns_deploy_prep_env is defined ansible.builtin.debug: var: make_dns_deploy_prep_env - name: Debug make_dns_deploy_prep_params when: make_dns_deploy_prep_params is defined ansible.builtin.debug: var: make_dns_deploy_prep_params - name: Run dns_deploy_prep retries: "{{ make_dns_deploy_prep_retries | default(omit) }}" delay: "{{ make_dns_deploy_prep_delay | default(omit) }}" until: "{{ make_dns_deploy_prep_until | default(true) }}" register: "make_dns_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make dns_deploy_prep" dry_run: "{{ make_dns_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_dns_deploy_prep_env|default({})), **(make_dns_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_dns_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_dns_depl0000644000175000017500000000151615134411253033361 0ustar zuulzuul--- - name: Debug make_dns_deploy_env when: make_dns_deploy_env is defined ansible.builtin.debug: var: make_dns_deploy_env - name: Debug make_dns_deploy_params when: make_dns_deploy_params is defined ansible.builtin.debug: var: make_dns_deploy_params - name: Run dns_deploy retries: "{{ make_dns_deploy_retries | default(omit) }}" delay: "{{ make_dns_deploy_delay | default(omit) }}" until: "{{ make_dns_deploy_until | default(true) }}" register: "make_dns_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make dns_deploy" dry_run: "{{ make_dns_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_dns_deploy_env|default({})), **(make_dns_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_dns_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_dns_depl0000644000175000017500000000170615134411253033362 0ustar 
zuulzuul--- - name: Debug make_dns_deploy_cleanup_env when: make_dns_deploy_cleanup_env is defined ansible.builtin.debug: var: make_dns_deploy_cleanup_env - name: Debug make_dns_deploy_cleanup_params when: make_dns_deploy_cleanup_params is defined ansible.builtin.debug: var: make_dns_deploy_cleanup_params - name: Run dns_deploy_cleanup retries: "{{ make_dns_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_dns_deploy_cleanup_delay | default(omit) }}" until: "{{ make_dns_deploy_cleanup_until | default(true) }}" register: "make_dns_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make dns_deploy_cleanup" dry_run: "{{ make_dns_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_dns_deploy_cleanup_env|default({})), **(make_dns_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netconfig_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netconfi0000644000175000017500000000176315134411253033402 0ustar zuulzuul--- - name: Debug make_netconfig_deploy_prep_env when: make_netconfig_deploy_prep_env is defined ansible.builtin.debug: var: make_netconfig_deploy_prep_env - name: Debug make_netconfig_deploy_prep_params when: make_netconfig_deploy_prep_params is defined ansible.builtin.debug: var: make_netconfig_deploy_prep_params - name: Run netconfig_deploy_prep retries: "{{ make_netconfig_deploy_prep_retries | default(omit) }}" delay: "{{ make_netconfig_deploy_prep_delay | default(omit) }}" until: "{{ make_netconfig_deploy_prep_until | default(true) }}" register: "make_netconfig_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netconfig_deploy_prep" dry_run: "{{ make_netconfig_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netconfig_deploy_prep_env|default({})), **(make_netconfig_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netconfig_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netconfi0000644000175000017500000000165015134411253033375 0ustar zuulzuul--- - name: Debug make_netconfig_deploy_env when: make_netconfig_deploy_env is defined ansible.builtin.debug: var: make_netconfig_deploy_env - name: Debug make_netconfig_deploy_params when: make_netconfig_deploy_params is defined ansible.builtin.debug: var: make_netconfig_deploy_params - name: Run netconfig_deploy retries: "{{ make_netconfig_deploy_retries | default(omit) }}" delay: "{{ make_netconfig_deploy_delay | default(omit) }}" until: "{{ make_netconfig_deploy_until | default(true) }}" register: "make_netconfig_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netconfig_deploy" dry_run: "{{ make_netconfig_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netconfig_deploy_env|default({})), **(make_netconfig_deploy_params|default({}))) }}" 
././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netconfig_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_netconfi0000644000175000017500000000204015134411253033367 0ustar zuulzuul--- - name: Debug make_netconfig_deploy_cleanup_env when: make_netconfig_deploy_cleanup_env is defined ansible.builtin.debug: var: make_netconfig_deploy_cleanup_env - name: Debug make_netconfig_deploy_cleanup_params when: make_netconfig_deploy_cleanup_params is defined ansible.builtin.debug: var: make_netconfig_deploy_cleanup_params - name: Run netconfig_deploy_cleanup retries: "{{ make_netconfig_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_netconfig_deploy_cleanup_delay | default(omit) }}" until: "{{ make_netconfig_deploy_cleanup_until | default(true) }}" register: "make_netconfig_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make netconfig_deploy_cleanup" dry_run: "{{ make_netconfig_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_netconfig_deploy_cleanup_env|default({})), **(make_netconfig_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_memcached_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_memcache0000644000175000017500000000176315134411253033337 0ustar zuulzuul--- - name: Debug make_memcached_deploy_prep_env when: make_memcached_deploy_prep_env is defined ansible.builtin.debug: var: make_memcached_deploy_prep_env - name: Debug make_memcached_deploy_prep_params when: make_memcached_deploy_prep_params is defined ansible.builtin.debug: var: make_memcached_deploy_prep_params - name: Run memcached_deploy_prep retries: "{{ make_memcached_deploy_prep_retries | default(omit) }}" delay: "{{ make_memcached_deploy_prep_delay | default(omit) }}" until: "{{ make_memcached_deploy_prep_until | default(true) }}" register: "make_memcached_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make memcached_deploy_prep" dry_run: "{{ make_memcached_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_memcached_deploy_prep_env|default({})), **(make_memcached_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_memcached_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_memcache0000644000175000017500000000165015134411253033332 0ustar zuulzuul--- - name: Debug make_memcached_deploy_env when: make_memcached_deploy_env is defined ansible.builtin.debug: var: make_memcached_deploy_env - name: Debug make_memcached_deploy_params when: make_memcached_deploy_params is defined ansible.builtin.debug: var: make_memcached_deploy_params - name: Run memcached_deploy retries: "{{ make_memcached_deploy_retries | default(omit) }}" delay: "{{ make_memcached_deploy_delay | default(omit) }}" until: "{{ make_memcached_deploy_until | default(true) }}" 
register: "make_memcached_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make memcached_deploy" dry_run: "{{ make_memcached_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_memcached_deploy_env|default({})), **(make_memcached_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_memcached_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_memcache0000644000175000017500000000204015134411253033324 0ustar zuulzuul--- - name: Debug make_memcached_deploy_cleanup_env when: make_memcached_deploy_cleanup_env is defined ansible.builtin.debug: var: make_memcached_deploy_cleanup_env - name: Debug make_memcached_deploy_cleanup_params when: make_memcached_deploy_cleanup_params is defined ansible.builtin.debug: var: make_memcached_deploy_cleanup_params - name: Run memcached_deploy_cleanup retries: "{{ make_memcached_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_memcached_deploy_cleanup_delay | default(omit) }}" until: "{{ make_memcached_deploy_cleanup_until | default(true) }}" register: "make_memcached_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make memcached_deploy_cleanup" dry_run: "{{ make_memcached_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_memcached_deploy_cleanup_env|default({})), **(make_memcached_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000157315134411253033435 0ustar zuulzuul--- - name: Debug make_keystone_prep_env when: make_keystone_prep_env is defined ansible.builtin.debug: var: make_keystone_prep_env - name: Debug make_keystone_prep_params when: make_keystone_prep_params is defined ansible.builtin.debug: var: make_keystone_prep_params - name: Run keystone_prep retries: "{{ make_keystone_prep_retries | default(omit) }}" delay: "{{ make_keystone_prep_delay | default(omit) }}" until: "{{ make_keystone_prep_until | default(true) }}" register: "make_keystone_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone_prep" dry_run: "{{ make_keystone_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_prep_env|default({})), **(make_keystone_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000146015134411253033430 0ustar zuulzuul--- - name: Debug make_keystone_env when: make_keystone_env is defined ansible.builtin.debug: var: make_keystone_env - name: Debug make_keystone_params when: make_keystone_params is defined ansible.builtin.debug: var: make_keystone_params - 
name: Run keystone retries: "{{ make_keystone_retries | default(omit) }}" delay: "{{ make_keystone_delay | default(omit) }}" until: "{{ make_keystone_until | default(true) }}" register: "make_keystone_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone" dry_run: "{{ make_keystone_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_env|default({})), **(make_keystone_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000165015134411253033431 0ustar zuulzuul--- - name: Debug make_keystone_cleanup_env when: make_keystone_cleanup_env is defined ansible.builtin.debug: var: make_keystone_cleanup_env - name: Debug make_keystone_cleanup_params when: make_keystone_cleanup_params is defined ansible.builtin.debug: var: make_keystone_cleanup_params - name: Run keystone_cleanup retries: "{{ make_keystone_cleanup_retries | default(omit) }}" delay: "{{ make_keystone_cleanup_delay | default(omit) }}" until: "{{ make_keystone_cleanup_until | default(true) }}" register: "make_keystone_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone_cleanup" dry_run: "{{ make_keystone_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_cleanup_env|default({})), **(make_keystone_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000174415134411253033435 0ustar zuulzuul--- - name: Debug make_keystone_deploy_prep_env when: make_keystone_deploy_prep_env is defined ansible.builtin.debug: var: make_keystone_deploy_prep_env - name: Debug make_keystone_deploy_prep_params when: make_keystone_deploy_prep_params is defined ansible.builtin.debug: var: make_keystone_deploy_prep_params - name: Run keystone_deploy_prep retries: "{{ make_keystone_deploy_prep_retries | default(omit) }}" delay: "{{ make_keystone_deploy_prep_delay | default(omit) }}" until: "{{ make_keystone_deploy_prep_until | default(true) }}" register: "make_keystone_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone_deploy_prep" dry_run: "{{ make_keystone_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_deploy_prep_env|default({})), **(make_keystone_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000163115134411253033430 0ustar zuulzuul--- - name: Debug make_keystone_deploy_env when: make_keystone_deploy_env is defined ansible.builtin.debug: var: 
make_keystone_deploy_env - name: Debug make_keystone_deploy_params when: make_keystone_deploy_params is defined ansible.builtin.debug: var: make_keystone_deploy_params - name: Run keystone_deploy retries: "{{ make_keystone_deploy_retries | default(omit) }}" delay: "{{ make_keystone_deploy_delay | default(omit) }}" until: "{{ make_keystone_deploy_until | default(true) }}" register: "make_keystone_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone_deploy" dry_run: "{{ make_keystone_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_deploy_env|default({})), **(make_keystone_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000017000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000202115134411253033422 0ustar zuulzuul--- - name: Debug make_keystone_deploy_cleanup_env when: make_keystone_deploy_cleanup_env is defined ansible.builtin.debug: var: make_keystone_deploy_cleanup_env - name: Debug make_keystone_deploy_cleanup_params when: make_keystone_deploy_cleanup_params is defined ansible.builtin.debug: var: make_keystone_deploy_cleanup_params - name: Run keystone_deploy_cleanup retries: "{{ make_keystone_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_keystone_deploy_cleanup_delay | default(omit) }}" until: "{{ make_keystone_deploy_cleanup_until | default(true) }}" register: "make_keystone_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone_deploy_cleanup" dry_run: "{{ make_keystone_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_deploy_cleanup_env|default({})), **(make_keystone_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_barbican0000644000175000017500000000157315134411253033335 0ustar zuulzuul--- - name: Debug make_barbican_prep_env when: make_barbican_prep_env is defined ansible.builtin.debug: var: make_barbican_prep_env - name: Debug make_barbican_prep_params when: make_barbican_prep_params is defined ansible.builtin.debug: var: make_barbican_prep_params - name: Run barbican_prep retries: "{{ make_barbican_prep_retries | default(omit) }}" delay: "{{ make_barbican_prep_delay | default(omit) }}" until: "{{ make_barbican_prep_until | default(true) }}" register: "make_barbican_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make barbican_prep" dry_run: "{{ make_barbican_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_barbican_prep_env|default({})), **(make_barbican_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015100000000000011600 Lustar 
install_yamls_makes/tasks/ (continued), same layout:
  make_barbican.yml
  make_barbican_cleanup.yml
  make_barbican_deploy_prep.yml
  make_barbican_deploy.yml
  make_barbican_deploy_validate.yml
  make_barbican_deploy_cleanup.yml
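Because retries, delay and until in the run task are all templated from make_<target>_* variables, a caller can ask a flaky step to retry without editing the generated file. A sketch with assumed values; whether the until string is re-evaluated as a condition against the registered result depends on the Ansible version, so treat it as illustrative:

- name: Validate the barbican deployment, retrying on failure
  vars:
    make_barbican_deploy_validate_retries: 5
    make_barbican_deploy_validate_delay: 30        # seconds between attempts
    # Assumed condition string, evaluated by the task's own until template.
    make_barbican_deploy_validate_until: "make_barbican_deploy_validate_status is not failed"
  ansible.builtin.include_role:
    name: install_yamls_makes
    tasks_from: make_barbican_deploy_validate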
install_yamls_makes/tasks/ (continued), same layout:
  make_mariadb.yml
  make_mariadb_cleanup.yml
  make_mariadb_deploy_prep.yml
  make_mariadb_deploy.yml
  make_mariadb_deploy_cleanup.yml
  make_placement_prep.yml
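The extra_args expression dict((make_<target>_env|default({})), **(make_<target>_params|default({}))) merges the two dictionaries, with keys from make_<target>_params winning on conflicts. A sketch with made-up Makefile variables to show the precedence:

vars:
  make_mariadb_env:
    NAMESPACE: openstack      # illustrative install_yamls variable
    TIMEOUT: 300
  make_mariadb_params:
    TIMEOUT: 600              # overrides the value coming from make_mariadb_env
# extra_args handed to cifmw.general.ci_script:
#   {"NAMESPACE": "openstack", "TIMEOUT": 600}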
install_yamls_makes/tasks/ (continued), same layout:
  make_placement.yml
  make_placement_cleanup.yml
  make_placement_deploy_prep.yml
  make_placement_deploy.yml
  make_placement_deploy_cleanup.yml
  make_glance_prep.yml
  make_glance.yml
  make_glance_cleanup.yml
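Each file also exposes a make_<target>_dryrun flag that is passed straight through to ci_script's dry_run parameter; presumably this renders and logs the command without executing it. A minimal sketch:

- name: Render the placement_deploy call without executing it
  vars:
    make_placement_deploy_dryrun: true
  ansible.builtin.include_role:
    name: install_yamls_makes
    tasks_from: make_placement_deploy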
install_yamls_makes/tasks/ (continued), same layout:
  make_glance_deploy_prep.yml
  make_glance_deploy.yml
  make_glance_deploy_cleanup.yml
  make_ovn_prep.yml
  make_ovn.yml
  make_ovn_cleanup.yml
  make_ovn_deploy_prep.yml
  make_ovn_deploy.yml
  make_ovn_deploy_cleanup.yml
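The targets come in prep / install / deploy_prep / deploy flavours (plus matching cleanups), so a caller typically chains them per service. A sketch for OVN; the ordering reflects the usual install_yamls flow and is an assumption, not something these files enforce:

- name: Bring up OVN through install_yamls
  vars:
    cifmw_basedir: "{{ ansible_user_dir }}/ci-framework-data"   # assumed artifact location
  ansible.builtin.include_role:
    name: install_yamls_makes
    tasks_from: "make_{{ item }}"
  loop:
    - ovn_prep
    - ovn
    - ovn_deploy_prep
    - ovn_deploy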
install_yamls_makes/tasks/ (continued), same layout:
  make_neutron_prep.yml
  make_neutron.yml
  make_neutron_cleanup.yml
  make_neutron_deploy_prep.yml
  make_neutron_deploy.yml
  make_neutron_deploy_cleanup.yml
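Every run registers a make_<target>_status result that later tasks can inspect. The exact return fields depend on cifmw.general.ci_script, so the rc check below is an assumption:

- name: Run the neutron_deploy make target
  ansible.builtin.include_role:
    name: install_yamls_makes
    tasks_from: make_neutron_deploy

- name: Show the registered result
  ansible.builtin.debug:
    var: make_neutron_deploy_status

- name: Fail loudly if the deploy did not succeed (assumed rc field)
  ansible.builtin.assert:
    that:
      - make_neutron_deploy_status.rc | default(1) == 0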
install_yamls_makes/tasks/ (continued), same layout:
  make_cinder_prep.yml
  make_cinder.yml
  make_cinder_cleanup.yml
  make_cinder_deploy_prep.yml
  make_cinder_deploy.yml
  make_cinder_deploy_cleanup.yml
  make_rabbitmq_prep.yml
  make_rabbitmq.yml
  make_rabbitmq_cleanup.yml
install_yamls_makes/tasks/ (continued), same layout:
  make_rabbitmq_deploy_prep.yml
  make_rabbitmq_deploy.yml
  make_rabbitmq_deploy_cleanup.yml
  make_ironic_prep.yml
  make_ironic.yml
  make_ironic_cleanup.yml
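The cleanup targets mirror the deploy targets, so teardown presumably runs in the reverse order of setup: the deploy-level cleanup first, then the operator-level cleanup. A sketch for RabbitMQ:

- name: Tear down RabbitMQ through install_yamls
  ansible.builtin.include_role:
    name: install_yamls_makes
    tasks_from: "make_{{ item }}"
  loop:
    - rabbitmq_deploy_cleanup
    - rabbitmq_cleanup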
make_ironic_cleanup_env is defined ansible.builtin.debug: var: make_ironic_cleanup_env - name: Debug make_ironic_cleanup_params when: make_ironic_cleanup_params is defined ansible.builtin.debug: var: make_ironic_cleanup_params - name: Run ironic_cleanup retries: "{{ make_ironic_cleanup_retries | default(omit) }}" delay: "{{ make_ironic_cleanup_delay | default(omit) }}" until: "{{ make_ironic_cleanup_until | default(true) }}" register: "make_ironic_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic_cleanup" dry_run: "{{ make_ironic_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_cleanup_env|default({})), **(make_ironic_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_d0000644000175000017500000000170615134411253033360 0ustar zuulzuul--- - name: Debug make_ironic_deploy_prep_env when: make_ironic_deploy_prep_env is defined ansible.builtin.debug: var: make_ironic_deploy_prep_env - name: Debug make_ironic_deploy_prep_params when: make_ironic_deploy_prep_params is defined ansible.builtin.debug: var: make_ironic_deploy_prep_params - name: Run ironic_deploy_prep retries: "{{ make_ironic_deploy_prep_retries | default(omit) }}" delay: "{{ make_ironic_deploy_prep_delay | default(omit) }}" until: "{{ make_ironic_deploy_prep_until | default(true) }}" register: "make_ironic_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic_deploy_prep" dry_run: "{{ make_ironic_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_deploy_prep_env|default({})), **(make_ironic_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_d0000644000175000017500000000157315134411253033362 0ustar zuulzuul--- - name: Debug make_ironic_deploy_env when: make_ironic_deploy_env is defined ansible.builtin.debug: var: make_ironic_deploy_env - name: Debug make_ironic_deploy_params when: make_ironic_deploy_params is defined ansible.builtin.debug: var: make_ironic_deploy_params - name: Run ironic_deploy retries: "{{ make_ironic_deploy_retries | default(omit) }}" delay: "{{ make_ironic_deploy_delay | default(omit) }}" until: "{{ make_ironic_deploy_until | default(true) }}" register: "make_ironic_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic_deploy" dry_run: "{{ make_ironic_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_deploy_env|default({})), **(make_ironic_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ironic_d0000644000175000017500000000176315134411253033363 0ustar zuulzuul--- - name: Debug make_ironic_deploy_cleanup_env when: make_ironic_deploy_cleanup_env is defined ansible.builtin.debug: var: make_ironic_deploy_cleanup_env - name: Debug make_ironic_deploy_cleanup_params when: make_ironic_deploy_cleanup_params is defined ansible.builtin.debug: var: make_ironic_deploy_cleanup_params - name: Run ironic_deploy_cleanup retries: "{{ make_ironic_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_ironic_deploy_cleanup_delay | default(omit) }}" until: "{{ make_ironic_deploy_cleanup_until | default(true) }}" register: "make_ironic_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ironic_deploy_cleanup" dry_run: "{{ make_ironic_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ironic_deploy_cleanup_env|default({})), **(make_ironic_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia_0000644000175000017500000000155415134411253033360 0ustar zuulzuul--- - name: Debug make_octavia_prep_env when: make_octavia_prep_env is defined ansible.builtin.debug: var: make_octavia_prep_env - name: Debug make_octavia_prep_params when: make_octavia_prep_params is defined ansible.builtin.debug: var: make_octavia_prep_params - name: Run octavia_prep retries: "{{ make_octavia_prep_retries | default(omit) }}" delay: "{{ make_octavia_prep_delay | default(omit) }}" until: "{{ make_octavia_prep_until | default(true) }}" register: "make_octavia_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make octavia_prep" dry_run: "{{ make_octavia_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_octavia_prep_env|default({})), **(make_octavia_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015000000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_octavia.0000644000175000017500000000144115134411253033272 0ustar zuulzuul--- - name: Debug make_octavia_env when: make_octavia_env is defined ansible.builtin.debug: var: make_octavia_env - name: Debug make_octavia_params when: make_octavia_params is defined ansible.builtin.debug: var: make_octavia_params - name: Run octavia retries: "{{ make_octavia_retries | default(omit) }}" delay: "{{ make_octavia_delay | default(omit) }}" until: "{{ make_octavia_until | default(true) }}" register: "make_octavia_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make octavia" dry_run: "{{ make_octavia_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_octavia_env|default({})), **(make_octavia_params|default({}))) }}" 
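A minimal sketch of how one of these generated task files might be consumed from a playbook. It assumes the role is named install_yamls_makes and is resolvable on the role path, and that the cifmw.general collection is installed; the NAMESPACE value passed through make_rabbitmq_deploy_prep_params is purely illustrative:

---
# Hypothetical play: run the rabbitmq_deploy_prep make target through the
# generated wrapper task file. Variable values are illustrative only.
- hosts: localhost
  gather_facts: false
  vars:
    cifmw_basedir: "/home/zuul/ci-framework-data"   # assumed location
    make_rabbitmq_deploy_prep_params:
      NAMESPACE: openstack                          # assumed make variable
  tasks:
    - name: Run the rabbitmq_deploy_prep target
      ansible.builtin.include_role:
        name: install_yamls_makes
        tasks_from: make_rabbitmq_deploy_prep.yml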
Further task files in the same directory, generated from the same pattern:
make_octavia_cleanup.yml
make_octavia_deploy_prep.yml
make_octavia_deploy.yml
make_octavia_deploy_cleanup.yml
make_designate_prep.yml
make_designate.yml
make_designate_cleanup.yml
make_designate_deploy_prep.yml
make_designate_deploy.yml
make_designate_deploy_cleanup.yml
make_nova_prep.yml
make_nova.yml
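The extra_args expression in each wrapper merges the optional make_<target>_env and make_<target>_params dictionaries; because it uses Python's dict(mapping, **kwargs) semantics, entries from the params dictionary win on duplicate keys. A small stand-alone check of that behaviour, using the same Jinja expression with throwaway variable names and values:

---
# Minimal sketch: show that the params dict overrides the env dict on
# duplicate keys, mirroring the extra_args expression used by the wrappers.
- hosts: localhost
  gather_facts: false
  vars:
    demo_env:              # illustrative stand-in for make_<target>_env
      NAMESPACE: openstack
      TIMEOUT: "300"
    demo_params:           # illustrative stand-in for make_<target>_params
      TIMEOUT: "600"
  tasks:
    - name: Show the merged dictionary (TIMEOUT comes from demo_params)
      ansible.builtin.debug:
        msg: "{{ dict((demo_env | default({})), **(demo_params | default({}))) }}"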
delay: "{{ make_nova_delay | default(omit) }}" until: "{{ make_nova_until | default(true) }}" register: "make_nova_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nova" dry_run: "{{ make_nova_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nova_env|default({})), **(make_nova_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_cle0000644000175000017500000000155415134411253033361 0ustar zuulzuul--- - name: Debug make_nova_cleanup_env when: make_nova_cleanup_env is defined ansible.builtin.debug: var: make_nova_cleanup_env - name: Debug make_nova_cleanup_params when: make_nova_cleanup_params is defined ansible.builtin.debug: var: make_nova_cleanup_params - name: Run nova_cleanup retries: "{{ make_nova_cleanup_retries | default(omit) }}" delay: "{{ make_nova_cleanup_delay | default(omit) }}" until: "{{ make_nova_cleanup_until | default(true) }}" register: "make_nova_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nova_cleanup" dry_run: "{{ make_nova_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nova_cleanup_env|default({})), **(make_nova_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_deploy_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_dep0000644000175000017500000000165015134411253033363 0ustar zuulzuul--- - name: Debug make_nova_deploy_prep_env when: make_nova_deploy_prep_env is defined ansible.builtin.debug: var: make_nova_deploy_prep_env - name: Debug make_nova_deploy_prep_params when: make_nova_deploy_prep_params is defined ansible.builtin.debug: var: make_nova_deploy_prep_params - name: Run nova_deploy_prep retries: "{{ make_nova_deploy_prep_retries | default(omit) }}" delay: "{{ make_nova_deploy_prep_delay | default(omit) }}" until: "{{ make_nova_deploy_prep_until | default(true) }}" register: "make_nova_deploy_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nova_deploy_prep" dry_run: "{{ make_nova_deploy_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nova_deploy_prep_env|default({})), **(make_nova_deploy_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_dep0000644000175000017500000000153515134411253033365 0ustar zuulzuul--- - name: Debug make_nova_deploy_env when: make_nova_deploy_env is defined ansible.builtin.debug: var: make_nova_deploy_env - name: Debug make_nova_deploy_params when: make_nova_deploy_params is defined ansible.builtin.debug: var: make_nova_deploy_params - name: Run nova_deploy retries: "{{ make_nova_deploy_retries | default(omit) }}" delay: "{{ 
make_nova_deploy_delay | default(omit) }}" until: "{{ make_nova_deploy_until | default(true) }}" register: "make_nova_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nova_deploy" dry_run: "{{ make_nova_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nova_deploy_env|default({})), **(make_nova_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_nova_dep0000644000175000017500000000172515134411253033366 0ustar zuulzuul--- - name: Debug make_nova_deploy_cleanup_env when: make_nova_deploy_cleanup_env is defined ansible.builtin.debug: var: make_nova_deploy_cleanup_env - name: Debug make_nova_deploy_cleanup_params when: make_nova_deploy_cleanup_params is defined ansible.builtin.debug: var: make_nova_deploy_cleanup_params - name: Run nova_deploy_cleanup retries: "{{ make_nova_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_nova_deploy_cleanup_delay | default(omit) }}" until: "{{ make_nova_deploy_cleanup_until | default(true) }}" register: "make_nova_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make nova_deploy_cleanup" dry_run: "{{ make_nova_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_nova_deploy_cleanup_env|default({})), **(make_nova_deploy_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_0000644000175000017500000000166715134411253033336 0ustar zuulzuul--- - name: Debug make_mariadb_kuttl_run_env when: make_mariadb_kuttl_run_env is defined ansible.builtin.debug: var: make_mariadb_kuttl_run_env - name: Debug make_mariadb_kuttl_run_params when: make_mariadb_kuttl_run_params is defined ansible.builtin.debug: var: make_mariadb_kuttl_run_params - name: Run mariadb_kuttl_run retries: "{{ make_mariadb_kuttl_run_retries | default(omit) }}" delay: "{{ make_mariadb_kuttl_run_delay | default(omit) }}" until: "{{ make_mariadb_kuttl_run_until | default(true) }}" register: "make_mariadb_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make mariadb_kuttl_run" dry_run: "{{ make_mariadb_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_mariadb_kuttl_run_env|default({})), **(make_mariadb_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_kuttl.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_mariadb_0000644000175000017500000000157315134411253033332 0ustar zuulzuul--- - name: Debug make_mariadb_kuttl_env when: make_mariadb_kuttl_env is defined ansible.builtin.debug: var: make_mariadb_kuttl_env - name: Debug make_mariadb_kuttl_params when: 
make_mariadb_kuttl_params is defined ansible.builtin.debug: var: make_mariadb_kuttl_params - name: Run mariadb_kuttl retries: "{{ make_mariadb_kuttl_retries | default(omit) }}" delay: "{{ make_mariadb_kuttl_delay | default(omit) }}" until: "{{ make_mariadb_kuttl_until | default(true) }}" register: "make_mariadb_kuttl_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make mariadb_kuttl" dry_run: "{{ make_mariadb_kuttl_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_mariadb_kuttl_env|default({})), **(make_mariadb_kuttl_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_db_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_db0000644000175000017500000000157315134411253033404 0ustar zuulzuul--- - name: Debug make_kuttl_db_prep_env when: make_kuttl_db_prep_env is defined ansible.builtin.debug: var: make_kuttl_db_prep_env - name: Debug make_kuttl_db_prep_params when: make_kuttl_db_prep_params is defined ansible.builtin.debug: var: make_kuttl_db_prep_params - name: Run kuttl_db_prep retries: "{{ make_kuttl_db_prep_retries | default(omit) }}" delay: "{{ make_kuttl_db_prep_delay | default(omit) }}" until: "{{ make_kuttl_db_prep_until | default(true) }}" register: "make_kuttl_db_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make kuttl_db_prep" dry_run: "{{ make_kuttl_db_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_kuttl_db_prep_env|default({})), **(make_kuttl_db_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_db_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_db0000644000175000017500000000165015134411253033400 0ustar zuulzuul--- - name: Debug make_kuttl_db_cleanup_env when: make_kuttl_db_cleanup_env is defined ansible.builtin.debug: var: make_kuttl_db_cleanup_env - name: Debug make_kuttl_db_cleanup_params when: make_kuttl_db_cleanup_params is defined ansible.builtin.debug: var: make_kuttl_db_cleanup_params - name: Run kuttl_db_cleanup retries: "{{ make_kuttl_db_cleanup_retries | default(omit) }}" delay: "{{ make_kuttl_db_cleanup_delay | default(omit) }}" until: "{{ make_kuttl_db_cleanup_until | default(true) }}" register: "make_kuttl_db_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make kuttl_db_cleanup" dry_run: "{{ make_kuttl_db_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_kuttl_db_cleanup_env|default({})), **(make_kuttl_db_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_common_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_co0000644000175000017500000000166715134411253033424 0ustar zuulzuul--- - name: Debug make_kuttl_common_prep_env when: make_kuttl_common_prep_env is defined 
ansible.builtin.debug: var: make_kuttl_common_prep_env - name: Debug make_kuttl_common_prep_params when: make_kuttl_common_prep_params is defined ansible.builtin.debug: var: make_kuttl_common_prep_params - name: Run kuttl_common_prep retries: "{{ make_kuttl_common_prep_retries | default(omit) }}" delay: "{{ make_kuttl_common_prep_delay | default(omit) }}" until: "{{ make_kuttl_common_prep_until | default(true) }}" register: "make_kuttl_common_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make kuttl_common_prep" dry_run: "{{ make_kuttl_common_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_kuttl_common_prep_env|default({})), **(make_kuttl_common_prep_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_common_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_kuttl_co0000644000175000017500000000174415134411253033420 0ustar zuulzuul--- - name: Debug make_kuttl_common_cleanup_env when: make_kuttl_common_cleanup_env is defined ansible.builtin.debug: var: make_kuttl_common_cleanup_env - name: Debug make_kuttl_common_cleanup_params when: make_kuttl_common_cleanup_params is defined ansible.builtin.debug: var: make_kuttl_common_cleanup_params - name: Run kuttl_common_cleanup retries: "{{ make_kuttl_common_cleanup_retries | default(omit) }}" delay: "{{ make_kuttl_common_cleanup_delay | default(omit) }}" until: "{{ make_kuttl_common_cleanup_until | default(true) }}" register: "make_kuttl_common_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make kuttl_common_cleanup" dry_run: "{{ make_kuttl_common_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_kuttl_common_cleanup_env|default({})), **(make_kuttl_common_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone_kuttl_run.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_keystone0000644000175000017500000000170615134411253033433 0ustar zuulzuul--- - name: Debug make_keystone_kuttl_run_env when: make_keystone_kuttl_run_env is defined ansible.builtin.debug: var: make_keystone_kuttl_run_env - name: Debug make_keystone_kuttl_run_params when: make_keystone_kuttl_run_params is defined ansible.builtin.debug: var: make_keystone_kuttl_run_params - name: Run keystone_kuttl_run retries: "{{ make_keystone_kuttl_run_retries | default(omit) }}" delay: "{{ make_keystone_kuttl_run_delay | default(omit) }}" until: "{{ make_keystone_kuttl_run_until | default(true) }}" register: "make_keystone_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make keystone_kuttl_run" dry_run: "{{ make_keystone_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_keystone_kuttl_run_env|default({})), **(make_keystone_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar 
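Every wrapper also exposes a make_<target>_dryrun toggle that is passed straight to the dry_run parameter of cifmw.general.ci_script; the exact effect of a dry run is defined by that module, not by these task files. An illustrative extra-vars snippet that would switch two of the nova wrappers to dry-run mode:

---
# Illustrative extra-vars: ask the nova wrapper tasks to dry-run their
# make targets instead of executing them for real.
make_nova_deploy_prep_dryrun: true
make_nova_deploy_dryrun: true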
The kuttl wrappers continue with:
make_keystone_kuttl_run.yml
make_keystone_kuttl.yml
make_barbican_kuttl_run.yml
make_barbican_kuttl.yml
make_placement_kuttl_run.yml
make_placement_kuttl.yml
make_cinder_kuttl_run.yml
make_cinder_kuttl.yml
make_neutron_kuttl_run.yml
make_neutron_kuttl.yml
make_octavia_kuttl_run.yml
make_octavia_kuttl.yml
make_designate_kuttl.yml
The remaining kuttl wrappers in this part of the archive are:
make_designate_kuttl_run.yml
make_ovn_kuttl_run.yml
make_ovn_kuttl.yml
make_infra_kuttl_run.yml
make_infra_kuttl.yml
make_ironic_kuttl_run.yml
make_ironic_kuttl.yml
make_ironic_kuttl_crc.yml
make_heat_kuttl_run.yml
make_heat_kuttl.yml
make_heat_kuttl_crc.yml
make_ansibleee_kuttl_run.yml
zuulzuul--- - name: Debug make_ansibleee_kuttl_run_env when: make_ansibleee_kuttl_run_env is defined ansible.builtin.debug: var: make_ansibleee_kuttl_run_env - name: Debug make_ansibleee_kuttl_run_params when: make_ansibleee_kuttl_run_params is defined ansible.builtin.debug: var: make_ansibleee_kuttl_run_params - name: Run ansibleee_kuttl_run retries: "{{ make_ansibleee_kuttl_run_retries | default(omit) }}" delay: "{{ make_ansibleee_kuttl_run_delay | default(omit) }}" until: "{{ make_ansibleee_kuttl_run_until | default(true) }}" register: "make_ansibleee_kuttl_run_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ansibleee_kuttl_run" dry_run: "{{ make_ansibleee_kuttl_run_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ansibleee_kuttl_run_env|default({})), **(make_ansibleee_kuttl_run_params|default({}))) }}" ././@LongLink0000644000000000000000000000017000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansibleee_kuttl_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansiblee0000644000175000017500000000202115134411253033343 0ustar zuulzuul--- - name: Debug make_ansibleee_kuttl_cleanup_env when: make_ansibleee_kuttl_cleanup_env is defined ansible.builtin.debug: var: make_ansibleee_kuttl_cleanup_env - name: Debug make_ansibleee_kuttl_cleanup_params when: make_ansibleee_kuttl_cleanup_params is defined ansible.builtin.debug: var: make_ansibleee_kuttl_cleanup_params - name: Run ansibleee_kuttl_cleanup retries: "{{ make_ansibleee_kuttl_cleanup_retries | default(omit) }}" delay: "{{ make_ansibleee_kuttl_cleanup_delay | default(omit) }}" until: "{{ make_ansibleee_kuttl_cleanup_until | default(true) }}" register: "make_ansibleee_kuttl_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ansibleee_kuttl_cleanup" dry_run: "{{ make_ansibleee_kuttl_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ansibleee_kuttl_cleanup_env|default({})), **(make_ansibleee_kuttl_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansibleee_kuttl_prep.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ansiblee0000644000175000017500000000174415134411253033356 0ustar zuulzuul--- - name: Debug make_ansibleee_kuttl_prep_env when: make_ansibleee_kuttl_prep_env is defined ansible.builtin.debug: var: make_ansibleee_kuttl_prep_env - name: Debug make_ansibleee_kuttl_prep_params when: make_ansibleee_kuttl_prep_params is defined ansible.builtin.debug: var: make_ansibleee_kuttl_prep_params - name: Run ansibleee_kuttl_prep retries: "{{ make_ansibleee_kuttl_prep_retries | default(omit) }}" delay: "{{ make_ansibleee_kuttl_prep_delay | default(omit) }}" until: "{{ make_ansibleee_kuttl_prep_until | default(true) }}" register: "make_ansibleee_kuttl_prep_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make ansibleee_kuttl_prep" dry_run: "{{ make_ansibleee_kuttl_prep_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ansibleee_kuttl_prep_env|default({})), 
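None of these task files runs on its own in this archive; a caller would typically include the file that matches the install_yamls make target it wants and pass the per-target variables shown above (make_<target>_env, make_<target>_params, and so on). A minimal sketch of such a call, assuming the install_yamls_makes role is resolvable on the Ansible role path; the chosen target and the NAMESPACE/TIMEOUT values are illustrative and not taken from this job:

# Hypothetical consumer task: include the wrapper for one make target and
# feed it environment variables and make parameters.
- name: Run "make heat_deploy" through install_yamls_makes
  vars:
    make_heat_deploy_env:
      NAMESPACE: openstack     # illustrative env var for the make call
    make_heat_deploy_params:
      TIMEOUT: 600s            # illustrative make parameter
  ansible.builtin.include_role:
    name: install_yamls_makes
    tasks_from: make_heat_deploy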
The rest of the role's task files under home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/ follow the same three-task pattern as make_infra_kuttl_run.yml above (debug make_<target>_env, debug make_<target>_params, then run "make <target>" through cifmw.general.ci_script) and differ only in the target name:
make_infra_kuttl.yml
make_ironic_kuttl_run.yml
make_ironic_kuttl.yml
make_ironic_kuttl_crc.yml
make_heat_kuttl_run.yml
make_heat_kuttl.yml
make_heat_kuttl_crc.yml
make_ansibleee_kuttl_run.yml
make_ansibleee_kuttl_cleanup.yml
make_ansibleee_kuttl_prep.yml
make_ansibleee_kuttl.yml
make_glance_kuttl_run.yml
make_glance_kuttl.yml
make_manila_kuttl_run.yml
make_manila_kuttl.yml
make_swift_kuttl_run.yml
make_swift_kuttl.yml
make_horizon_kuttl_run.yml
make_horizon_kuttl.yml
make_openstack_kuttl_run.yml
make_openstack_kuttl.yml
make_mariadb_chainsaw_run.yml
make_mariadb_chainsaw.yml
make_horizon_prep.yml
make_horizon.yml
make_horizon_cleanup.yml
make_horizon_deploy_prep.yml
make_horizon_deploy.yml
make_horizon_deploy_cleanup.yml
make_heat_prep.yml
make_heat.yml
make_heat_cleanup.yml
make_heat_deploy_prep.yml
make_heat_deploy.yml
make_heat_deploy_cleanup.yml
make_ansibleee_prep.yml
make_ansibleee.yml
make_ansibleee_cleanup.yml
make_baremetal_prep.yml
make_baremetal.yml
make_baremetal_cleanup.yml
make_ceph_help.yml
make_ceph.yml
make_ceph_cleanup.yml
make_rook_prep.yml
make_rook.yml
make_rook_deploy_prep.yml
make_rook_deploy.yml
make_rook_crc_disk.yml
make_rook_cleanup.yml
make_lvms.yml
make_nmstate.yml
make_nncp.yml
make_nncp_cleanup.yml
make_netattach.yml
make_netattach_cleanup.yml
make_metallb.yml
make_metallb_config.yml
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_metallb_config_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_metallb_0000644000175000017500000000200215134411253033337 0ustar zuulzuul--- - name: Debug make_metallb_config_cleanup_env when: make_metallb_config_cleanup_env is defined ansible.builtin.debug: var: make_metallb_config_cleanup_env - name: Debug make_metallb_config_cleanup_params when: make_metallb_config_cleanup_params is defined ansible.builtin.debug: var: make_metallb_config_cleanup_params - name: Run metallb_config_cleanup retries: "{{ make_metallb_config_cleanup_retries | default(omit) }}" delay: "{{ make_metallb_config_cleanup_delay | default(omit) }}" until: "{{ make_metallb_config_cleanup_until | default(true) }}" register: "make_metallb_config_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make metallb_config_cleanup" dry_run: "{{ make_metallb_config_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_metallb_config_cleanup_env|default({})), **(make_metallb_config_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_metallb_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_metallb_0000644000175000017500000000163115134411253033346 0ustar zuulzuul--- - name: Debug make_metallb_cleanup_env when: make_metallb_cleanup_env is defined ansible.builtin.debug: var: make_metallb_cleanup_env - name: Debug make_metallb_cleanup_params when: make_metallb_cleanup_params is defined ansible.builtin.debug: var: make_metallb_cleanup_params - name: Run metallb_cleanup retries: "{{ make_metallb_cleanup_retries | default(omit) }}" delay: "{{ make_metallb_cleanup_delay | default(omit) }}" until: "{{ make_metallb_cleanup_until | default(true) }}" register: "make_metallb_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make metallb_cleanup" dry_run: "{{ make_metallb_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_metallb_cleanup_env|default({})), **(make_metallb_cleanup_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki.yml0000644000175000017500000000136415134411253033330 0ustar zuulzuul--- - name: Debug make_loki_env when: make_loki_env is defined ansible.builtin.debug: var: make_loki_env - name: Debug make_loki_params when: make_loki_params is defined ansible.builtin.debug: var: make_loki_params - name: Run loki retries: "{{ make_loki_retries | default(omit) }}" delay: "{{ make_loki_delay | default(omit) }}" until: "{{ make_loki_until | default(true) }}" register: "make_loki_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make loki" dry_run: "{{ make_loki_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_loki_env|default({})), **(make_loki_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki_cle0000644000175000017500000000155415134411253033354 0ustar zuulzuul--- - name: Debug make_loki_cleanup_env when: make_loki_cleanup_env is defined ansible.builtin.debug: var: make_loki_cleanup_env - name: Debug make_loki_cleanup_params when: make_loki_cleanup_params is defined ansible.builtin.debug: var: make_loki_cleanup_params - name: Run loki_cleanup retries: "{{ make_loki_cleanup_retries | default(omit) }}" delay: "{{ make_loki_cleanup_delay | default(omit) }}" until: "{{ make_loki_cleanup_until | default(true) }}" register: "make_loki_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make loki_cleanup" dry_run: "{{ make_loki_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_loki_cleanup_env|default({})), **(make_loki_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki_dep0000644000175000017500000000153515134411253033360 0ustar zuulzuul--- - name: Debug make_loki_deploy_env when: make_loki_deploy_env is defined ansible.builtin.debug: var: make_loki_deploy_env - name: Debug make_loki_deploy_params when: make_loki_deploy_params is defined ansible.builtin.debug: var: make_loki_deploy_params - name: Run loki_deploy retries: "{{ make_loki_deploy_retries | default(omit) }}" delay: "{{ make_loki_deploy_delay | default(omit) }}" until: "{{ make_loki_deploy_until | default(true) }}" register: "make_loki_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make loki_deploy" dry_run: "{{ make_loki_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_loki_deploy_env|default({})), **(make_loki_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki_deploy_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_loki_dep0000644000175000017500000000172515134411253033361 0ustar zuulzuul--- - name: Debug make_loki_deploy_cleanup_env when: make_loki_deploy_cleanup_env is defined ansible.builtin.debug: var: make_loki_deploy_cleanup_env - name: Debug make_loki_deploy_cleanup_params when: make_loki_deploy_cleanup_params is defined ansible.builtin.debug: var: make_loki_deploy_cleanup_params - name: Run loki_deploy_cleanup retries: "{{ make_loki_deploy_cleanup_retries | default(omit) }}" delay: "{{ make_loki_deploy_cleanup_delay | default(omit) }}" until: "{{ make_loki_deploy_cleanup_until | default(true) }}" register: "make_loki_deploy_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls" script: "make loki_deploy_cleanup" dry_run: "{{ make_loki_deploy_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_loki_deploy_cleanup_env|default({})), 
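In every one of these tasks, extra_args is built as dict(make_<target>_env, **make_<target>_params). Assuming the standard Jinja2 dict() global (the Python dict built-in), the two mappings are merged and a key present in both is taken from the params side. A small illustrative sketch for the loki_deploy target; the variable names come from the tasks above, the values are invented:

vars:
  make_loki_deploy_env:
    NAMESPACE: openstack          # illustrative
    TIMEOUT: "300s"               # illustrative; overridden by the params entry below
  make_loki_deploy_params:
    TIMEOUT: "600s"               # illustrative
# dict((make_loki_deploy_env|default({})), **(make_loki_deploy_params|default({})))
# then evaluates to {"NAMESPACE": "openstack", "TIMEOUT": "600s"}:
# params win over env when the same key appears in both mappings.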
Further task files with the same three-task layout and the same install_yamls working directory:

make_netobserv.yml
make_netobserv_cleanup.yml
make_netobserv_deploy.yml
make_netobserv_deploy_cleanup.yml
make_manila_prep.yml
make_manila.yml
make_manila_cleanup.yml
make_manila_deploy_prep.yml
make_manila_deploy.yml
make_manila_deploy_cleanup.yml
make_telemetry_prep.yml
More task files, again with the same layout and install_yamls working directory:

make_telemetry.yml
make_telemetry_cleanup.yml
make_telemetry_deploy_prep.yml
make_telemetry_deploy.yml
make_telemetry_deploy_cleanup.yml
make_telemetry_kuttl_run.yml
make_telemetry_kuttl.yml
make_swift_prep.yml
make_swift.yml
make_swift_cleanup.yml
make_swift_deploy_prep.yml
make_swift_deploy.yml
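The only module these task files call is cifmw.general.ci_script. For debugging outside the role it can also be invoked directly; a minimal sketch that reuses only the parameters visible in the tasks above (output_dir, chdir, script, dry_run, extra_args), with the target and values purely illustrative:

- name: Run "make swift_deploy" by hand (illustrative)
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls
    script: make swift_deploy
    dry_run: true                  # assumed semantics: render and log the script without executing it
    extra_args:
      SWIFT_REPLICAS: 1            # illustrative Makefile variable, not taken from this log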
Remaining task files that run from the install_yamls checkout:

make_swift_deploy_cleanup.yml
make_certmanager.yml
make_certmanager_cleanup.yml
make_validate_marketplace.yml
make_redis_deploy_prep.yml
make_redis_deploy.yml
make_redis_deploy_cleanup.yml
make_set_slower_etcd_profile.yml

The archive then switches to targets whose chdir is /home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup instead of the repository root; apart from the working directory the tasks are identical:

make_download_tools.yml
make_nfs.yml
make_nfs_cleanup.yml
make_crc.yml
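The devsetup targets are the ones a deployment job would typically chain before touching the operator targets. The ordering below is only an illustration of how the generated task files compose; it is not a sequence taken from this log:

- name: Prepare the CRC host (illustrative ordering)
  hosts: controller
  tasks:
    - name: Fetch devsetup tooling
      ansible.builtin.include_role:
        name: install_yamls_makes
        tasks_from: make_download_tools

    - name: Create the CRC VM
      ansible.builtin.include_role:
        name: install_yamls_makes
        tasks_from: make_crc

    - name: Attach the default interface to CRC
      ansible.builtin.include_role:
        name: install_yamls_makes
        tasks_from: make_crc_attach_default_interface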
"/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make crc" dry_run: "{{ make_crc_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_env|default({})), **(make_crc_params|default({}))) }}" ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_clea0000644000175000017500000000154615134411253033327 0ustar zuulzuul--- - name: Debug make_crc_cleanup_env when: make_crc_cleanup_env is defined ansible.builtin.debug: var: make_crc_cleanup_env - name: Debug make_crc_cleanup_params when: make_crc_cleanup_params is defined ansible.builtin.debug: var: make_crc_cleanup_params - name: Run crc_cleanup retries: "{{ make_crc_cleanup_retries | default(omit) }}" delay: "{{ make_crc_cleanup_delay | default(omit) }}" until: "{{ make_crc_cleanup_until | default(true) }}" register: "make_crc_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make crc_cleanup" dry_run: "{{ make_crc_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_cleanup_env|default({})), **(make_crc_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_scrub.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_scru0000644000175000017500000000151015134411253033366 0ustar zuulzuul--- - name: Debug make_crc_scrub_env when: make_crc_scrub_env is defined ansible.builtin.debug: var: make_crc_scrub_env - name: Debug make_crc_scrub_params when: make_crc_scrub_params is defined ansible.builtin.debug: var: make_crc_scrub_params - name: Run crc_scrub retries: "{{ make_crc_scrub_retries | default(omit) }}" delay: "{{ make_crc_scrub_delay | default(omit) }}" until: "{{ make_crc_scrub_until | default(true) }}" register: "make_crc_scrub_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make crc_scrub" dry_run: "{{ make_crc_scrub_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_scrub_env|default({})), **(make_crc_scrub_params|default({}))) }}" ././@LongLink0000644000000000000000000000017500000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_attach_default_interface.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_atta0000644000175000017500000000214515134411253033350 0ustar zuulzuul--- - name: Debug make_crc_attach_default_interface_env when: make_crc_attach_default_interface_env is defined ansible.builtin.debug: var: make_crc_attach_default_interface_env - name: Debug make_crc_attach_default_interface_params when: make_crc_attach_default_interface_params is defined ansible.builtin.debug: var: make_crc_attach_default_interface_params - name: Run crc_attach_default_interface retries: "{{ make_crc_attach_default_interface_retries | default(omit) }}" delay: "{{ make_crc_attach_default_interface_delay | default(omit) }}" until: "{{ make_crc_attach_default_interface_until | default(true) }}" register: 
"make_crc_attach_default_interface_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make crc_attach_default_interface" dry_run: "{{ make_crc_attach_default_interface_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_attach_default_interface_env|default({})), **(make_crc_attach_default_interface_params|default({}))) }}" ././@LongLink0000644000000000000000000000020500000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_attach_default_interface_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_crc_atta0000644000175000017500000000233515134411253033351 0ustar zuulzuul--- - name: Debug make_crc_attach_default_interface_cleanup_env when: make_crc_attach_default_interface_cleanup_env is defined ansible.builtin.debug: var: make_crc_attach_default_interface_cleanup_env - name: Debug make_crc_attach_default_interface_cleanup_params when: make_crc_attach_default_interface_cleanup_params is defined ansible.builtin.debug: var: make_crc_attach_default_interface_cleanup_params - name: Run crc_attach_default_interface_cleanup retries: "{{ make_crc_attach_default_interface_cleanup_retries | default(omit) }}" delay: "{{ make_crc_attach_default_interface_cleanup_delay | default(omit) }}" until: "{{ make_crc_attach_default_interface_cleanup_until | default(true) }}" register: "make_crc_attach_default_interface_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make crc_attach_default_interface_cleanup" dry_run: "{{ make_crc_attach_default_interface_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_crc_attach_default_interface_cleanup_env|default({})), **(make_crc_attach_default_interface_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_network.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000166115134411253033274 0ustar zuulzuul--- - name: Debug make_ipv6_lab_network_env when: make_ipv6_lab_network_env is defined ansible.builtin.debug: var: make_ipv6_lab_network_env - name: Debug make_ipv6_lab_network_params when: make_ipv6_lab_network_params is defined ansible.builtin.debug: var: make_ipv6_lab_network_params - name: Run ipv6_lab_network retries: "{{ make_ipv6_lab_network_retries | default(omit) }}" delay: "{{ make_ipv6_lab_network_delay | default(omit) }}" until: "{{ make_ipv6_lab_network_until | default(true) }}" register: "make_ipv6_lab_network_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_network" dry_run: "{{ make_ipv6_lab_network_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_network_env|default({})), **(make_ipv6_lab_network_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_network_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000205115134411253033266 0ustar zuulzuul--- - name: Debug make_ipv6_lab_network_cleanup_env when: make_ipv6_lab_network_cleanup_env is defined ansible.builtin.debug: var: make_ipv6_lab_network_cleanup_env - name: Debug make_ipv6_lab_network_cleanup_params when: make_ipv6_lab_network_cleanup_params is defined ansible.builtin.debug: var: make_ipv6_lab_network_cleanup_params - name: Run ipv6_lab_network_cleanup retries: "{{ make_ipv6_lab_network_cleanup_retries | default(omit) }}" delay: "{{ make_ipv6_lab_network_cleanup_delay | default(omit) }}" until: "{{ make_ipv6_lab_network_cleanup_until | default(true) }}" register: "make_ipv6_lab_network_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_network_cleanup" dry_run: "{{ make_ipv6_lab_network_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_network_cleanup_env|default({})), **(make_ipv6_lab_network_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_nat64_router.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000177415134411253033301 0ustar zuulzuul--- - name: Debug make_ipv6_lab_nat64_router_env when: make_ipv6_lab_nat64_router_env is defined ansible.builtin.debug: var: make_ipv6_lab_nat64_router_env - name: Debug make_ipv6_lab_nat64_router_params when: make_ipv6_lab_nat64_router_params is defined ansible.builtin.debug: var: make_ipv6_lab_nat64_router_params - name: Run ipv6_lab_nat64_router retries: "{{ make_ipv6_lab_nat64_router_retries | default(omit) }}" delay: "{{ make_ipv6_lab_nat64_router_delay | default(omit) }}" until: "{{ make_ipv6_lab_nat64_router_until | default(true) }}" register: "make_ipv6_lab_nat64_router_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_nat64_router" dry_run: "{{ make_ipv6_lab_nat64_router_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_nat64_router_env|default({})), **(make_ipv6_lab_nat64_router_params|default({}))) }}" ././@LongLink0000644000000000000000000000017600000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_nat64_router_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000216415134411253033273 0ustar zuulzuul--- - name: Debug make_ipv6_lab_nat64_router_cleanup_env when: make_ipv6_lab_nat64_router_cleanup_env is defined ansible.builtin.debug: var: make_ipv6_lab_nat64_router_cleanup_env - name: Debug make_ipv6_lab_nat64_router_cleanup_params when: make_ipv6_lab_nat64_router_cleanup_params is defined ansible.builtin.debug: var: make_ipv6_lab_nat64_router_cleanup_params - name: Run ipv6_lab_nat64_router_cleanup retries: "{{ make_ipv6_lab_nat64_router_cleanup_retries | default(omit) }}" delay: "{{ make_ipv6_lab_nat64_router_cleanup_delay | 
default(omit) }}" until: "{{ make_ipv6_lab_nat64_router_cleanup_until | default(true) }}" register: "make_ipv6_lab_nat64_router_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_nat64_router_cleanup" dry_run: "{{ make_ipv6_lab_nat64_router_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_nat64_router_cleanup_env|default({})), **(make_ipv6_lab_nat64_router_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_sno.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000156515134411253033277 0ustar zuulzuul--- - name: Debug make_ipv6_lab_sno_env when: make_ipv6_lab_sno_env is defined ansible.builtin.debug: var: make_ipv6_lab_sno_env - name: Debug make_ipv6_lab_sno_params when: make_ipv6_lab_sno_params is defined ansible.builtin.debug: var: make_ipv6_lab_sno_params - name: Run ipv6_lab_sno retries: "{{ make_ipv6_lab_sno_retries | default(omit) }}" delay: "{{ make_ipv6_lab_sno_delay | default(omit) }}" until: "{{ make_ipv6_lab_sno_until | default(true) }}" register: "make_ipv6_lab_sno_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_sno" dry_run: "{{ make_ipv6_lab_sno_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_sno_env|default({})), **(make_ipv6_lab_sno_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_sno_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000175515134411253033300 0ustar zuulzuul--- - name: Debug make_ipv6_lab_sno_cleanup_env when: make_ipv6_lab_sno_cleanup_env is defined ansible.builtin.debug: var: make_ipv6_lab_sno_cleanup_env - name: Debug make_ipv6_lab_sno_cleanup_params when: make_ipv6_lab_sno_cleanup_params is defined ansible.builtin.debug: var: make_ipv6_lab_sno_cleanup_params - name: Run ipv6_lab_sno_cleanup retries: "{{ make_ipv6_lab_sno_cleanup_retries | default(omit) }}" delay: "{{ make_ipv6_lab_sno_cleanup_delay | default(omit) }}" until: "{{ make_ipv6_lab_sno_cleanup_until | default(true) }}" register: "make_ipv6_lab_sno_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_sno_cleanup" dry_run: "{{ make_ipv6_lab_sno_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_sno_cleanup_env|default({})), **(make_ipv6_lab_sno_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000147115134411253033273 0ustar zuulzuul--- - name: Debug make_ipv6_lab_env when: make_ipv6_lab_env is defined ansible.builtin.debug: var: make_ipv6_lab_env - name: Debug 
make_ipv6_lab_params when: make_ipv6_lab_params is defined ansible.builtin.debug: var: make_ipv6_lab_params - name: Run ipv6_lab retries: "{{ make_ipv6_lab_retries | default(omit) }}" delay: "{{ make_ipv6_lab_delay | default(omit) }}" until: "{{ make_ipv6_lab_until | default(true) }}" register: "make_ipv6_lab_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab" dry_run: "{{ make_ipv6_lab_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_env|default({})), **(make_ipv6_lab_params|default({}))) }}" ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_ipv6_lab0000644000175000017500000000166115134411253033274 0ustar zuulzuul--- - name: Debug make_ipv6_lab_cleanup_env when: make_ipv6_lab_cleanup_env is defined ansible.builtin.debug: var: make_ipv6_lab_cleanup_env - name: Debug make_ipv6_lab_cleanup_params when: make_ipv6_lab_cleanup_params is defined ansible.builtin.debug: var: make_ipv6_lab_cleanup_params - name: Run ipv6_lab_cleanup retries: "{{ make_ipv6_lab_cleanup_retries | default(omit) }}" delay: "{{ make_ipv6_lab_cleanup_delay | default(omit) }}" until: "{{ make_ipv6_lab_cleanup_until | default(true) }}" register: "make_ipv6_lab_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make ipv6_lab_cleanup" dry_run: "{{ make_ipv6_lab_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_ipv6_lab_cleanup_env|default({})), **(make_ipv6_lab_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_attach_default_interface.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_attach_d0000644000175000017500000000205115134411253033333 0ustar zuulzuul--- - name: Debug make_attach_default_interface_env when: make_attach_default_interface_env is defined ansible.builtin.debug: var: make_attach_default_interface_env - name: Debug make_attach_default_interface_params when: make_attach_default_interface_params is defined ansible.builtin.debug: var: make_attach_default_interface_params - name: Run attach_default_interface retries: "{{ make_attach_default_interface_retries | default(omit) }}" delay: "{{ make_attach_default_interface_delay | default(omit) }}" until: "{{ make_attach_default_interface_until | default(true) }}" register: "make_attach_default_interface_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make attach_default_interface" dry_run: "{{ make_attach_default_interface_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_attach_default_interface_env|default({})), **(make_attach_default_interface_params|default({}))) }}" ././@LongLink0000644000000000000000000000020100000000000011574 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_attach_default_interface_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_attach_d0000644000175000017500000000224115134411253033334 0ustar zuulzuul--- - name: Debug make_attach_default_interface_cleanup_env when: make_attach_default_interface_cleanup_env is defined ansible.builtin.debug: var: make_attach_default_interface_cleanup_env - name: Debug make_attach_default_interface_cleanup_params when: make_attach_default_interface_cleanup_params is defined ansible.builtin.debug: var: make_attach_default_interface_cleanup_params - name: Run attach_default_interface_cleanup retries: "{{ make_attach_default_interface_cleanup_retries | default(omit) }}" delay: "{{ make_attach_default_interface_cleanup_delay | default(omit) }}" until: "{{ make_attach_default_interface_cleanup_until | default(true) }}" register: "make_attach_default_interface_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make attach_default_interface_cleanup" dry_run: "{{ make_attach_default_interface_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_attach_default_interface_cleanup_env|default({})), **(make_attach_default_interface_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_network_isolation_bridge.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_network_0000644000175000017500000000205115134411253033414 0ustar zuulzuul--- - name: Debug make_network_isolation_bridge_env when: make_network_isolation_bridge_env is defined ansible.builtin.debug: var: make_network_isolation_bridge_env - name: Debug make_network_isolation_bridge_params when: make_network_isolation_bridge_params is defined ansible.builtin.debug: var: make_network_isolation_bridge_params - name: Run network_isolation_bridge retries: "{{ make_network_isolation_bridge_retries | default(omit) }}" delay: "{{ make_network_isolation_bridge_delay | default(omit) }}" until: "{{ make_network_isolation_bridge_until | default(true) }}" register: "make_network_isolation_bridge_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make network_isolation_bridge" dry_run: "{{ make_network_isolation_bridge_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_network_isolation_bridge_env|default({})), **(make_network_isolation_bridge_params|default({}))) }}" ././@LongLink0000644000000000000000000000020100000000000011574 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_network_isolation_bridge_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_network_0000644000175000017500000000224115134411253033415 0ustar zuulzuul--- - name: Debug make_network_isolation_bridge_cleanup_env when: make_network_isolation_bridge_cleanup_env is defined ansible.builtin.debug: var: make_network_isolation_bridge_cleanup_env - name: Debug make_network_isolation_bridge_cleanup_params when: make_network_isolation_bridge_cleanup_params is defined ansible.builtin.debug: var: 
make_network_isolation_bridge_cleanup_params - name: Run network_isolation_bridge_cleanup retries: "{{ make_network_isolation_bridge_cleanup_retries | default(omit) }}" delay: "{{ make_network_isolation_bridge_cleanup_delay | default(omit) }}" until: "{{ make_network_isolation_bridge_cleanup_until | default(true) }}" register: "make_network_isolation_bridge_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make network_isolation_bridge_cleanup" dry_run: "{{ make_network_isolation_bridge_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_network_isolation_bridge_cleanup_env|default({})), **(make_network_isolation_bridge_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016700000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_baremetal_compute.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_bar0000644000175000017500000000201315134411253033333 0ustar zuulzuul--- - name: Debug make_edpm_baremetal_compute_env when: make_edpm_baremetal_compute_env is defined ansible.builtin.debug: var: make_edpm_baremetal_compute_env - name: Debug make_edpm_baremetal_compute_params when: make_edpm_baremetal_compute_params is defined ansible.builtin.debug: var: make_edpm_baremetal_compute_params - name: Run edpm_baremetal_compute retries: "{{ make_edpm_baremetal_compute_retries | default(omit) }}" delay: "{{ make_edpm_baremetal_compute_delay | default(omit) }}" until: "{{ make_edpm_baremetal_compute_until | default(true) }}" register: "make_edpm_baremetal_compute_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_baremetal_compute" dry_run: "{{ make_edpm_baremetal_compute_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_baremetal_compute_env|default({})), **(make_edpm_baremetal_compute_params|default({}))) }}" ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_compute.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_com0000644000175000017500000000156515134411253033360 0ustar zuulzuul--- - name: Debug make_edpm_compute_env when: make_edpm_compute_env is defined ansible.builtin.debug: var: make_edpm_compute_env - name: Debug make_edpm_compute_params when: make_edpm_compute_params is defined ansible.builtin.debug: var: make_edpm_compute_params - name: Run edpm_compute retries: "{{ make_edpm_compute_retries | default(omit) }}" delay: "{{ make_edpm_compute_delay | default(omit) }}" until: "{{ make_edpm_compute_until | default(true) }}" register: "make_edpm_compute_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_compute" dry_run: "{{ make_edpm_compute_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_compute_env|default({})), **(make_edpm_compute_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_compute_bootc.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_com0000644000175000017500000000171715134411253033357 0ustar zuulzuul--- - name: Debug make_edpm_compute_bootc_env when: make_edpm_compute_bootc_env is defined ansible.builtin.debug: var: make_edpm_compute_bootc_env - name: Debug make_edpm_compute_bootc_params when: make_edpm_compute_bootc_params is defined ansible.builtin.debug: var: make_edpm_compute_bootc_params - name: Run edpm_compute_bootc retries: "{{ make_edpm_compute_bootc_retries | default(omit) }}" delay: "{{ make_edpm_compute_bootc_delay | default(omit) }}" until: "{{ make_edpm_compute_bootc_until | default(true) }}" register: "make_edpm_compute_bootc_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_compute_bootc" dry_run: "{{ make_edpm_compute_bootc_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_compute_bootc_env|default({})), **(make_edpm_compute_bootc_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_ansible_runner.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_ans0000644000175000017500000000173615134411253033363 0ustar zuulzuul--- - name: Debug make_edpm_ansible_runner_env when: make_edpm_ansible_runner_env is defined ansible.builtin.debug: var: make_edpm_ansible_runner_env - name: Debug make_edpm_ansible_runner_params when: make_edpm_ansible_runner_params is defined ansible.builtin.debug: var: make_edpm_ansible_runner_params - name: Run edpm_ansible_runner retries: "{{ make_edpm_ansible_runner_retries | default(omit) }}" delay: "{{ make_edpm_ansible_runner_delay | default(omit) }}" until: "{{ make_edpm_ansible_runner_until | default(true) }}" register: "make_edpm_ansible_runner_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_ansible_runner" dry_run: "{{ make_edpm_ansible_runner_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_ansible_runner_env|default({})), **(make_edpm_ansible_runner_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_computes_bgp.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_com0000644000175000017500000000170015134411253033347 0ustar zuulzuul--- - name: Debug make_edpm_computes_bgp_env when: make_edpm_computes_bgp_env is defined ansible.builtin.debug: var: make_edpm_computes_bgp_env - name: Debug make_edpm_computes_bgp_params when: make_edpm_computes_bgp_params is defined ansible.builtin.debug: var: make_edpm_computes_bgp_params - name: Run edpm_computes_bgp retries: "{{ make_edpm_computes_bgp_retries | default(omit) }}" delay: "{{ make_edpm_computes_bgp_delay | default(omit) }}" until: "{{ make_edpm_computes_bgp_until | default(true) }}" register: "make_edpm_computes_bgp_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: 
"/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_computes_bgp" dry_run: "{{ make_edpm_computes_bgp_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_computes_bgp_env|default({})), **(make_edpm_computes_bgp_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_compute_repos.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_com0000644000175000017500000000171715134411253033357 0ustar zuulzuul--- - name: Debug make_edpm_compute_repos_env when: make_edpm_compute_repos_env is defined ansible.builtin.debug: var: make_edpm_compute_repos_env - name: Debug make_edpm_compute_repos_params when: make_edpm_compute_repos_params is defined ansible.builtin.debug: var: make_edpm_compute_repos_params - name: Run edpm_compute_repos retries: "{{ make_edpm_compute_repos_retries | default(omit) }}" delay: "{{ make_edpm_compute_repos_delay | default(omit) }}" until: "{{ make_edpm_compute_repos_until | default(true) }}" register: "make_edpm_compute_repos_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_compute_repos" dry_run: "{{ make_edpm_compute_repos_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_compute_repos_env|default({})), **(make_edpm_compute_repos_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_compute_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_com0000644000175000017500000000175515134411253033361 0ustar zuulzuul--- - name: Debug make_edpm_compute_cleanup_env when: make_edpm_compute_cleanup_env is defined ansible.builtin.debug: var: make_edpm_compute_cleanup_env - name: Debug make_edpm_compute_cleanup_params when: make_edpm_compute_cleanup_params is defined ansible.builtin.debug: var: make_edpm_compute_cleanup_params - name: Run edpm_compute_cleanup retries: "{{ make_edpm_compute_cleanup_retries | default(omit) }}" delay: "{{ make_edpm_compute_cleanup_delay | default(omit) }}" until: "{{ make_edpm_compute_cleanup_until | default(true) }}" register: "make_edpm_compute_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_compute_cleanup" dry_run: "{{ make_edpm_compute_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_compute_cleanup_env|default({})), **(make_edpm_compute_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_networker.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_net0000644000175000017500000000162315134411253033363 0ustar zuulzuul--- - name: Debug make_edpm_networker_env when: make_edpm_networker_env is defined ansible.builtin.debug: var: make_edpm_networker_env - name: Debug make_edpm_networker_params when: make_edpm_networker_params is defined ansible.builtin.debug: var: make_edpm_networker_params - name: Run edpm_networker 
retries: "{{ make_edpm_networker_retries | default(omit) }}" delay: "{{ make_edpm_networker_delay | default(omit) }}" until: "{{ make_edpm_networker_until | default(true) }}" register: "make_edpm_networker_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_networker" dry_run: "{{ make_edpm_networker_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_networker_env|default({})), **(make_edpm_networker_params|default({}))) }}" ././@LongLink0000644000000000000000000000016700000000000011607 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_networker_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_net0000644000175000017500000000201315134411253033355 0ustar zuulzuul--- - name: Debug make_edpm_networker_cleanup_env when: make_edpm_networker_cleanup_env is defined ansible.builtin.debug: var: make_edpm_networker_cleanup_env - name: Debug make_edpm_networker_cleanup_params when: make_edpm_networker_cleanup_params is defined ansible.builtin.debug: var: make_edpm_networker_cleanup_params - name: Run edpm_networker_cleanup retries: "{{ make_edpm_networker_cleanup_retries | default(omit) }}" delay: "{{ make_edpm_networker_cleanup_delay | default(omit) }}" until: "{{ make_edpm_networker_cleanup_until | default(true) }}" register: "make_edpm_networker_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_networker_cleanup" dry_run: "{{ make_edpm_networker_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_networker_cleanup_env|default({})), **(make_edpm_networker_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_deploy_instance.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_edpm_dep0000644000175000017500000000175515134411253033353 0ustar zuulzuul--- - name: Debug make_edpm_deploy_instance_env when: make_edpm_deploy_instance_env is defined ansible.builtin.debug: var: make_edpm_deploy_instance_env - name: Debug make_edpm_deploy_instance_params when: make_edpm_deploy_instance_params is defined ansible.builtin.debug: var: make_edpm_deploy_instance_params - name: Run edpm_deploy_instance retries: "{{ make_edpm_deploy_instance_retries | default(omit) }}" delay: "{{ make_edpm_deploy_instance_delay | default(omit) }}" until: "{{ make_edpm_deploy_instance_until | default(true) }}" register: "make_edpm_deploy_instance_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make edpm_deploy_instance" dry_run: "{{ make_edpm_deploy_instance_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_edpm_deploy_instance_env|default({})), **(make_edpm_deploy_instance_params|default({}))) }}" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_tripleo_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_tripleo_0000644000175000017500000000162315134411253033405 0ustar zuulzuul--- - name: Debug make_tripleo_deploy_env when: make_tripleo_deploy_env is defined ansible.builtin.debug: var: make_tripleo_deploy_env - name: Debug make_tripleo_deploy_params when: make_tripleo_deploy_params is defined ansible.builtin.debug: var: make_tripleo_deploy_params - name: Run tripleo_deploy retries: "{{ make_tripleo_deploy_retries | default(omit) }}" delay: "{{ make_tripleo_deploy_delay | default(omit) }}" until: "{{ make_tripleo_deploy_until | default(true) }}" register: "make_tripleo_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make tripleo_deploy" dry_run: "{{ make_tripleo_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_tripleo_deploy_env|default({})), **(make_tripleo_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalone_deploy.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalo0000644000175000017500000000170015134411253033371 0ustar zuulzuul--- - name: Debug make_standalone_deploy_env when: make_standalone_deploy_env is defined ansible.builtin.debug: var: make_standalone_deploy_env - name: Debug make_standalone_deploy_params when: make_standalone_deploy_params is defined ansible.builtin.debug: var: make_standalone_deploy_params - name: Run standalone_deploy retries: "{{ make_standalone_deploy_retries | default(omit) }}" delay: "{{ make_standalone_deploy_delay | default(omit) }}" until: "{{ make_standalone_deploy_until | default(true) }}" register: "make_standalone_deploy_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make standalone_deploy" dry_run: "{{ make_standalone_deploy_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_standalone_deploy_env|default({})), **(make_standalone_deploy_params|default({}))) }}" ././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalone_sync.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalo0000644000175000017500000000164215134411253033376 0ustar zuulzuul--- - name: Debug make_standalone_sync_env when: make_standalone_sync_env is defined ansible.builtin.debug: var: make_standalone_sync_env - name: Debug make_standalone_sync_params when: make_standalone_sync_params is defined ansible.builtin.debug: var: make_standalone_sync_params - name: Run standalone_sync retries: "{{ make_standalone_sync_retries | default(omit) }}" delay: "{{ make_standalone_sync_delay | default(omit) }}" until: "{{ make_standalone_sync_until | default(true) }}" register: "make_standalone_sync_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make standalone_sync" dry_run: "{{ make_standalone_sync_dryrun|default(false)|bool }}" 
extra_args: "{{ dict((make_standalone_sync_env|default({})), **(make_standalone_sync_params|default({}))) }}" ././@LongLink0000644000000000000000000000015300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalone.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalo0000644000175000017500000000152715134411253033400 0ustar zuulzuul--- - name: Debug make_standalone_env when: make_standalone_env is defined ansible.builtin.debug: var: make_standalone_env - name: Debug make_standalone_params when: make_standalone_params is defined ansible.builtin.debug: var: make_standalone_params - name: Run standalone retries: "{{ make_standalone_retries | default(omit) }}" delay: "{{ make_standalone_delay | default(omit) }}" until: "{{ make_standalone_until | default(true) }}" register: "make_standalone_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make standalone" dry_run: "{{ make_standalone_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_standalone_env|default({})), **(make_standalone_params|default({}))) }}" ././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalone_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalo0000644000175000017500000000171715134411253033401 0ustar zuulzuul--- - name: Debug make_standalone_cleanup_env when: make_standalone_cleanup_env is defined ansible.builtin.debug: var: make_standalone_cleanup_env - name: Debug make_standalone_cleanup_params when: make_standalone_cleanup_params is defined ansible.builtin.debug: var: make_standalone_cleanup_params - name: Run standalone_cleanup retries: "{{ make_standalone_cleanup_retries | default(omit) }}" delay: "{{ make_standalone_cleanup_delay | default(omit) }}" until: "{{ make_standalone_cleanup_until | default(true) }}" register: "make_standalone_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make standalone_cleanup" dry_run: "{{ make_standalone_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_standalone_cleanup_env|default({})), **(make_standalone_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalone_snapshot.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalo0000644000175000017500000000173615134411253033402 0ustar zuulzuul--- - name: Debug make_standalone_snapshot_env when: make_standalone_snapshot_env is defined ansible.builtin.debug: var: make_standalone_snapshot_env - name: Debug make_standalone_snapshot_params when: make_standalone_snapshot_params is defined ansible.builtin.debug: var: make_standalone_snapshot_params - name: Run standalone_snapshot retries: "{{ make_standalone_snapshot_retries | default(omit) }}" delay: "{{ make_standalone_snapshot_delay | default(omit) }}" until: "{{ make_standalone_snapshot_until | default(true) }}" register: "make_standalone_snapshot_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" 
chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make standalone_snapshot" dry_run: "{{ make_standalone_snapshot_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_standalone_snapshot_env|default({})), **(make_standalone_snapshot_params|default({}))) }}" ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalone_revert.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_standalo0000644000175000017500000000170015134411253033371 0ustar zuulzuul--- - name: Debug make_standalone_revert_env when: make_standalone_revert_env is defined ansible.builtin.debug: var: make_standalone_revert_env - name: Debug make_standalone_revert_params when: make_standalone_revert_params is defined ansible.builtin.debug: var: make_standalone_revert_params - name: Run standalone_revert retries: "{{ make_standalone_revert_retries | default(omit) }}" delay: "{{ make_standalone_revert_delay | default(omit) }}" until: "{{ make_standalone_revert_until | default(true) }}" register: "make_standalone_revert_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make standalone_revert" dry_run: "{{ make_standalone_revert_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_standalone_revert_env|default({})), **(make_standalone_revert_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cifmw_prepare.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cifmw_pr0000644000175000017500000000160415134411253033375 0ustar zuulzuul--- - name: Debug make_cifmw_prepare_env when: make_cifmw_prepare_env is defined ansible.builtin.debug: var: make_cifmw_prepare_env - name: Debug make_cifmw_prepare_params when: make_cifmw_prepare_params is defined ansible.builtin.debug: var: make_cifmw_prepare_params - name: Run cifmw_prepare retries: "{{ make_cifmw_prepare_retries | default(omit) }}" delay: "{{ make_cifmw_prepare_delay | default(omit) }}" until: "{{ make_cifmw_prepare_until | default(true) }}" register: "make_cifmw_prepare_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make cifmw_prepare" dry_run: "{{ make_cifmw_prepare_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cifmw_prepare_env|default({})), **(make_cifmw_prepare_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cifmw_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_cifmw_cl0000644000175000017500000000160415134411253033352 0ustar zuulzuul--- - name: Debug make_cifmw_cleanup_env when: make_cifmw_cleanup_env is defined ansible.builtin.debug: var: make_cifmw_cleanup_env - name: Debug make_cifmw_cleanup_params when: make_cifmw_cleanup_params is defined ansible.builtin.debug: var: make_cifmw_cleanup_params - name: Run cifmw_cleanup retries: "{{ make_cifmw_cleanup_retries | default(omit) }}" delay: "{{ make_cifmw_cleanup_delay | default(omit) }}" until: "{{ 
make_cifmw_cleanup_until | default(true) }}" register: "make_cifmw_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make cifmw_cleanup" dry_run: "{{ make_cifmw_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_cifmw_cleanup_env|default({})), **(make_cifmw_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_network.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_ne0000644000175000017500000000160415134411253033334 0ustar zuulzuul--- - name: Debug make_bmaas_network_env when: make_bmaas_network_env is defined ansible.builtin.debug: var: make_bmaas_network_env - name: Debug make_bmaas_network_params when: make_bmaas_network_params is defined ansible.builtin.debug: var: make_bmaas_network_params - name: Run bmaas_network retries: "{{ make_bmaas_network_retries | default(omit) }}" delay: "{{ make_bmaas_network_delay | default(omit) }}" until: "{{ make_bmaas_network_until | default(true) }}" register: "make_bmaas_network_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_network" dry_run: "{{ make_bmaas_network_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_network_env|default({})), **(make_bmaas_network_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_network_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_ne0000644000175000017500000000177415134411253033344 0ustar zuulzuul--- - name: Debug make_bmaas_network_cleanup_env when: make_bmaas_network_cleanup_env is defined ansible.builtin.debug: var: make_bmaas_network_cleanup_env - name: Debug make_bmaas_network_cleanup_params when: make_bmaas_network_cleanup_params is defined ansible.builtin.debug: var: make_bmaas_network_cleanup_params - name: Run bmaas_network_cleanup retries: "{{ make_bmaas_network_cleanup_retries | default(omit) }}" delay: "{{ make_bmaas_network_cleanup_delay | default(omit) }}" until: "{{ make_bmaas_network_cleanup_until | default(true) }}" register: "make_bmaas_network_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_network_cleanup" dry_run: "{{ make_bmaas_network_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_network_cleanup_env|default({})), **(make_bmaas_network_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000020700000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_route_crc_and_crc_bmaas_networks.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_ro0000644000175000017500000000237315134411253033356 0ustar zuulzuul--- - name: Debug make_bmaas_route_crc_and_crc_bmaas_networks_env when: make_bmaas_route_crc_and_crc_bmaas_networks_env is defined ansible.builtin.debug: var: 
make_bmaas_route_crc_and_crc_bmaas_networks_env - name: Debug make_bmaas_route_crc_and_crc_bmaas_networks_params when: make_bmaas_route_crc_and_crc_bmaas_networks_params is defined ansible.builtin.debug: var: make_bmaas_route_crc_and_crc_bmaas_networks_params - name: Run bmaas_route_crc_and_crc_bmaas_networks retries: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_retries | default(omit) }}" delay: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_delay | default(omit) }}" until: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_until | default(true) }}" register: "make_bmaas_route_crc_and_crc_bmaas_networks_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_route_crc_and_crc_bmaas_networks" dry_run: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_route_crc_and_crc_bmaas_networks_env|default({})), **(make_bmaas_route_crc_and_crc_bmaas_networks_params|default({}))) }}" ././@LongLink0000644000000000000000000000021700000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_route_crc_and_crc_bmaas_networks_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_ro0000644000175000017500000000256315134411253033357 0ustar zuulzuul--- - name: Debug make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_env when: make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_env is defined ansible.builtin.debug: var: make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_env - name: Debug make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_params when: make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_params is defined ansible.builtin.debug: var: make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_params - name: Run bmaas_route_crc_and_crc_bmaas_networks_cleanup retries: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_retries | default(omit) }}" delay: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_delay | default(omit) }}" until: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_until | default(true) }}" register: "make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_route_crc_and_crc_bmaas_networks_cleanup" dry_run: "{{ make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_env|default({})), **(make_bmaas_route_crc_and_crc_bmaas_networks_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000015600000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_metallb.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_me0000644000175000017500000000160415134411253033333 0ustar zuulzuul--- - name: Debug make_bmaas_metallb_env when: make_bmaas_metallb_env is defined ansible.builtin.debug: var: make_bmaas_metallb_env - name: Debug make_bmaas_metallb_params when: make_bmaas_metallb_params is defined ansible.builtin.debug: var: make_bmaas_metallb_params - name: Run bmaas_metallb retries: "{{ make_bmaas_metallb_retries | default(omit) }}" delay: "{{ make_bmaas_metallb_delay | 
default(omit) }}" until: "{{ make_bmaas_metallb_until | default(true) }}" register: "make_bmaas_metallb_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_metallb" dry_run: "{{ make_bmaas_metallb_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_metallb_env|default({})), **(make_bmaas_metallb_params|default({}))) }}" ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_crc_attach_network.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_cr0000644000175000017500000000205115134411253033333 0ustar zuulzuul--- - name: Debug make_bmaas_crc_attach_network_env when: make_bmaas_crc_attach_network_env is defined ansible.builtin.debug: var: make_bmaas_crc_attach_network_env - name: Debug make_bmaas_crc_attach_network_params when: make_bmaas_crc_attach_network_params is defined ansible.builtin.debug: var: make_bmaas_crc_attach_network_params - name: Run bmaas_crc_attach_network retries: "{{ make_bmaas_crc_attach_network_retries | default(omit) }}" delay: "{{ make_bmaas_crc_attach_network_delay | default(omit) }}" until: "{{ make_bmaas_crc_attach_network_until | default(true) }}" register: "make_bmaas_crc_attach_network_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_crc_attach_network" dry_run: "{{ make_bmaas_crc_attach_network_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_crc_attach_network_env|default({})), **(make_bmaas_crc_attach_network_params|default({}))) }}" ././@LongLink0000644000000000000000000000020100000000000011574 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_crc_attach_network_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_cr0000644000175000017500000000224115134411253033334 0ustar zuulzuul--- - name: Debug make_bmaas_crc_attach_network_cleanup_env when: make_bmaas_crc_attach_network_cleanup_env is defined ansible.builtin.debug: var: make_bmaas_crc_attach_network_cleanup_env - name: Debug make_bmaas_crc_attach_network_cleanup_params when: make_bmaas_crc_attach_network_cleanup_params is defined ansible.builtin.debug: var: make_bmaas_crc_attach_network_cleanup_params - name: Run bmaas_crc_attach_network_cleanup retries: "{{ make_bmaas_crc_attach_network_cleanup_retries | default(omit) }}" delay: "{{ make_bmaas_crc_attach_network_cleanup_delay | default(omit) }}" until: "{{ make_bmaas_crc_attach_network_cleanup_until | default(true) }}" register: "make_bmaas_crc_attach_network_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_crc_attach_network_cleanup" dry_run: "{{ make_bmaas_crc_attach_network_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_crc_attach_network_cleanup_env|default({})), **(make_bmaas_crc_attach_network_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000017300000000000011604 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_crc_baremetal_bridge.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_cr0000644000175000017500000000210715134411253033335 0ustar zuulzuul--- - name: Debug make_bmaas_crc_baremetal_bridge_env when: make_bmaas_crc_baremetal_bridge_env is defined ansible.builtin.debug: var: make_bmaas_crc_baremetal_bridge_env - name: Debug make_bmaas_crc_baremetal_bridge_params when: make_bmaas_crc_baremetal_bridge_params is defined ansible.builtin.debug: var: make_bmaas_crc_baremetal_bridge_params - name: Run bmaas_crc_baremetal_bridge retries: "{{ make_bmaas_crc_baremetal_bridge_retries | default(omit) }}" delay: "{{ make_bmaas_crc_baremetal_bridge_delay | default(omit) }}" until: "{{ make_bmaas_crc_baremetal_bridge_until | default(true) }}" register: "make_bmaas_crc_baremetal_bridge_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_crc_baremetal_bridge" dry_run: "{{ make_bmaas_crc_baremetal_bridge_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_crc_baremetal_bridge_env|default({})), **(make_bmaas_crc_baremetal_bridge_params|default({}))) }}" ././@LongLink0000644000000000000000000000020300000000000011576 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_crc_baremetal_bridge_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_cr0000644000175000017500000000227715134411253033345 0ustar zuulzuul--- - name: Debug make_bmaas_crc_baremetal_bridge_cleanup_env when: make_bmaas_crc_baremetal_bridge_cleanup_env is defined ansible.builtin.debug: var: make_bmaas_crc_baremetal_bridge_cleanup_env - name: Debug make_bmaas_crc_baremetal_bridge_cleanup_params when: make_bmaas_crc_baremetal_bridge_cleanup_params is defined ansible.builtin.debug: var: make_bmaas_crc_baremetal_bridge_cleanup_params - name: Run bmaas_crc_baremetal_bridge_cleanup retries: "{{ make_bmaas_crc_baremetal_bridge_cleanup_retries | default(omit) }}" delay: "{{ make_bmaas_crc_baremetal_bridge_cleanup_delay | default(omit) }}" until: "{{ make_bmaas_crc_baremetal_bridge_cleanup_until | default(true) }}" register: "make_bmaas_crc_baremetal_bridge_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_crc_baremetal_bridge_cleanup" dry_run: "{{ make_bmaas_crc_baremetal_bridge_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_crc_baremetal_bridge_cleanup_env|default({})), **(make_bmaas_crc_baremetal_bridge_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000017000000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_baremetal_net_nad.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_ba0000644000175000017500000000203215134411253033310 0ustar zuulzuul--- - name: Debug make_bmaas_baremetal_net_nad_env when: make_bmaas_baremetal_net_nad_env is defined ansible.builtin.debug: var: make_bmaas_baremetal_net_nad_env - name: Debug make_bmaas_baremetal_net_nad_params when: make_bmaas_baremetal_net_nad_params is defined ansible.builtin.debug: var: 
make_bmaas_baremetal_net_nad_params - name: Run bmaas_baremetal_net_nad retries: "{{ make_bmaas_baremetal_net_nad_retries | default(omit) }}" delay: "{{ make_bmaas_baremetal_net_nad_delay | default(omit) }}" until: "{{ make_bmaas_baremetal_net_nad_until | default(true) }}" register: "make_bmaas_baremetal_net_nad_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_baremetal_net_nad" dry_run: "{{ make_bmaas_baremetal_net_nad_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_baremetal_net_nad_env|default({})), **(make_bmaas_baremetal_net_nad_params|default({}))) }}" ././@LongLink0000644000000000000000000000020000000000000011573 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_baremetal_net_nad_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_ba0000644000175000017500000000222215134411253033311 0ustar zuulzuul--- - name: Debug make_bmaas_baremetal_net_nad_cleanup_env when: make_bmaas_baremetal_net_nad_cleanup_env is defined ansible.builtin.debug: var: make_bmaas_baremetal_net_nad_cleanup_env - name: Debug make_bmaas_baremetal_net_nad_cleanup_params when: make_bmaas_baremetal_net_nad_cleanup_params is defined ansible.builtin.debug: var: make_bmaas_baremetal_net_nad_cleanup_params - name: Run bmaas_baremetal_net_nad_cleanup retries: "{{ make_bmaas_baremetal_net_nad_cleanup_retries | default(omit) }}" delay: "{{ make_bmaas_baremetal_net_nad_cleanup_delay | default(omit) }}" until: "{{ make_bmaas_baremetal_net_nad_cleanup_until | default(true) }}" register: "make_bmaas_baremetal_net_nad_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_baremetal_net_nad_cleanup" dry_run: "{{ make_bmaas_baremetal_net_nad_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_baremetal_net_nad_cleanup_env|default({})), **(make_bmaas_baremetal_net_nad_cleanup_params|default({}))) }}" ././@LongLink0000644000000000000000000000016600000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_metallb_cleanup.ymlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_me0000644000175000017500000000177415134411253033343 0ustar zuulzuul--- - name: Debug make_bmaas_metallb_cleanup_env when: make_bmaas_metallb_cleanup_env is defined ansible.builtin.debug: var: make_bmaas_metallb_cleanup_env - name: Debug make_bmaas_metallb_cleanup_params when: make_bmaas_metallb_cleanup_params is defined ansible.builtin.debug: var: make_bmaas_metallb_cleanup_params - name: Run bmaas_metallb_cleanup retries: "{{ make_bmaas_metallb_cleanup_retries | default(omit) }}" delay: "{{ make_bmaas_metallb_cleanup_delay | default(omit) }}" until: "{{ make_bmaas_metallb_cleanup_until | default(true) }}" register: "make_bmaas_metallb_cleanup_status" cifmw.general.ci_script: output_dir: "{{ cifmw_basedir }}/artifacts" chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup" script: "make bmaas_metallb_cleanup" dry_run: "{{ make_bmaas_metallb_cleanup_dryrun|default(false)|bool }}" extra_args: "{{ dict((make_bmaas_metallb_cleanup_env|default({})), **(make_bmaas_metallb_cleanup_params|default({}))) }}" 
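All of the install_yamls_makes task files packed above instantiate one generated pattern: debug the optional make_<target>_env / make_<target>_params dictionaries if defined, then run "make <target>" in the install_yamls devsetup directory through cifmw.general.ci_script, with optional retries/delay/until and an extra_args dictionary merged from env and params. A readable sketch of that pattern follows; the <target> placeholder and the YAML indentation are reconstructed here and are not taken verbatim from any single file.

---
# Generated wrapper pattern (sketch): <target> stands for a make target
# such as crc, bmaas_network or standalone_deploy.
- name: Debug make_<target>_env
  when: make_<target>_env is defined
  ansible.builtin.debug:
    var: make_<target>_env

- name: Debug make_<target>_params
  when: make_<target>_params is defined
  ansible.builtin.debug:
    var: make_<target>_params

- name: Run <target>
  retries: "{{ make_<target>_retries | default(omit) }}"
  delay: "{{ make_<target>_delay | default(omit) }}"
  until: "{{ make_<target>_until | default(true) }}"
  register: "make_<target>_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup"
    script: "make <target>"
    dry_run: "{{ make_<target>_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_<target>_env|default({})), **(make_<target>_params|default({}))) }}"

A caller would typically define make_<target>_env, make_<target>_params, or the retry knobs before including the corresponding task file; anything left unset falls back to omit or the defaults shown, and the result of the ci_script call is registered as make_<target>_status.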
home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_virtual_bms.yml
---
- name: Debug make_bmaas_virtual_bms_env
  when: make_bmaas_virtual_bms_env is defined
  ansible.builtin.debug:
    var: make_bmaas_virtual_bms_env

- name: Debug make_bmaas_virtual_bms_params
  when: make_bmaas_virtual_bms_params is defined
  ansible.builtin.debug:
    var: make_bmaas_virtual_bms_params

- name: Run bmaas_virtual_bms
  retries: "{{ make_bmaas_virtual_bms_retries | default(omit) }}"
  delay: "{{ make_bmaas_virtual_bms_delay | default(omit) }}"
  until: "{{ make_bmaas_virtual_bms_until | default(true) }}"
  register: "make_bmaas_virtual_bms_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup"
    script: "make bmaas_virtual_bms"
    dry_run: "{{ make_bmaas_virtual_bms_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_bmaas_virtual_bms_env|default({})), **(make_bmaas_virtual_bms_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_virtual_bms_cleanup.yml
---
- name: Debug make_bmaas_virtual_bms_cleanup_env
  when: make_bmaas_virtual_bms_cleanup_env is defined
  ansible.builtin.debug:
    var: make_bmaas_virtual_bms_cleanup_env

- name: Debug make_bmaas_virtual_bms_cleanup_params
  when: make_bmaas_virtual_bms_cleanup_params is defined
  ansible.builtin.debug:
    var: make_bmaas_virtual_bms_cleanup_params

- name: Run bmaas_virtual_bms_cleanup
  retries: "{{ make_bmaas_virtual_bms_cleanup_retries | default(omit) }}"
  delay: "{{ make_bmaas_virtual_bms_cleanup_delay | default(omit) }}"
  until: "{{ make_bmaas_virtual_bms_cleanup_until | default(true) }}"
  register: "make_bmaas_virtual_bms_cleanup_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup"
    script: "make bmaas_virtual_bms_cleanup"
    dry_run: "{{ make_bmaas_virtual_bms_cleanup_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_bmaas_virtual_bms_cleanup_env|default({})), **(make_bmaas_virtual_bms_cleanup_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_sushy_emulator.yml
---
- name: Debug make_bmaas_sushy_emulator_env
  when: make_bmaas_sushy_emulator_env is defined
  ansible.builtin.debug:
    var: make_bmaas_sushy_emulator_env

- name: Debug make_bmaas_sushy_emulator_params
  when: make_bmaas_sushy_emulator_params is defined
  ansible.builtin.debug:
    var: make_bmaas_sushy_emulator_params

- name: Run bmaas_sushy_emulator
  retries: "{{ make_bmaas_sushy_emulator_retries | default(omit) }}"
  delay: "{{ make_bmaas_sushy_emulator_delay | default(omit) }}"
  until: "{{ make_bmaas_sushy_emulator_until | default(true) }}"
  register: "make_bmaas_sushy_emulator_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup"
    script: "make bmaas_sushy_emulator"
    dry_run: "{{ make_bmaas_sushy_emulator_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_bmaas_sushy_emulator_env|default({})), **(make_bmaas_sushy_emulator_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_sushy_emulator_cleanup.yml
---
- name: Debug make_bmaas_sushy_emulator_cleanup_env
  when: make_bmaas_sushy_emulator_cleanup_env is defined
  ansible.builtin.debug:
    var: make_bmaas_sushy_emulator_cleanup_env

- name: Debug make_bmaas_sushy_emulator_cleanup_params
  when: make_bmaas_sushy_emulator_cleanup_params is defined
  ansible.builtin.debug:
    var: make_bmaas_sushy_emulator_cleanup_params

- name: Run bmaas_sushy_emulator_cleanup
  retries: "{{ make_bmaas_sushy_emulator_cleanup_retries | default(omit) }}"
  delay: "{{ make_bmaas_sushy_emulator_cleanup_delay | default(omit) }}"
  until: "{{ make_bmaas_sushy_emulator_cleanup_until | default(true) }}"
  register: "make_bmaas_sushy_emulator_cleanup_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup"
    script: "make bmaas_sushy_emulator_cleanup"
    dry_run: "{{ make_bmaas_sushy_emulator_cleanup_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_bmaas_sushy_emulator_cleanup_env|default({})), **(make_bmaas_sushy_emulator_cleanup_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_sushy_emulator_wait.yml
---
- name: Debug make_bmaas_sushy_emulator_wait_env
  when: make_bmaas_sushy_emulator_wait_env is defined
  ansible.builtin.debug:
    var: make_bmaas_sushy_emulator_wait_env

- name: Debug make_bmaas_sushy_emulator_wait_params
  when: make_bmaas_sushy_emulator_wait_params is defined
  ansible.builtin.debug:
    var: make_bmaas_sushy_emulator_wait_params

- name: Run bmaas_sushy_emulator_wait
  retries: "{{ make_bmaas_sushy_emulator_wait_retries | default(omit) }}"
  delay: "{{ make_bmaas_sushy_emulator_wait_delay | default(omit) }}"
  until: "{{ make_bmaas_sushy_emulator_wait_until | default(true) }}"
  register: "make_bmaas_sushy_emulator_wait_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup"
    script: "make bmaas_sushy_emulator_wait"
    dry_run: "{{ make_bmaas_sushy_emulator_wait_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_bmaas_sushy_emulator_wait_env|default({})), **(make_bmaas_sushy_emulator_wait_params|default({}))) }}"
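Each generated task also registers a <target>_status result and exposes <target>_retries, <target>_delay and <target>_until knobs, so a one-shot make target such as bmaas_sushy_emulator_wait can be turned into a polling loop from the caller's side. A small sketch of such an override follows; the condition shown is an assumption (it relies only on Ansible's generic "is not failed" test against the registered result) and is not taken from this job's configuration.

# Hypothetical extra-vars overrides for the wait target (illustrative only).
make_bmaas_sushy_emulator_wait_retries: 10
make_bmaas_sushy_emulator_wait_delay: 30
# Re-run "make bmaas_sushy_emulator_wait" until the registered result succeeds.
make_bmaas_sushy_emulator_wait_until: "make_bmaas_sushy_emulator_wait_status is not failed"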
home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_generate_nodes_yaml.yml
---
- name: Debug make_bmaas_generate_nodes_yaml_env
  when: make_bmaas_generate_nodes_yaml_env is defined
  ansible.builtin.debug:
    var: make_bmaas_generate_nodes_yaml_env

- name: Debug make_bmaas_generate_nodes_yaml_params
  when: make_bmaas_generate_nodes_yaml_params is defined
  ansible.builtin.debug:
    var: make_bmaas_generate_nodes_yaml_params

- name: Run bmaas_generate_nodes_yaml
  retries: "{{ make_bmaas_generate_nodes_yaml_retries | default(omit) }}"
  delay: "{{ make_bmaas_generate_nodes_yaml_delay | default(omit) }}"
  until: "{{ make_bmaas_generate_nodes_yaml_until | default(true) }}"
  register: "make_bmaas_generate_nodes_yaml_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup"
    script: "make bmaas_generate_nodes_yaml"
    dry_run: "{{ make_bmaas_generate_nodes_yaml_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_bmaas_generate_nodes_yaml_env|default({})), **(make_bmaas_generate_nodes_yaml_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas.yml
---
- name: Debug make_bmaas_env
  when: make_bmaas_env is defined
  ansible.builtin.debug:
    var: make_bmaas_env

- name: Debug make_bmaas_params
  when: make_bmaas_params is defined
  ansible.builtin.debug:
    var: make_bmaas_params

- name: Run bmaas
  retries: "{{ make_bmaas_retries | default(omit) }}"
  delay: "{{ make_bmaas_delay | default(omit) }}"
  until: "{{ make_bmaas_until | default(true) }}"
  register: "make_bmaas_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup"
    script: "make bmaas"
    dry_run: "{{ make_bmaas_dryrun|default(false)|bool }}"
    extra_args: "{{ dict((make_bmaas_env|default({})), **(make_bmaas_params|default({}))) }}"

home/zuul/zuul-output/logs/ci-framework-data/artifacts/roles/install_yamls_makes/tasks/make_bmaas_cleanup.yml
---
- name: Debug make_bmaas_cleanup_env
  when: make_bmaas_cleanup_env is defined
  ansible.builtin.debug:
    var: make_bmaas_cleanup_env

- name: Debug make_bmaas_cleanup_params
  when: make_bmaas_cleanup_params is defined
  ansible.builtin.debug:
    var: make_bmaas_cleanup_params

- name: Run bmaas_cleanup
  retries: "{{ make_bmaas_cleanup_retries | default(omit) }}"
  delay: "{{ make_bmaas_cleanup_delay | default(omit) }}"
  until: "{{ make_bmaas_cleanup_until | default(true) }}"
  register: "make_bmaas_cleanup_status"
  cifmw.general.ci_script:
    output_dir: "{{ cifmw_basedir }}/artifacts"
    chdir: "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup"
    script: "make bmaas_cleanup"
    dry_run: "{{ make_bmaas_cleanup_dryrun|default(false)|bool }}"
    extra_args: "{{
dict((make_bmaas_cleanup_env|default({})), **(make_bmaas_cleanup_params|default({}))) }}" home/zuul/zuul-output/logs/ci-framework-data/artifacts/parameters/0000755000175000017500000000000015134437263024516 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/parameters/zuul-params.yml0000644000175000017500000003535315134437263027532 0ustar zuulzuuladoption_extra_vars: 'supported_volume_backends: [] supported_backup_backends: [] ' cifmw_artifacts_crc_sshkey: ~/.ssh/id_rsa cifmw_dlrn_report_result: false cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_kubeconfig: '{{ ansible_user_dir }}/.crc/machines/crc/kubeconfig' cifmw_openshift_password: '12**********89' cifmw_openshift_setup_skip_internal_registry_tls_verify: true cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_use_crc: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller cloud_domain: ooo.test crc_ci_bootstrap_cloud_name: '{{ nodepool.cloud | replace(''-nodepool-tripleo'','''') }}' crc_ci_bootstrap_networking: instances: controller: networks: default: ip: 192.168.122.11 internal-api: ip: 172.17.0.4 storage: ip: 172.18.0.4 storage_mgmt: ip: 172.20.0.4 tenant: ip: 172.19.0.4 crc: networks: default: ip: 192.168.122.10 internal-api: ip: 172.17.0.5 storage: ip: 172.18.0.5 storage_mgmt: ip: 172.20.0.5 tenant: ip: 172.19.0.5 standalone: networks: default: config_nm: false ip: 192.168.122.100 internal-api: config_nm: false ip: 172.17.0.100 storage: config_nm: false ip: 172.18.0.100 storage_mgmt: config_nm: false ip: 172.20.0.100 tenant: config_nm: false ip: 172.19.0.100 networks: default: mtu: '{{ (''ibm'' in nodepool.cloud) | ternary(''1440'', ''1500'') }}' range: 192.168.122.0/24 router_net: '' transparent: true internal-api: range: 172.17.0.0/24 vlan: 20 storage: range: 172.18.0.0/24 vlan: 21 storage_mgmt: range: 172.20.0.0/24 vlan: 23 tenant: range: 172.19.0.0/24 vlan: 22 dpa_test_suite: test-minimal enable_barbican: 'false' enable_octavia: 'true' enable_ramdisk: true enable_telemetry: 'true' enable_tls: 'true' osp_17_ceph_repos: - rhceph-7-tools-for-rhel-9-x86_64-rpms osp_17_repos: - rhel-9-for-x86_64-baseos-eus-rpms - rhel-9-for-x86_64-appstream-eus-rpms - rhel-9-for-x86_64-highavailability-eus-rpms - openstack-17.1-for-rhel-9-x86_64-rpms - fast-datapath-for-rhel-9-x86_64-rpms push_registry: quay.rdoproject.org quay_login_secret_name: quay_nextgen_zuulgithubci registry_login_enabled: true standalone_gateway: 192.168.122.10 standalone_ip: 192.168.122.100 standalone_private_key: /home/zuul/.ssh/id_rsa standalone_vm_inventory: standalone_vm_inventory upstream_control_plane_dns: 192.168.122.10 use_ceph: 'false' zuul: _inheritance_path: - '' - '' - '' - '' - '' - '' ansible_version: '8' attempts: 1 branch: main build: 3911aa2e57464e5ea41c24d6ab361757 build_refs: - branch: main change: '1202' change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption topic: null buildset: ae39f936defb47d8b8026507b61685a3 buildset_refs: - branch: main change: '1202' change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 patchset: 
a6dda39287a9d88f8d44f99969c3909ff61d8792 project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption topic: null change: '1202' change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 child_jobs: [] commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 event_id: 46eb15f0-f788-11f0-870c-f967a8119d20 executor: hostname: ze04.softwarefactory-project.io inventory_file: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/ansible/inventory.yaml log_root: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work/logs result_data_file: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work/results.json src_root: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work/src work_root: /var/lib/zuul/builds/3911aa2e57464e5ea41c24d6ab361757/work items: - branch: main change: '1202' change_url: https://github.com/openstack-k8s-operators/data-plane-adoption/pull/1202 commit_id: a6dda39287a9d88f8d44f99969c3909ff61d8792 patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption topic: null job: adoption-standalone-to-crc-no-ceph jobtags: [] max_attempts: 1 message: dW5pMDRkZWx0YS1pcHY2OiBObyBPVk4gREhDUCBmb3IgYmFyZW1ldGFsIHBvcnRzCgpBZGRzIGBvdm4vZGlzYWJsZV9vdm5fZGhjcF9mb3JfYmFyZW1ldGFsX3BvcnRzYCBjb25maWd1cmF0aW9uIHRvIHRoZSB1bmkwNGRlbHRhLWlwdjYgYGNvbmZpZ19kb3dubG9hZC55YW1sYCB1c2luZyBgbmV1dHJvbjo6Y29uZmlnOjpwbHVnaW5fbWwyX2NvbmZpZ2AuDQogICAgDQpUaGlzIGRpc2FibGVzIE9WTidzIGJ1aWx0LWluIERIQ1Agc2VydmljZSBmb3IgYmFyZW1ldGFsIHBvcnRzLCBhbGxvd2luZyBleHRlcm5hbCBESENQIHRvIGJlIHVzZWQgaW5zdGVhZC4gVXNlcyB0aGUgY29uZmlnIGNsYXNzIGFwcHJvYWNoIHNpbmNlIHRoZSBkaXJlY3QgcHVwcGV0IHBhcmFtZXRlciB3YXMgbm90IGJhY2twb3J0ZWQgdG8gT1NQIDE3LjEuDQoNCkppcmE6IFtPU1BSSC0yMDAyMV0oaHR0cHM6Ly9pc3N1ZXMucmVkaGF0LmNvbS8vYnJvd3NlL09TUFJILTIwMDIxKQ== patchset: a6dda39287a9d88f8d44f99969c3909ff61d8792 pipeline: github-check playbook_context: playbook_projects: trusted/project_0/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 6b8ed36de1a9fb17d1895a907c56e0ebed359010 trusted/project_1/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 1d0e62540c53a110ce48eaedb202bbd70f327c48 trusted/project_2/review.rdoproject.org/rdo-jobs: canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d6509b6cfec74740c76a12d94dd9fc8ce1990297 trusted/project_3/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: daa791825fcb1f23b3b87c68747b1dbe292d2d2d untrusted/project_0/github.com/openstack-k8s-operators/ci-framework: canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main commit: daa791825fcb1f23b3b87c68747b1dbe292d2d2d untrusted/project_1/review.rdoproject.org/config: canonical_name: review.rdoproject.org/config checkout: master commit: 6b8ed36de1a9fb17d1895a907c56e0ebed359010 untrusted/project_2/opendev.org/zuul/zuul-jobs: canonical_name: opendev.org/zuul/zuul-jobs checkout: master commit: 1d0e62540c53a110ce48eaedb202bbd70f327c48 untrusted/project_3/review.rdoproject.org/rdo-jobs: 
canonical_name: review.rdoproject.org/rdo-jobs checkout: master commit: d6509b6cfec74740c76a12d94dd9fc8ce1990297 playbooks: - path: untrusted/project_3/review.rdoproject.org/rdo-jobs/playbooks/data_plane_adoption/deploy_standalone_run_repo_tests.yaml roles: - checkout: main checkout_description: project override ref link_name: ansible/playbook_0/role_0/ci-framework link_target: untrusted/project_0/github.com/openstack-k8s-operators/ci-framework role_path: ansible/playbook_0/role_0/ci-framework/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_1/config link_target: untrusted/project_1/review.rdoproject.org/config role_path: ansible/playbook_0/role_1/config/roles - checkout: master checkout_description: project default branch link_name: ansible/playbook_0/role_2/zuul-jobs link_target: untrusted/project_2/opendev.org/zuul/zuul-jobs role_path: ansible/playbook_0/role_2/zuul-jobs/roles - checkout: master checkout_description: playbook branch link_name: ansible/playbook_0/role_3/rdo-jobs link_target: untrusted/project_3/review.rdoproject.org/rdo-jobs role_path: ansible/playbook_0/role_3/rdo-jobs/roles post_review: false project: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption name: openstack-k8s-operators/data-plane-adoption short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption projects: github.com/crc-org/crc-cloud: canonical_hostname: github.com canonical_name: github.com/crc-org/crc-cloud checkout: main checkout_description: project override ref commit: 42957126d9d9b9d1372615db325b82bd992fa335 name: crc-org/crc-cloud required: true short_name: crc-cloud src_dir: src/github.com/crc-org/crc-cloud github.com/openstack-k8s-operators/architecture: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/architecture checkout: main checkout_description: project override ref commit: 2eaa1ef0f72a4e7ef0a5042b135993cbfc51eacc name: openstack-k8s-operators/architecture required: true short_name: architecture src_dir: src/github.com/openstack-k8s-operators/architecture github.com/openstack-k8s-operators/ci-framework: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/ci-framework checkout: main checkout_description: project override ref commit: daa791825fcb1f23b3b87c68747b1dbe292d2d2d name: openstack-k8s-operators/ci-framework required: true short_name: ci-framework src_dir: src/github.com/openstack-k8s-operators/ci-framework github.com/openstack-k8s-operators/data-plane-adoption: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/data-plane-adoption checkout: main checkout_description: project override ref commit: 7346bd354c161fbd39016de93e1981fb8edfe179 name: openstack-k8s-operators/data-plane-adoption required: true short_name: data-plane-adoption src_dir: src/github.com/openstack-k8s-operators/data-plane-adoption github.com/openstack-k8s-operators/install_yamls: canonical_hostname: github.com canonical_name: github.com/openstack-k8s-operators/install_yamls checkout: main checkout_description: project override ref commit: 13897053815e2df424a56208aa288cf95b7283d1 name: openstack-k8s-operators/install_yamls required: true short_name: install_yamls src_dir: src/github.com/openstack-k8s-operators/install_yamls opendev.org/zuul/zuul-jobs: canonical_hostname: opendev.org canonical_name: opendev.org/zuul/zuul-jobs checkout: master checkout_description: project default 
branch commit: 1d0e62540c53a110ce48eaedb202bbd70f327c48 name: zuul/zuul-jobs required: true short_name: zuul-jobs src_dir: src/opendev.org/zuul/zuul-jobs review.rdoproject.org/config: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/config checkout: master checkout_description: project default branch commit: 6b8ed36de1a9fb17d1895a907c56e0ebed359010 name: config required: true short_name: config src_dir: src/review.rdoproject.org/config review.rdoproject.org/rdo-jobs: canonical_hostname: review.rdoproject.org canonical_name: review.rdoproject.org/rdo-jobs checkout: master checkout_description: project default branch commit: d6509b6cfec74740c76a12d94dd9fc8ce1990297 name: rdo-jobs required: true short_name: rdo-jobs src_dir: src/review.rdoproject.org/rdo-jobs ref: refs/pull/1202/head resources: {} tenant: rdoproject.org timeout: 14400 topic: null voting: true zuul_log_collection: true home/zuul/zuul-output/logs/ci-framework-data/artifacts/parameters/install-yamls-params.yml0000644000175000017500000006664615134437263031335 0ustar zuulzuulcifmw_install_yamls_defaults: ADOPTED_EXTERNAL_NETWORK: 172.21.1.0/24 ADOPTED_INTERNALAPI_NETWORK: 172.17.1.0/24 ADOPTED_STORAGEMGMT_NETWORK: 172.20.1.0/24 ADOPTED_STORAGE_NETWORK: 172.18.1.0/24 ADOPTED_TENANT_NETWORK: 172.9.1.0/24 ANSIBLEEE: config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_BRANCH: main ANSIBLEEE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/config/samples/_v1beta1_ansibleee.yaml ANSIBLEEE_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-operator-index:latest ANSIBLEEE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/kuttl-test.yaml ANSIBLEEE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/test/kuttl/tests ANSIBLEEE_KUTTL_NAMESPACE: ansibleee-kuttl-tests ANSIBLEEE_REPO: https://github.com/openstack-k8s-operators/openstack-ansibleee-operator ANSIBLEE_COMMIT_HASH: '' BARBICAN: config/samples/barbican_v1beta1_barbican.yaml BARBICAN_BRANCH: main BARBICAN_COMMIT_HASH: '' BARBICAN_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/config/samples/barbican_v1beta1_barbican.yaml BARBICAN_DEPL_IMG: unused BARBICAN_IMG: quay.io/openstack-k8s-operators/barbican-operator-index:latest BARBICAN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/kuttl-test.yaml BARBICAN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/test/kuttl/tests BARBICAN_KUTTL_NAMESPACE: barbican-kuttl-tests BARBICAN_REPO: https://github.com/openstack-k8s-operators/barbican-operator.git BARBICAN_SERVICE_ENABLED: 'true' BARBICAN_SIMPLE_CRYPTO_ENCRYPTION_KEY: sE**********U= BAREMETAL_BRANCH: main BAREMETAL_COMMIT_HASH: '' BAREMETAL_IMG: quay.io/openstack-k8s-operators/openstack-baremetal-operator-index:latest BAREMETAL_OS_CONTAINER_IMG: '' BAREMETAL_OS_IMG: '' BAREMETAL_OS_IMG_TYPE: '' BAREMETAL_REPO: https://github.com/openstack-k8s-operators/openstack-baremetal-operator.git BAREMETAL_TIMEOUT: 20m BASH_IMG: quay.io/openstack-k8s-operators/bash:latest BGP_ASN: '64999' BGP_LEAF_1: 100.65.4.1 BGP_LEAF_2: 100.64.4.1 BGP_OVN_ROUTING: 'false' BGP_PEER_ASN: '64999' BGP_SOURCE_IP: 172.30.4.2 BGP_SOURCE_IP6: f00d:f00d:f00d:f00d:f00d:f00d:f00d:42 BMAAS_BRIDGE_IPV4_PREFIX: 172.20.1.2/24 BMAAS_BRIDGE_IPV6_PREFIX: fd00:bbbb::2/64 BMAAS_INSTANCE_DISK_SIZE: '20' BMAAS_INSTANCE_MEMORY: '4096' BMAAS_INSTANCE_NAME_PREFIX: 
crc-bmaas BMAAS_INSTANCE_NET_MODEL: virtio BMAAS_INSTANCE_OS_VARIANT: centos-stream9 BMAAS_INSTANCE_VCPUS: '2' BMAAS_INSTANCE_VIRT_TYPE: kvm BMAAS_IPV4: 'true' BMAAS_IPV6: 'false' BMAAS_LIBVIRT_USER: sushyemu BMAAS_METALLB_ADDRESS_POOL: 172.20.1.64/26 BMAAS_METALLB_POOL_NAME: baremetal BMAAS_NETWORK_IPV4_PREFIX: 172.20.1.1/24 BMAAS_NETWORK_IPV6_PREFIX: fd00:bbbb::1/64 BMAAS_NETWORK_NAME: crc-bmaas BMAAS_NODE_COUNT: '1' BMAAS_OCP_INSTANCE_NAME: crc BMAAS_REDFISH_PASSWORD: password BMAAS_REDFISH_USERNAME: admin BMAAS_ROUTE_LIBVIRT_NETWORKS: crc-bmaas,crc,default BMAAS_SUSHY_EMULATOR_DRIVER: libvirt BMAAS_SUSHY_EMULATOR_IMAGE: quay.io/metal3-io/sushy-tools:latest BMAAS_SUSHY_EMULATOR_NAMESPACE: sushy-emulator BMAAS_SUSHY_EMULATOR_OS_CLIENT_CONFIG_FILE: /etc/openstack/clouds.yaml BMAAS_SUSHY_EMULATOR_OS_CLOUD: openstack BMH_NAMESPACE: openstack BMO_BRANCH: release-0.9 BMO_CLEANUP: 'true' BMO_COMMIT_HASH: '' BMO_IPA_BRANCH: stable/2024.1 BMO_IRONIC_HOST: 192.168.122.10 BMO_PROVISIONING_INTERFACE: '' BMO_REPO: https://github.com/metal3-io/baremetal-operator BMO_SETUP: false BMO_SETUP_ROUTE_REPLACE: 'true' BM_CTLPLANE_INTERFACE: enp1s0 BM_INSTANCE_MEMORY: '8192' BM_INSTANCE_NAME_PREFIX: edpm-compute-baremetal BM_INSTANCE_NAME_SUFFIX: '0' BM_NETWORK_NAME: default BM_NODE_COUNT: '1' BM_ROOT_PASSWORD: '' BM_ROOT_PASSWORD_SECRET: '' CEILOMETER_CENTRAL_DEPL_IMG: unused CEILOMETER_NOTIFICATION_DEPL_IMG: unused CEPH_BRANCH: release-1.15 CEPH_CLIENT: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/toolbox.yaml CEPH_COMMON: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/common.yaml CEPH_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/cluster-test.yaml CEPH_CRDS: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/crds.yaml CEPH_IMG: quay.io/ceph/demo:latest-squid CEPH_OP: /home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/operator-openshift.yaml CEPH_REPO: https://github.com/rook/rook.git CERTMANAGER_TIMEOUT: 300s CHECKOUT_FROM_OPENSTACK_REF: 'true' CINDER: config/samples/cinder_v1beta1_cinder.yaml CINDERAPI_DEPL_IMG: unused CINDERBKP_DEPL_IMG: unused CINDERSCH_DEPL_IMG: unused CINDERVOL_DEPL_IMG: unused CINDER_BRANCH: main CINDER_COMMIT_HASH: '' CINDER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/config/samples/cinder_v1beta1_cinder.yaml CINDER_IMG: quay.io/openstack-k8s-operators/cinder-operator-index:latest CINDER_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/kuttl-test.yaml CINDER_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/test/kuttl/tests CINDER_KUTTL_NAMESPACE: cinder-kuttl-tests CINDER_REPO: https://github.com/openstack-k8s-operators/cinder-operator.git CLEANUP_DIR_CMD: rm -Rf CRC_BGP_NIC_1_MAC: '52:54:00:11:11:11' CRC_BGP_NIC_2_MAC: '52:54:00:11:11:12' CRC_HTTPS_PROXY: '' CRC_HTTP_PROXY: '' CRC_STORAGE_NAMESPACE: crc-storage CRC_STORAGE_RETRIES: '3' CRC_URL: '''https://developers.redhat.com/content-gateway/rest/mirror/pub/openshift-v4/clients/crc/latest/crc-linux-amd64.tar.xz''' CRC_VERSION: latest DATAPLANE_ANSIBLE_SECRET: dataplane-ansible-ssh-private-key-secret DATAPLANE_ANSIBLE_USER: '' DATAPLANE_COMPUTE_IP: 192.168.122.100 DATAPLANE_CONTAINER_PREFIX: openstack DATAPLANE_CONTAINER_TAG: current-podified DATAPLANE_CUSTOM_SERVICE_RUNNER_IMG: quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest DATAPLANE_DEFAULT_GW: 
192.168.122.1 DATAPLANE_EXTRA_NOVA_CONFIG_FILE: /dev/null DATAPLANE_GROWVOLS_ARGS: /=8GB /tmp=1GB /home=1GB /var=100% DATAPLANE_KUSTOMIZE_SCENARIO: preprovisioned DATAPLANE_NETWORKER_IP: 192.168.122.200 DATAPLANE_NETWORK_INTERFACE_NAME: eth0 DATAPLANE_NOVA_NFS_PATH: '' DATAPLANE_NTP_SERVER: pool.ntp.org DATAPLANE_PLAYBOOK: osp.edpm.download_cache DATAPLANE_REGISTRY_URL: quay.io/podified-antelope-centos9 DATAPLANE_RUNNER_IMG: '' DATAPLANE_SERVER_ROLE: compute DATAPLANE_SSHD_ALLOWED_RANGES: '[''192.168.122.0/24'']' DATAPLANE_TIMEOUT: 30m DATAPLANE_TLS_ENABLED: 'true' DATAPLANE_TOTAL_NETWORKER_NODES: '1' DATAPLANE_TOTAL_NODES: '1' DBSERVICE: galera DESIGNATE: config/samples/designate_v1beta1_designate.yaml DESIGNATE_BRANCH: main DESIGNATE_COMMIT_HASH: '' DESIGNATE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/config/samples/designate_v1beta1_designate.yaml DESIGNATE_IMG: quay.io/openstack-k8s-operators/designate-operator-index:latest DESIGNATE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/kuttl-test.yaml DESIGNATE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/test/kuttl/tests DESIGNATE_KUTTL_NAMESPACE: designate-kuttl-tests DESIGNATE_REPO: https://github.com/openstack-k8s-operators/designate-operator.git DNSDATA: config/samples/network_v1beta1_dnsdata.yaml DNSDATA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsdata.yaml DNSMASQ: config/samples/network_v1beta1_dnsmasq.yaml DNSMASQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsmasq.yaml DNS_DEPL_IMG: unused DNS_DOMAIN: localdomain DOWNLOAD_TOOLS_SELECTION: all EDPM_ATTACH_EXTNET: 'true' EDPM_COMPUTE_ADDITIONAL_HOST_ROUTES: '''[]''' EDPM_COMPUTE_ADDITIONAL_NETWORKS: '''[]''' EDPM_COMPUTE_CELLS: '1' EDPM_COMPUTE_CEPH_ENABLED: 'true' EDPM_COMPUTE_CEPH_NOVA: 'true' EDPM_COMPUTE_DHCP_AGENT_ENABLED: 'true' EDPM_COMPUTE_SRIOV_ENABLED: 'true' EDPM_COMPUTE_SUFFIX: '0' EDPM_CONFIGURE_DEFAULT_ROUTE: 'true' EDPM_CONFIGURE_HUGEPAGES: 'false' EDPM_CONFIGURE_NETWORKING: 'true' EDPM_FIRSTBOOT_EXTRA: /tmp/edpm-firstboot-extra EDPM_NETWORKER_SUFFIX: '0' EDPM_TOTAL_NETWORKERS: '1' EDPM_TOTAL_NODES: '1' GALERA_REPLICAS: '' GENERATE_SSH_KEYS: 'true' GIT_CLONE_OPTS: '' GLANCE: config/samples/glance_v1beta1_glance.yaml GLANCEAPI_DEPL_IMG: unused GLANCE_BRANCH: main GLANCE_COMMIT_HASH: '' GLANCE_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/config/samples/glance_v1beta1_glance.yaml GLANCE_IMG: quay.io/openstack-k8s-operators/glance-operator-index:latest GLANCE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/kuttl-test.yaml GLANCE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/test/kuttl/tests GLANCE_KUTTL_NAMESPACE: glance-kuttl-tests GLANCE_REPO: https://github.com/openstack-k8s-operators/glance-operator.git HEAT: config/samples/heat_v1beta1_heat.yaml HEATAPI_DEPL_IMG: unused HEATCFNAPI_DEPL_IMG: unused HEATENGINE_DEPL_IMG: unused HEAT_AUTH_ENCRYPTION_KEY: 76**********f0 HEAT_BRANCH: main HEAT_COMMIT_HASH: '' HEAT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/config/samples/heat_v1beta1_heat.yaml HEAT_IMG: quay.io/openstack-k8s-operators/heat-operator-index:latest HEAT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/kuttl-test.yaml HEAT_KUTTL_DIR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/test/kuttl/tests HEAT_KUTTL_NAMESPACE: heat-kuttl-tests HEAT_REPO: https://github.com/openstack-k8s-operators/heat-operator.git HEAT_SERVICE_ENABLED: 'true' HORIZON: config/samples/horizon_v1beta1_horizon.yaml HORIZON_BRANCH: main HORIZON_COMMIT_HASH: '' HORIZON_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/config/samples/horizon_v1beta1_horizon.yaml HORIZON_DEPL_IMG: unused HORIZON_IMG: quay.io/openstack-k8s-operators/horizon-operator-index:latest HORIZON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/kuttl-test.yaml HORIZON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/test/kuttl/tests HORIZON_KUTTL_NAMESPACE: horizon-kuttl-tests HORIZON_REPO: https://github.com/openstack-k8s-operators/horizon-operator.git INFRA_BRANCH: main INFRA_COMMIT_HASH: '' INFRA_IMG: quay.io/openstack-k8s-operators/infra-operator-index:latest INFRA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/kuttl-test.yaml INFRA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/test/kuttl/tests INFRA_KUTTL_NAMESPACE: infra-kuttl-tests INFRA_REPO: https://github.com/openstack-k8s-operators/infra-operator.git INSTALL_CERT_MANAGER: 'true' INSTALL_NMSTATE: true || false INSTALL_NNCP: true || false INTERNALAPI_HOST_ROUTES: '' IPV6_LAB_IPV4_NETWORK_IPADDRESS: 172.30.0.1/24 IPV6_LAB_IPV6_NETWORK_IPADDRESS: fd00:abcd:abcd:fc00::1/64 IPV6_LAB_LIBVIRT_STORAGE_POOL: default IPV6_LAB_MANAGE_FIREWALLD: 'true' IPV6_LAB_NAT64_HOST_IPV4: 172.30.0.2/24 IPV6_LAB_NAT64_HOST_IPV6: fd00:abcd:abcd:fc00::2/64 IPV6_LAB_NAT64_INSTANCE_NAME: nat64-router IPV6_LAB_NAT64_IPV6_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_NAT64_TAYGA_DYNAMIC_POOL: 192.168.255.0/24 IPV6_LAB_NAT64_TAYGA_IPV4: 192.168.255.1 IPV6_LAB_NAT64_TAYGA_IPV6: fd00:abcd:abcd:fc00::3 IPV6_LAB_NAT64_TAYGA_IPV6_PREFIX: fd00:abcd:abcd:fcff::/96 IPV6_LAB_NAT64_UPDATE_PACKAGES: 'false' IPV6_LAB_NETWORK_NAME: nat64 IPV6_LAB_SNO_CLUSTER_NETWORK: fd00:abcd:0::/48 IPV6_LAB_SNO_HOST_IP: fd00:abcd:abcd:fc00::11 IPV6_LAB_SNO_HOST_PREFIX: '64' IPV6_LAB_SNO_INSTANCE_NAME: sno IPV6_LAB_SNO_MACHINE_NETWORK: fd00:abcd:abcd:fc00::/64 IPV6_LAB_SNO_OCP_MIRROR_URL: https://mirror.openshift.com/pub/openshift-v4/clients/ocp IPV6_LAB_SNO_OCP_VERSION: latest-4.14 IPV6_LAB_SNO_SERVICE_NETWORK: fd00:abcd:abcd:fc03::/112 IPV6_LAB_SSH_PUB_KEY: /home/zuul/.ssh/id_rsa.pub IPV6_LAB_WORK_DIR: /home/zuul/.ipv6lab IRONIC: config/samples/ironic_v1beta1_ironic.yaml IRONICAPI_DEPL_IMG: unused IRONICCON_DEPL_IMG: unused IRONICINS_DEPL_IMG: unused IRONICNAG_DEPL_IMG: unused IRONICPXE_DEPL_IMG: unused IRONIC_BRANCH: main IRONIC_COMMIT_HASH: '' IRONIC_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/config/samples/ironic_v1beta1_ironic.yaml IRONIC_IMAGE: quay.io/metal3-io/ironic IRONIC_IMAGE_TAG: release-24.1 IRONIC_IMG: quay.io/openstack-k8s-operators/ironic-operator-index:latest IRONIC_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/kuttl-test.yaml IRONIC_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/test/kuttl/tests IRONIC_KUTTL_NAMESPACE: ironic-kuttl-tests IRONIC_REPO: https://github.com/openstack-k8s-operators/ironic-operator.git KEYSTONEAPI: config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/config/samples/keystone_v1beta1_keystoneapi.yaml KEYSTONEAPI_DEPL_IMG: unused KEYSTONE_BRANCH: main KEYSTONE_COMMIT_HASH: '' KEYSTONE_FEDERATION_CLIENT_SECRET: CO**********6f KEYSTONE_FEDERATION_CRYPTO_PASSPHRASE: openstack KEYSTONE_IMG: quay.io/openstack-k8s-operators/keystone-operator-index:latest KEYSTONE_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/kuttl-test.yaml KEYSTONE_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/test/kuttl/tests KEYSTONE_KUTTL_NAMESPACE: keystone-kuttl-tests KEYSTONE_REPO: https://github.com/openstack-k8s-operators/keystone-operator.git KUBEADMIN_PWD: '12345678' LIBVIRT_SECRET: libvirt-secret LOKI_DEPLOY_MODE: openshift-network LOKI_DEPLOY_NAMESPACE: netobserv LOKI_DEPLOY_SIZE: 1x.demo LOKI_NAMESPACE: openshift-operators-redhat LOKI_OPERATOR_GROUP: openshift-operators-redhat-loki LOKI_SUBSCRIPTION: loki-operator LVMS_CR: '1' MANILA: config/samples/manila_v1beta1_manila.yaml MANILAAPI_DEPL_IMG: unused MANILASCH_DEPL_IMG: unused MANILASHARE_DEPL_IMG: unused MANILA_BRANCH: main MANILA_COMMIT_HASH: '' MANILA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/config/samples/manila_v1beta1_manila.yaml MANILA_IMG: quay.io/openstack-k8s-operators/manila-operator-index:latest MANILA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/kuttl-test.yaml MANILA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/test/kuttl/tests MANILA_KUTTL_NAMESPACE: manila-kuttl-tests MANILA_REPO: https://github.com/openstack-k8s-operators/manila-operator.git MANILA_SERVICE_ENABLED: 'true' MARIADB: config/samples/mariadb_v1beta1_galera.yaml MARIADB_BRANCH: main MARIADB_CHAINSAW_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/config.yaml MARIADB_CHAINSAW_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/tests MARIADB_CHAINSAW_NAMESPACE: mariadb-chainsaw-tests MARIADB_COMMIT_HASH: '' MARIADB_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/config/samples/mariadb_v1beta1_galera.yaml MARIADB_DEPL_IMG: unused MARIADB_IMG: quay.io/openstack-k8s-operators/mariadb-operator-index:latest MARIADB_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/kuttl-test.yaml MARIADB_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/kuttl/tests MARIADB_KUTTL_NAMESPACE: mariadb-kuttl-tests MARIADB_REPO: https://github.com/openstack-k8s-operators/mariadb-operator.git MEMCACHED: config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/memcached_v1beta1_memcached.yaml MEMCACHED_DEPL_IMG: unused METADATA_SHARED_SECRET: '12**********42' METALLB_IPV6_POOL: fd00:aaaa::80-fd00:aaaa::90 METALLB_POOL: 192.168.122.80-192.168.122.90 MICROSHIFT: '0' NAMESPACE: openstack NETCONFIG: config/samples/network_v1beta1_netconfig.yaml NETCONFIG_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_netconfig.yaml NETCONFIG_DEPL_IMG: unused NETOBSERV_DEPLOY_NAMESPACE: netobserv NETOBSERV_NAMESPACE: openshift-netobserv-operator NETOBSERV_OPERATOR_GROUP: openshift-netobserv-operator-net NETOBSERV_SUBSCRIPTION: netobserv-operator NETWORK_BGP: 'false' 
NETWORK_DESIGNATE_ADDRESS_PREFIX: 172.28.0 NETWORK_DESIGNATE_EXT_ADDRESS_PREFIX: 172.50.0 NETWORK_INTERNALAPI_ADDRESS_PREFIX: 172.17.0 NETWORK_ISOLATION: 'true' NETWORK_ISOLATION_INSTANCE_NAME: crc NETWORK_ISOLATION_IPV4: 'true' NETWORK_ISOLATION_IPV4_ADDRESS: 172.16.1.1/24 NETWORK_ISOLATION_IPV4_NAT: 'true' NETWORK_ISOLATION_IPV6: 'false' NETWORK_ISOLATION_IPV6_ADDRESS: fd00:aaaa::1/64 NETWORK_ISOLATION_IP_ADDRESS: 192.168.122.10 NETWORK_ISOLATION_MAC: '52:54:00:11:11:10' NETWORK_ISOLATION_NETWORK_NAME: net-iso NETWORK_ISOLATION_NET_NAME: default NETWORK_ISOLATION_USE_DEFAULT_NETWORK: 'true' NETWORK_MTU: '1500' NETWORK_STORAGEMGMT_ADDRESS_PREFIX: 172.20.0 NETWORK_STORAGE_ADDRESS_PREFIX: 172.18.0 NETWORK_STORAGE_MACVLAN: '' NETWORK_TENANT_ADDRESS_PREFIX: 172.19.0 NETWORK_VLAN_START: '20' NETWORK_VLAN_STEP: '1' NEUTRONAPI: config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/config/samples/neutron_v1beta1_neutronapi.yaml NEUTRONAPI_DEPL_IMG: unused NEUTRON_BRANCH: main NEUTRON_COMMIT_HASH: '' NEUTRON_IMG: quay.io/openstack-k8s-operators/neutron-operator-index:latest NEUTRON_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/kuttl-test.yaml NEUTRON_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/test/kuttl/tests NEUTRON_KUTTL_NAMESPACE: neutron-kuttl-tests NEUTRON_REPO: https://github.com/openstack-k8s-operators/neutron-operator.git NFS_HOME: /home/nfs NMSTATE_NAMESPACE: openshift-nmstate NMSTATE_OPERATOR_GROUP: openshift-nmstate-tn6k8 NMSTATE_SUBSCRIPTION: kubernetes-nmstate-operator NNCP_ADDITIONAL_HOST_ROUTES: '' NNCP_BGP_1_INTERFACE: enp7s0 NNCP_BGP_1_IP_ADDRESS: 100.65.4.2 NNCP_BGP_2_INTERFACE: enp8s0 NNCP_BGP_2_IP_ADDRESS: 100.64.4.2 NNCP_BRIDGE: ospbr NNCP_CLEANUP_TIMEOUT: 120s NNCP_CTLPLANE_IPV6_ADDRESS_PREFIX: 'fd00:aaaa::' NNCP_CTLPLANE_IPV6_ADDRESS_SUFFIX: '10' NNCP_CTLPLANE_IP_ADDRESS_PREFIX: 192.168.122 NNCP_CTLPLANE_IP_ADDRESS_SUFFIX: '10' NNCP_DNS_SERVER: 192.168.122.1 NNCP_DNS_SERVER_IPV6: fd00:aaaa::1 NNCP_GATEWAY: 192.168.122.1 NNCP_GATEWAY_IPV6: fd00:aaaa::1 NNCP_INTERFACE: enp6s0 NNCP_NODES: '' NNCP_TIMEOUT: 240s NOVA: config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_BRANCH: main NOVA_COMMIT_HASH: '' NOVA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/nova-operator/config/samples/nova_v1beta1_nova_collapsed_cell.yaml NOVA_IMG: quay.io/openstack-k8s-operators/nova-operator-index:latest NOVA_REPO: https://github.com/openstack-k8s-operators/nova-operator.git NUMBER_OF_INSTANCES: '1' OCP_NETWORK_NAME: crc OCTAVIA: config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_BRANCH: main OCTAVIA_COMMIT_HASH: '' OCTAVIA_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/config/samples/octavia_v1beta1_octavia.yaml OCTAVIA_IMG: quay.io/openstack-k8s-operators/octavia-operator-index:latest OCTAVIA_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/kuttl-test.yaml OCTAVIA_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/test/kuttl/tests OCTAVIA_KUTTL_NAMESPACE: octavia-kuttl-tests OCTAVIA_REPO: https://github.com/openstack-k8s-operators/octavia-operator.git OKD: 'false' OPENSTACK_BRANCH: main OPENSTACK_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-bundle:latest OPENSTACK_COMMIT_HASH: '' OPENSTACK_CR: 
/home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_CRDS_DIR: openstack_crds OPENSTACK_CTLPLANE: config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml OPENSTACK_IMG: quay.io/openstack-k8s-operators/openstack-operator-index:latest OPENSTACK_K8S_BRANCH: main OPENSTACK_K8S_TAG: latest OPENSTACK_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/kuttl-test.yaml OPENSTACK_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/test/kuttl/tests OPENSTACK_KUTTL_NAMESPACE: openstack-kuttl-tests OPENSTACK_NEUTRON_CUSTOM_CONF: '' OPENSTACK_REPO: https://github.com/openstack-k8s-operators/openstack-operator.git OPENSTACK_STORAGE_BUNDLE_IMG: quay.io/openstack-k8s-operators/openstack-operator-storage-bundle:latest OPERATOR_BASE_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator OPERATOR_CHANNEL: '' OPERATOR_NAMESPACE: openstack-operators OPERATOR_SOURCE: '' OPERATOR_SOURCE_NAMESPACE: '' OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm OVNCONTROLLER: config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovncontroller.yaml OVNCONTROLLER_NMAP: 'true' OVNDBS: config/samples/ovn_v1beta1_ovndbcluster.yaml OVNDBS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovndbcluster.yaml OVNNORTHD: config/samples/ovn_v1beta1_ovnnorthd.yaml OVNNORTHD_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovnnorthd.yaml OVN_BRANCH: main OVN_COMMIT_HASH: '' OVN_IMG: quay.io/openstack-k8s-operators/ovn-operator-index:latest OVN_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/kuttl-test.yaml OVN_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/test/kuttl/tests OVN_KUTTL_NAMESPACE: ovn-kuttl-tests OVN_REPO: https://github.com/openstack-k8s-operators/ovn-operator.git PASSWORD: '12**********78' PLACEMENTAPI: config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/config/samples/placement_v1beta1_placementapi.yaml PLACEMENTAPI_DEPL_IMG: unused PLACEMENT_BRANCH: main PLACEMENT_COMMIT_HASH: '' PLACEMENT_IMG: quay.io/openstack-k8s-operators/placement-operator-index:latest PLACEMENT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/kuttl-test.yaml PLACEMENT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/test/kuttl/tests PLACEMENT_KUTTL_NAMESPACE: placement-kuttl-tests PLACEMENT_REPO: https://github.com/openstack-k8s-operators/placement-operator.git PULL_SECRET: /home/zuul/src/review.rdoproject.org/rdo-jobs/playbooks/data_plane_adoption/pull-secret.txt RABBITMQ: docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_BRANCH: patches RABBITMQ_COMMIT_HASH: '' RABBITMQ_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/rabbitmq-operator/docs/examples/default-security-context/rabbitmq.yaml RABBITMQ_DEPL_IMG: unused RABBITMQ_IMG: quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest RABBITMQ_REPO: https://github.com/openstack-k8s-operators/rabbitmq-cluster-operator.git REDHAT_OPERATORS: 
'false' REDIS: config/samples/redis_v1beta1_redis.yaml REDIS_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator-redis/config/samples/redis_v1beta1_redis.yaml REDIS_DEPL_IMG: unused RH_REGISTRY_PWD: '' RH_REGISTRY_USER: '' SECRET: os**********et SG_CORE_DEPL_IMG: unused STANDALONE_COMPUTE_DRIVER: libvirt STANDALONE_EXTERNAL_NET_PREFFIX: 172.21.0 STANDALONE_INTERNALAPI_NET_PREFIX: 172.17.0 STANDALONE_STORAGEMGMT_NET_PREFIX: 172.20.0 STANDALONE_STORAGE_NET_PREFIX: 172.18.0 STANDALONE_TENANT_NET_PREFIX: 172.19.0 STORAGEMGMT_HOST_ROUTES: '' STORAGE_CLASS: local-storage STORAGE_HOST_ROUTES: '' SWIFT: config/samples/swift_v1beta1_swift.yaml SWIFT_BRANCH: main SWIFT_COMMIT_HASH: '' SWIFT_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/config/samples/swift_v1beta1_swift.yaml SWIFT_IMG: quay.io/openstack-k8s-operators/swift-operator-index:latest SWIFT_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/kuttl-test.yaml SWIFT_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/test/kuttl/tests SWIFT_KUTTL_NAMESPACE: swift-kuttl-tests SWIFT_REPO: https://github.com/openstack-k8s-operators/swift-operator.git TELEMETRY: config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_BRANCH: main TELEMETRY_COMMIT_HASH: '' TELEMETRY_CR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/config/samples/telemetry_v1beta1_telemetry.yaml TELEMETRY_IMG: quay.io/openstack-k8s-operators/telemetry-operator-index:latest TELEMETRY_KUTTL_BASEDIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator TELEMETRY_KUTTL_CONF: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/kuttl-test.yaml TELEMETRY_KUTTL_DIR: /home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/test/kuttl/suites TELEMETRY_KUTTL_NAMESPACE: telemetry-kuttl-tests TELEMETRY_KUTTL_RELPATH: test/kuttl/suites TELEMETRY_REPO: https://github.com/openstack-k8s-operators/telemetry-operator.git TENANT_HOST_ROUTES: '' TIMEOUT: 300s TLS_ENABLED: 'false' tripleo_deploy: 'export REGISTRY_USER:' cifmw_install_yamls_environment: BMO_SETUP: false CHECKOUT_FROM_OPENSTACK_REF: 'true' KUBECONFIG: /home/zuul/.crc/machines/crc/kubeconfig OPENSTACK_K8S_BRANCH: main OUT: /home/zuul/ci-framework-data/artifacts/manifests OUTPUT_DIR: /home/zuul/ci-framework-data/artifacts/edpm home/zuul/zuul-output/logs/ci-framework-data/artifacts/parameters/custom-params.yml0000644000175000017500000000235115134437263030035 0ustar zuulzuulcifmw_architecture_repo: /home/zuul/src/github.com/openstack-k8s-operators/architecture cifmw_architecture_repo_relative: src/github.com/openstack-k8s-operators/architecture cifmw_artifacts_crc_sshkey: ~/.ssh/id_rsa cifmw_basedir: /home/zuul/ci-framework-data cifmw_default_dns_servers: - 1.1.1.1 - 8.8.8.8 cifmw_dlrn_report_result: false cifmw_installyamls_repos: /home/zuul/src/github.com/openstack-k8s-operators/install_yamls cifmw_installyamls_repos_relative: src/github.com/openstack-k8s-operators/install_yamls cifmw_nolog: true cifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_password: '12**********89' cifmw_openshift_setup_skip_internal_registry_tls_verify: true cifmw_openshift_skip_tls_verify: true cifmw_openshift_user: kubeadmin cifmw_openstack_k8s_operators_org_url: https://github.com/openstack-k8s-operators cifmw_openstack_namespace: openstack cifmw_path: 
/home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin cifmw_repo: /home/zuul/src/github.com/openstack-k8s-operators/ci-framework cifmw_repo_relative: src/github.com/openstack-k8s-operators/ci-framework cifmw_use_crc: false cifmw_use_libvirt: false cifmw_zuul_target_host: controller home/zuul/zuul-output/logs/ci-framework-data/artifacts/parameters/openshift-login-params.yml0000644000175000017500000000043015134411325031613 0ustar zuulzuulcifmw_openshift_api: api.crc.testing:6443 cifmw_openshift_context: default/api-crc-testing:6443/kubeadmin cifmw_openshift_kubeconfig: /home/zuul/.crc/machines/crc/kubeconfig cifmw_openshift_token: sha256~DRDkWcBbIdKn0u9Nxm-2md9dPVf-gVXLRNv2O8KcEp4 cifmw_openshift_user: kubeadmin home/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible-facts.yml0000644000175000017500000005266515134432006025615 0ustar zuulzuul_ansible_facts_gathered: true all_ipv4_addresses: - 38.102.83.83 all_ipv6_addresses: - fe80::f816:3eff:fe4f:9ce2 ansible_local: {} apparmor: status: disabled architecture: x86_64 bios_date: 04/01/2014 bios_vendor: SeaBIOS bios_version: 1.15.0-1 board_asset_tag: NA board_name: NA board_serial: NA board_vendor: NA board_version: NA chassis_asset_tag: NA chassis_serial: NA chassis_vendor: QEMU chassis_version: pc-i440fx-6.2 cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-661.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=22ac9141-3960-4912-b20e-19fc8a328d40 crc_ci_bootstrap_instance_default_net_config: mtu: '1500' range: 192.168.122.0/24 router_net: '' transparent: true crc_ci_bootstrap_instance_nm_vlan_networks: - key: internal-api value: config_nm: false ip: 172.17.0.100 - key: storage value: config_nm: false ip: 172.18.0.100 - key: storage_mgmt value: config_nm: false ip: 172.20.0.100 - key: tenant value: config_nm: false ip: 172.19.0.100 crc_ci_bootstrap_instance_parent_port_create_yaml: admin_state_up: true allowed_address_pairs: [] binding_host_id: null binding_profile: {} binding_vif_details: {} binding_vif_type: null binding_vnic_type: normal created_at: '2026-01-22T11:53:24Z' data_plane_status: null description: '' device_id: '' device_owner: '' device_profile: null dns_assignment: - fqdn: host-192-168-122-100.openstacklocal. 
hostname: host-192-168-122-100 ip_address: 192.168.122.100 dns_domain: '' dns_name: '' extra_dhcp_opts: [] fixed_ips: - ip_address: 192.168.122.100 subnet_id: 7a5d6965-a3da-4c6f-bc86-42f95e634461 hardware_offload_type: null hints: '' id: 6ee4c47b-3df3-4385-88f3-b108a8f825dd ip_allocation: immediate mac_address: fa:16:3e:b5:53:e5 name: standalone-71c2ce28-24b8-4530-b131-72e04889a182 network_id: b176f5a8-05bf-4260-94f7-c8fdaa6712e1 numa_affinity_policy: null port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 propagate_uplink_status: null qos_network_policy_id: null qos_policy_id: null resource_request: null revision_number: 1 security_group_ids: [] status: DOWN tags: [] trunk_details: null trusted: null updated_at: '2026-01-22T11:53:24Z' crc_ci_bootstrap_network_name: zuul-ci-net-3911aa2e crc_ci_bootstrap_networks_out: controller: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.11/24 mac: fa:16:3e:b2:1f:bf mtu: '1500' internal-api: connection: ci-private-network-20 iface: eth1.20 ip: 172.17.0.4/24 mac: 52:54:00:91:b6:a0 mtu: '1496' parent_iface: eth1 vlan: 20 storage: connection: ci-private-network-21 iface: eth1.21 ip: 172.18.0.4/24 mac: 52:54:00:8c:4a:57 mtu: '1496' parent_iface: eth1 vlan: 21 storage_mgmt: connection: ci-private-network-23 iface: eth1.23 ip: 172.20.0.4/24 mac: 52:54:00:e6:85:75 mtu: '1496' parent_iface: eth1 vlan: 23 tenant: connection: ci-private-network-22 iface: eth1.22 ip: 172.19.0.4/24 mac: 52:54:00:34:d6:b6 mtu: '1496' parent_iface: eth1 vlan: 22 crc: default: connection: ci-private-network gw: 192.168.122.1 iface: ens7 ip: 192.168.122.10/24 mac: fa:16:3e:ee:8d:ea mtu: '1500' internal-api: connection: ci-private-network-20 iface: ens7.20 ip: 172.17.0.5/24 mac: 52:54:00:98:87:e6 mtu: '1496' parent_iface: ens7 vlan: 20 storage: connection: ci-private-network-21 iface: ens7.21 ip: 172.18.0.5/24 mac: 52:54:00:fe:f9:a5 mtu: '1496' parent_iface: ens7 vlan: 21 storage_mgmt: connection: ci-private-network-23 iface: ens7.23 ip: 172.20.0.5/24 mac: 52:54:00:66:52:20 mtu: '1496' parent_iface: ens7 vlan: 23 tenant: connection: ci-private-network-22 iface: ens7.22 ip: 172.19.0.5/24 mac: 52:54:00:c7:ca:3e mtu: '1496' parent_iface: ens7 vlan: 22 standalone: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.100/24 mac: fa:16:3e:b5:53:e5 mtu: '1500' internal-api: iface: eth1.20 ip: 172.17.0.100/24 mac: 52:54:00:87:a9:f6 mtu: '1496' parent_iface: eth1 vlan: 20 storage: iface: eth1.21 ip: 172.18.0.100/24 mac: 52:54:00:4f:71:e3 mtu: '1496' parent_iface: eth1 vlan: 21 storage_mgmt: iface: eth1.23 ip: 172.20.0.100/24 mac: 52:54:00:40:23:1f mtu: '1496' parent_iface: eth1 vlan: 23 tenant: iface: eth1.22 ip: 172.19.0.100/24 mac: 52:54:00:f6:c7:15 mtu: '1496' parent_iface: eth1 vlan: 22 crc_ci_bootstrap_private_net_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2026-01-22T11:51:59Z' description: '' dns_domain: '' id: b176f5a8-05bf-4260-94f7-c8fdaa6712e1 ipv4_address_scope: null ipv6_address_scope: null is_default: false is_vlan_qinq: null is_vlan_transparent: true l2_adjacency: true mtu: 1500 name: zuul-ci-net-3911aa2e port_security_enabled: false project_id: 4b633c451ac74233be3721a3635275e5 provider:network_type: null provider:physical_network: null provider:segmentation_id: null qos_policy_id: null revision_number: 1 router:external: false segments: null shared: false status: ACTIVE subnets: [] tags: [] updated_at: '2026-01-22T11:51:59Z' 
crc_ci_bootstrap_private_router_create_yaml: admin_state_up: true availability_zone_hints: - nova availability_zones: [] created_at: '2026-01-22T11:52:05Z' description: '' enable_ndp_proxy: null external_gateway_info: null flavor_id: null id: 68d8267d-56ed-4ee3-aa04-a2f7bd2ffe0b name: zuul-ci-subnet-router-3911aa2e project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 1 routes: [] status: ACTIVE tags: [] tenant_id: 4b633c451ac74233be3721a3635275e5 updated_at: '2026-01-22T11:52:05Z' crc_ci_bootstrap_private_subnet_create_yaml: allocation_pools: - end: 192.168.122.254 start: 192.168.122.2 cidr: 192.168.122.0/24 created_at: '2026-01-22T11:52:03Z' description: '' dns_nameservers: [] dns_publish_fixed_ip: null enable_dhcp: false gateway_ip: 192.168.122.1 host_routes: [] id: 7a5d6965-a3da-4c6f-bc86-42f95e634461 ip_version: 4 ipv6_address_mode: null ipv6_ra_mode: null name: zuul-ci-subnet-3911aa2e network_id: b176f5a8-05bf-4260-94f7-c8fdaa6712e1 project_id: 4b633c451ac74233be3721a3635275e5 revision_number: 0 segment_id: null service_types: [] subnetpool_id: null tags: [] updated_at: '2026-01-22T11:52:03Z' crc_ci_bootstrap_provider_dns: - 199.204.44.24 - 199.204.47.54 crc_ci_bootstrap_router_name: zuul-ci-subnet-router-3911aa2e crc_ci_bootstrap_subnet_name: zuul-ci-subnet-3911aa2e date_time: date: '2026-01-22' day: '22' epoch: '1769092100' epoch_int: '1769092100' hour: '14' iso8601: '2026-01-22T14:28:20Z' iso8601_basic: 20260122T142820041113 iso8601_basic_short: 20260122T142820 iso8601_micro: '2026-01-22T14:28:20.041113Z' minute: '28' month: '01' second: '20' time: '14:28:20' tz: UTC tz_dst: UTC tz_offset: '+0000' weekday: Thursday weekday_number: '4' weeknumber: '03' year: '2026' default_ipv4: address: 38.102.83.83 alias: eth0 broadcast: 38.102.83.255 gateway: 38.102.83.1 interface: eth0 macaddress: fa:16:3e:4f:9c:e2 mtu: 1500 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' type: ether default_ipv6: {} device_links: ids: sr0: - ata-QEMU_DVD-ROM_QM00001 labels: sr0: - config-2 masters: {} uuids: sr0: - 2026-01-22-11-49-38-00 vda1: - 22ac9141-3960-4912-b20e-19fc8a328d40 devices: sr0: holders: [] host: '' links: ids: - ata-QEMU_DVD-ROM_QM00001 labels: - config-2 masters: [] uuids: - 2026-01-22-11-49-38-00 model: QEMU DVD-ROM partitions: {} removable: '1' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: mq-deadline sectors: '964' sectorsize: '2048' size: 482.00 KB support_discard: '2048' vendor: QEMU virtual: 1 vda: holders: [] host: '' links: ids: [] labels: [] masters: [] uuids: [] model: null partitions: vda1: holders: [] links: ids: [] labels: [] masters: [] uuids: - 22ac9141-3960-4912-b20e-19fc8a328d40 sectors: '167770079' sectorsize: 512 size: 80.00 GB start: '2048' uuid: 22ac9141-3960-4912-b20e-19fc8a328d40 removable: '0' rotational: '1' sas_address: null sas_device_handle: null scheduler_mode: none sectors: '167772160' sectorsize: '512' size: 80.00 GB support_discard: '512' vendor: '0x1af4' virtual: 1 discovered_interpreter_python: /usr/bin/python3 distribution: CentOS distribution_file_parsed: true distribution_file_path: /etc/centos-release distribution_file_variety: CentOS distribution_major_version: '9' distribution_release: Stream distribution_version: '9' dns: nameservers: - 192.168.122.10 - 199.204.44.24 - 199.204.47.54 domain: '' effective_group_id: 1000 effective_user_id: 1000 env: ANSIBLE_LOG_PATH: /home/zuul/ci-framework-data/logs/e2e-collect-logs-must-gather.log BASH_FUNC_which%%: "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which 
--tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}" DBUS_SESSION_BUS_ADDRESS: unix:path=/run/user/1000/bus DEBUGINFOD_IMA_CERT_PATH: '/etc/keys/ima:' DEBUGINFOD_URLS: 'https://debuginfod.centos.org/ ' HOME: /home/zuul LANG: en_US.UTF-8 LESSOPEN: '||/usr/bin/lesspipe.sh %s' LOGNAME: zuul MOTD_SHOWN: pam PATH: /home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin PWD: /home/zuul SELINUX_LEVEL_REQUESTED: '' SELINUX_ROLE_REQUESTED: '' SELINUX_USE_CURRENT_RANGE: '' SHELL: /bin/bash SHLVL: '1' SSH_CLIENT: 38.102.83.114 60544 22 SSH_CONNECTION: 38.102.83.114 60544 38.102.83.83 22 USER: zuul XDG_RUNTIME_DIR: /run/user/1000 XDG_SESSION_CLASS: user XDG_SESSION_ID: '18' XDG_SESSION_TYPE: tty _: /usr/bin/python3 which_declare: declare -f eth0: active: true device: eth0 features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on [fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: off [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: 'on' rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: on [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: 'on' tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: off [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: off [fixed] tx_gso_partial: off [fixed] tx_gso_robust: on [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: 'off' tx_scatter_gather: 'on' tx_scatter_gather_fraglist: off [fixed] tx_sctp_segmentation: off [fixed] tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'off' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: off [fixed] tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: off [fixed] hw_timestamp_filters: [] ipv4: address: 38.102.83.83 broadcast: 38.102.83.255 netmask: 255.255.255.0 network: 38.102.83.0 prefix: '24' ipv6: - address: fe80::f816:3eff:fe4f:9ce2 prefix: '64' scope: link macaddress: fa:16:3e:4f:9c:e2 module: virtio_net mtu: 1500 pciid: virtio1 promisc: false speed: -1 timestamping: [] type: ether fibre_channel_wwn: [] fips: false form_factor: Other fqdn: controller gather_subset: - min hostname: controller hostnqn: nqn.2014-08.org.nvmexpress:uuid:5350774e-8b5e-4dba-80a9-92d405981c1d interfaces: - lo - eth0 is_chroot: false iscsi_iqn: '' kernel: 5.14.0-661.el9.x86_64 kernel_version: '#1 SMP PREEMPT_DYNAMIC Fri Jan 16 09:19:22 UTC 2026' lo: active: true device: lo features: esp_hw_offload: off [fixed] esp_tx_csum_hw_offload: off [fixed] generic_receive_offload: 'on' generic_segmentation_offload: 'on' highdma: on 
[fixed] hsr_dup_offload: off [fixed] hsr_fwd_offload: off [fixed] hsr_tag_ins_offload: off [fixed] hsr_tag_rm_offload: off [fixed] hw_tc_offload: off [fixed] l2_fwd_offload: off [fixed] large_receive_offload: off [fixed] loopback: on [fixed] macsec_hw_offload: off [fixed] ntuple_filters: off [fixed] receive_hashing: off [fixed] rx_all: off [fixed] rx_checksumming: on [fixed] rx_fcs: off [fixed] rx_gro_hw: off [fixed] rx_gro_list: 'off' rx_udp_gro_forwarding: 'off' rx_udp_tunnel_port_offload: off [fixed] rx_vlan_filter: off [fixed] rx_vlan_offload: off [fixed] rx_vlan_stag_filter: off [fixed] rx_vlan_stag_hw_parse: off [fixed] scatter_gather: 'on' tcp_segmentation_offload: 'on' tls_hw_record: off [fixed] tls_hw_rx_offload: off [fixed] tls_hw_tx_offload: off [fixed] tx_checksum_fcoe_crc: off [fixed] tx_checksum_ip_generic: on [fixed] tx_checksum_ipv4: off [fixed] tx_checksum_ipv6: off [fixed] tx_checksum_sctp: on [fixed] tx_checksumming: 'on' tx_esp_segmentation: off [fixed] tx_fcoe_segmentation: off [fixed] tx_gre_csum_segmentation: off [fixed] tx_gre_segmentation: off [fixed] tx_gso_list: 'on' tx_gso_partial: off [fixed] tx_gso_robust: off [fixed] tx_ipxip4_segmentation: off [fixed] tx_ipxip6_segmentation: off [fixed] tx_nocache_copy: off [fixed] tx_scatter_gather: on [fixed] tx_scatter_gather_fraglist: on [fixed] tx_sctp_segmentation: 'on' tx_tcp6_segmentation: 'on' tx_tcp_ecn_segmentation: 'on' tx_tcp_mangleid_segmentation: 'on' tx_tcp_segmentation: 'on' tx_tunnel_remcsum_segmentation: off [fixed] tx_udp_segmentation: 'on' tx_udp_tnl_csum_segmentation: off [fixed] tx_udp_tnl_segmentation: off [fixed] tx_vlan_offload: off [fixed] tx_vlan_stag_hw_insert: off [fixed] vlan_challenged: on [fixed] hw_timestamp_filters: [] ipv4: address: 127.0.0.1 broadcast: '' netmask: 255.0.0.0 network: 127.0.0.0 prefix: '8' ipv6: - address: ::1 prefix: '128' scope: host mtu: 65536 promisc: false timestamping: [] type: loopback loadavg: 15m: 0.05 1m: 0.39 5m: 0.14 locally_reachable_ips: ipv4: - 38.102.83.83 - 127.0.0.0/8 - 127.0.0.1 ipv6: - ::1 - fe80::f816:3eff:fe4f:9ce2 lsb: {} lvm: N/A machine: x86_64 machine_id: 85ac68c10a6e7ae08ceb898dbdca0cb5 memfree_mb: 7084 memory_mb: nocache: free: 7292 used: 387 real: free: 7084 total: 7679 used: 595 swap: cached: 0 free: 0 total: 0 used: 0 memtotal_mb: 7679 module_setup: true mounts: - block_available: 20341226 block_size: 4096 block_total: 20954875 block_used: 613649 device: /dev/vda1 fstype: xfs inode_available: 41888619 inode_total: 41942512 inode_used: 53893 mount: / options: rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota size_available: 83317661696 size_total: 85831168000 uuid: 22ac9141-3960-4912-b20e-19fc8a328d40 nodename: controller os_family: RedHat pkg_mgr: dnf proc_cmdline: BOOT_IMAGE: (hd0,msdos1)/boot/vmlinuz-5.14.0-661.el9.x86_64 console: ttyS0,115200n8 crashkernel: 1G-2G:192M,2G-64G:256M,64G-:512M net.ifnames: '0' no_timer_check: true ro: true root: UUID=22ac9141-3960-4912-b20e-19fc8a328d40 processor: - '0' - AuthenticAMD - AMD EPYC-Rome Processor - '1' - AuthenticAMD - AMD EPYC-Rome Processor - '2' - AuthenticAMD - AMD EPYC-Rome Processor - '3' - AuthenticAMD - AMD EPYC-Rome Processor - '4' - AuthenticAMD - AMD EPYC-Rome Processor - '5' - AuthenticAMD - AMD EPYC-Rome Processor - '6' - AuthenticAMD - AMD EPYC-Rome Processor - '7' - AuthenticAMD - AMD EPYC-Rome Processor processor_cores: 1 processor_count: 8 processor_nproc: 8 processor_threads_per_core: 1 processor_vcpus: 8 product_name: OpenStack Nova product_serial: NA 
product_uuid: NA product_version: 26.3.1 python: executable: /usr/bin/python3 has_sslcontext: true type: cpython version: major: 3 micro: 25 minor: 9 releaselevel: final serial: 0 version_info: - 3 - 9 - 25 - final - 0 python_version: 3.9.25 real_group_id: 1000 real_user_id: 1000 selinux: config_mode: enforcing mode: enforcing policyvers: 33 status: enabled type: targeted selinux_python_present: true service_mgr: systemd ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBMKcpNrJpAx4CrcANXc7Mn4hy3MVnmG3hOgRXXsREK9Nr1ubSroZOpt4L9Fqy2kwyrYygLJWw/0Ub6eRiHYEsC8= ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256 ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIFnFPB1/0DZCQsBc2ZzNHDFdrc4p6KHouz9T+kXxP6Mv ssh_host_key_ed25519_public_keytype: ssh-ed25519 ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQC6HMCq1xxuJz4wWSauIns7H1KBFyS3Tk+19hW3WD1ndZ/bhbUbu/Rs5Ec0vOmp7s7EkHiuKXTatx4MjNzbUn6HsmYAZ8KDmxg1tlwm0YfSXEOBPVYxQj4PBNn4QD4CYpKyS1zS8Ely7Yym8eZ095ZwSz/BgCg/D7Tv5gEKNlMDY+Nh3eHp0yZHUCUd5znPesQynCClZjvXcU2ufx+FkKmwymn+yuVvC/scPXO17zyVxad783DV/4CFZEQD7NCTT0eX/8JTKhIBcNZlSHVmJZmRhdbl6ZKos4vLh3TecvWjEXfNuQL/Kco5I0KsbwWlSnBR0WPyVi06rg+gGruQdT2NEqJy9cGWkTLILsm201OrkG2ctFFqBnNqhSSHFCdHr89m2f5+FIx6vuPKo41YBK4Adz0RWxmR2/gZEIz7eVaPTNVtGS173eNxki861y7wowKHtZfqyo5YrgCJvtHDXprxnPqw6uMoOCOwCjfOV6IgjQfNH/R2CrKf9LRyF2/9pys= ssh_host_key_rsa_public_keytype: ssh-rsa swapfree_mb: 0 swaptotal_mb: 0 system: Linux system_capabilities: - '' system_capabilities_enforced: 'True' system_vendor: OpenStack Foundation uptime_seconds: 74 user_dir: /home/zuul user_gecos: '' user_gid: 1000 user_id: zuul user_shell: /bin/bash user_uid: 1000 userspace_architecture: x86_64 userspace_bits: '64' virtualization_role: guest virtualization_tech_guest: - openstack virtualization_tech_host: - kvm virtualization_type: openstack zuul_change_list: - data-plane-adoption home/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/0000755000175000017500000000000015134437263025102 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/delorean.repo.md50000644000175000017500000000004115134411162030227 0ustar zuulzuulc3923531bcda0b0811b2d5053f189beb home/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/repo-setup-centos-appstream.repo0000644000175000017500000000031615134411163033346 0ustar zuulzuul [repo-setup-centos-appstream] name=repo-setup-centos-appstream baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/AppStream/$basearch/os/ gpgcheck=0 enabled=1 home/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/repo-setup-centos-baseos.repo0000644000175000017500000000030415134411163032623 0ustar zuulzuul [repo-setup-centos-baseos] name=repo-setup-centos-baseos baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/BaseOS/$basearch/os/ gpgcheck=0 enabled=1 ././@LongLink0000644000000000000000000000015400000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/repo-setup-centos-highavailability.repohome/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/repo-setup-centos-highavailabili0000644000175000017500000000034215134411163033342 0ustar zuulzuul [repo-setup-centos-highavailability] name=repo-setup-centos-highavailability baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/HighAvailability/$basearch/os/ gpgcheck=0 enabled=1 
././@LongLink0000644000000000000000000000014600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/repo-setup-centos-powertools.repohome/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/repo-setup-centos-powertools.rep0000644000175000017500000000031115134411163033403 0ustar zuulzuul [repo-setup-centos-powertools] name=repo-setup-centos-powertools baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/CRB/$basearch/os/ gpgcheck=0 enabled=1 home/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/delorean-antelope-testing.repo0000644000175000017500000000317215134411163033034 0ustar zuulzuul[delorean-antelope-testing] name=dlrn-antelope-testing baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/deps/latest/ enabled=1 gpgcheck=0 module_hotfixes=1 [delorean-antelope-build-deps] name=dlrn-antelope-build-deps baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/build-deps/latest/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-rabbitmq] name=centos9-rabbitmq baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/messaging/$basearch/rabbitmq-38/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-storage] name=centos9-storage baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/storage/$basearch/ceph-reef/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-opstools] name=centos9-opstools baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/opstools/$basearch/collectd-5/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-nfv-ovs] name=NFV SIG OpenvSwitch baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/nfv/$basearch/openvswitch-2/ gpgcheck=0 enabled=1 module_hotfixes=1 # epel is required for Ceph Reef [epel-low-priority] name=Extra Packages for Enterprise Linux $releasever - $basearch metalink=https://mirrors.fedoraproject.org/metalink?repo=epel-$releasever&arch=$basearch&infra=$infra&content=$contentdir enabled=1 gpgcheck=0 countme=1 priority=100 includepkgs=libarrow*,parquet*,python3-asyncssh,re2,python3-grpcio,grpc*,abseil*,thrift*,blake3 home/zuul/zuul-output/logs/ci-framework-data/artifacts/repositories/delorean.repo0000644000175000017500000001341515134411163027555 0ustar zuulzuul[delorean-component-barbican] name=delorean-openstack-barbican-42b4c41831408a8e323fec3c8983b5c793b64874 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/barbican/42/b4/42b4c41831408a8e323fec3c8983b5c793b64874_08052e9d enabled=1 gpgcheck=0 priority=1 [delorean-component-baremetal] name=delorean-python-glean-10df0bd91b9bc5c9fd9cc02d75c0084cd4da29a7 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/baremetal/10/df/10df0bd91b9bc5c9fd9cc02d75c0084cd4da29a7_36137eb3 enabled=1 gpgcheck=0 priority=1 [delorean-component-cinder] name=delorean-openstack-cinder-1c00d6490d88e436f26efb71f2ac96e75252e97c baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/cinder/1c/00/1c00d6490d88e436f26efb71f2ac96e75252e97c_f716f000 enabled=1 gpgcheck=0 priority=1 [delorean-component-clients] name=delorean-python-stevedore-c4acc5639fd2329372142e39464fcca0209b0018 
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/clients/c4/ac/c4acc5639fd2329372142e39464fcca0209b0018_d3ef8337 enabled=1 gpgcheck=0 priority=1 [delorean-component-cloudops] name=delorean-python-cloudkitty-tests-tempest-2c80f80e02c5accd099187ea762c8f8389bd7905 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/cloudops/2c/80/2c80f80e02c5accd099187ea762c8f8389bd7905_33e4dd93 enabled=1 gpgcheck=0 priority=1 [delorean-component-common] name=delorean-os-refresh-config-9bfc52b5049be2d8de6134d662fdde9dfa48960f baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/common/9b/fc/9bfc52b5049be2d8de6134d662fdde9dfa48960f_b85780e6 enabled=1 gpgcheck=0 priority=1 [delorean-component-compute] name=delorean-openstack-nova-6f8decf0b4f1aa2e96292b6a2ffc28249fe4af5e baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/compute/6f/8d/6f8decf0b4f1aa2e96292b6a2ffc28249fe4af5e_dc05b899 enabled=1 gpgcheck=0 priority=1 [delorean-component-designate] name=delorean-python-designate-tests-tempest-347fdbc9b4595a10b726526b3c0b5928e5b7fcf2 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/designate/34/7f/347fdbc9b4595a10b726526b3c0b5928e5b7fcf2_3fd39337 enabled=1 gpgcheck=0 priority=1 [delorean-component-glance] name=delorean-openstack-glance-1fd12c29b339f30fe823e2b5beba14b5f241e52a baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/glance/1f/d1/1fd12c29b339f30fe823e2b5beba14b5f241e52a_0d693729 enabled=1 gpgcheck=0 priority=1 [delorean-component-keystone] name=delorean-openstack-keystone-e4b40af0ae3698fbbbbfb8c22468b33aae80e6d7 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/keystone/e4/b4/e4b40af0ae3698fbbbbfb8c22468b33aae80e6d7_264c03cc enabled=1 gpgcheck=0 priority=1 [delorean-component-manila] name=delorean-openstack-manila-3c01b7181572c95dac462eb19c3121e36cb0fe95 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/manila/3c/01/3c01b7181572c95dac462eb19c3121e36cb0fe95_912dfd18 enabled=1 gpgcheck=0 priority=1 [delorean-component-network] name=delorean-python-whitebox-neutron-tests-tempest-12cf06ce36a79a584fc757f4c25ff96845573c93 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/network/12/cf/12cf06ce36a79a584fc757f4c25ff96845573c93_3ed3aba3 enabled=1 gpgcheck=0 priority=1 [delorean-component-octavia] name=delorean-openstack-octavia-ba397f07a7331190208c93368ee23826ac4e2707 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/octavia/ba/39/ba397f07a7331190208c93368ee23826ac4e2707_9d6e596a enabled=1 gpgcheck=0 priority=1 [delorean-component-optimize] name=delorean-openstack-watcher-c014f81a8647287f6dcc339321c1256f5a2e82d5 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/optimize/c0/14/c014f81a8647287f6dcc339321c1256f5a2e82d5_bcbfdccc enabled=1 gpgcheck=0 priority=1 [delorean-component-podified] name=delorean-ansible-config_template-5ccaa22121a7ff05620975540d81f6efb077d8db 
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/podified/5c/ca/5ccaa22121a7ff05620975540d81f6efb077d8db_83eb7cc2 enabled=1 gpgcheck=0 priority=1 [delorean-component-puppet] name=delorean-puppet-ceph-7352068d7b8c84ded636ab3158dafa6f3851951e baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/puppet/73/52/7352068d7b8c84ded636ab3158dafa6f3851951e_7cde1ad1 enabled=1 gpgcheck=0 priority=1 [delorean-component-swift] name=delorean-openstack-swift-dc98a8463506ac520c469adb0ef47d0f7753905a baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/swift/dc/98/dc98a8463506ac520c469adb0ef47d0f7753905a_9d02f069 enabled=1 gpgcheck=0 priority=1 [delorean-component-tempest] name=delorean-python-tempestconf-8515371b7cceebd4282e09f1d8f0cc842df82855 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/tempest/85/15/8515371b7cceebd4282e09f1d8f0cc842df82855_a1e336c7 enabled=1 gpgcheck=0 priority=1 [delorean-component-ui] name=delorean-openstack-heat-ui-013accbfd179753bc3f0d1f4e5bed07a4fd9f771 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/ui/01/3a/013accbfd179753bc3f0d1f4e5bed07a4fd9f771_0c88e467 enabled=1 gpgcheck=0 priority=1 home/zuul/zuul-output/logs/ci-framework-data/artifacts/install_yamls.sh0000644000175000017500000000033215134411252025546 0ustar zuulzuulexport BMO_SETUP=False export OUT=/home/zuul/ci-framework-data/artifacts/manifests export OUTPUT_DIR=/home/zuul/ci-framework-data/artifacts/edpm export CHECKOUT_FROM_OPENSTACK_REF=true export OPENSTACK_K8S_BRANCH=main home/zuul/zuul-output/logs/ci-framework-data/artifacts/NetworkManager/0000755000175000017500000000000015134437263025277 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/NetworkManager/ens3.nmconnection0000644000175000017500000000026215134432007030552 0ustar zuulzuul[connection] id=ens3 uuid=e94616dd-9c37-40d7-ae7c-f2a49628045b type=ethernet interface-name=ens3 [ethernet] [ipv4] method=auto [ipv6] addr-gen-mode=eui64 method=auto [proxy] ././@LongLink0000644000000000000000000000014600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/NetworkManager/ci-private-network.nmconnectionhome/zuul/zuul-output/logs/ci-framework-data/artifacts/NetworkManager/ci-private-network.nmconnectio0000644000175000017500000000051315134432007033255 0ustar zuulzuul[connection] id=ci-private-network uuid=49a09e2a-1c35-5dfb-939a-485407ec59cc type=ethernet autoconnect=true interface-name=eth1 [ethernet] mac-address=fa:16:3e:b2:1f:bf mtu=1500 [ipv4] method=manual addresses=192.168.122.11/24 never-default=true gateway=192.168.122.1 [ipv6] addr-gen-mode=stable-privacy method=disabled [proxy] ././@LongLink0000644000000000000000000000015100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/NetworkManager/ci-private-network-20.nmconnectionhome/zuul/zuul-output/logs/ci-framework-data/artifacts/NetworkManager/ci-private-network-20.nmconnec0000644000175000017500000000051115134432007032756 0ustar zuulzuul[connection] id=ci-private-network-20 uuid=d098fa4a-2eea-56b0-9575-e256eba1b34b type=vlan autoconnect=true interface-name=eth1.20 [ethernet] cloned-mac-address=52:54:00:91:b6:a0 mtu=1496 [vlan] flags=1 id=20 parent=eth1 [ipv4] method=manual addresses=172.17.0.4/24 [ipv6] addr-gen-mode=stable-privacy 
method=disabled [proxy] ././@LongLink0000644000000000000000000000015100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/NetworkManager/ci-private-network-21.nmconnectionhome/zuul/zuul-output/logs/ci-framework-data/artifacts/NetworkManager/ci-private-network-21.nmconnec0000644000175000017500000000051115134432007032757 0ustar zuulzuul[connection] id=ci-private-network-21 uuid=895bbe7d-7474-5695-a746-2dbfe71c929e type=vlan autoconnect=true interface-name=eth1.21 [ethernet] cloned-mac-address=52:54:00:8c:4a:57 mtu=1496 [vlan] flags=1 id=21 parent=eth1 [ipv4] method=manual addresses=172.18.0.4/24 [ipv6] addr-gen-mode=stable-privacy method=disabled [proxy] ././@LongLink0000644000000000000000000000015100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/NetworkManager/ci-private-network-23.nmconnectionhome/zuul/zuul-output/logs/ci-framework-data/artifacts/NetworkManager/ci-private-network-23.nmconnec0000644000175000017500000000051115134432007032761 0ustar zuulzuul[connection] id=ci-private-network-23 uuid=f45b5e94-8e8d-50da-b2f6-bee1c43aec1e type=vlan autoconnect=true interface-name=eth1.23 [ethernet] cloned-mac-address=52:54:00:e6:85:75 mtu=1496 [vlan] flags=1 id=23 parent=eth1 [ipv4] method=manual addresses=172.20.0.4/24 [ipv6] addr-gen-mode=stable-privacy method=disabled [proxy] ././@LongLink0000644000000000000000000000015100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/NetworkManager/ci-private-network-22.nmconnectionhome/zuul/zuul-output/logs/ci-framework-data/artifacts/NetworkManager/ci-private-network-22.nmconnec0000644000175000017500000000051115134432007032760 0ustar zuulzuul[connection] id=ci-private-network-22 uuid=99b86f40-9c5c-5b02-a730-339caf260c08 type=vlan autoconnect=true interface-name=eth1.22 [ethernet] cloned-mac-address=52:54:00:34:d6:b6 mtu=1496 [vlan] flags=1 id=22 parent=eth1 [ipv4] method=manual addresses=172.19.0.4/24 [ipv6] addr-gen-mode=stable-privacy method=disabled [proxy] home/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/0000755000175000017500000000000015134437263024375 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/delorean-antelope-testing.repo0000644000175000017500000000317215134432007032327 0ustar zuulzuul[delorean-antelope-testing] name=dlrn-antelope-testing baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/deps/latest/ enabled=1 gpgcheck=0 module_hotfixes=1 [delorean-antelope-build-deps] name=dlrn-antelope-build-deps baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/build-deps/latest/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-rabbitmq] name=centos9-rabbitmq baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/messaging/$basearch/rabbitmq-38/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-storage] name=centos9-storage baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/storage/$basearch/ceph-reef/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-opstools] name=centos9-opstools baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/opstools/$basearch/collectd-5/ enabled=1 gpgcheck=0 module_hotfixes=1 [centos9-nfv-ovs] name=NFV SIG OpenvSwitch 
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/SIGs/9-stream/nfv/$basearch/openvswitch-2/ gpgcheck=0 enabled=1 module_hotfixes=1 # epel is required for Ceph Reef [epel-low-priority] name=Extra Packages for Enterprise Linux $releasever - $basearch metalink=https://mirrors.fedoraproject.org/metalink?repo=epel-$releasever&arch=$basearch&infra=$infra&content=$contentdir enabled=1 gpgcheck=0 countme=1 priority=100 includepkgs=libarrow*,parquet*,python3-asyncssh,re2,python3-grpcio,grpc*,abseil*,thrift*,blake3 home/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/delorean.repo0000644000175000017500000001341515134432007027050 0ustar zuulzuul[delorean-component-barbican] name=delorean-openstack-barbican-42b4c41831408a8e323fec3c8983b5c793b64874 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/barbican/42/b4/42b4c41831408a8e323fec3c8983b5c793b64874_08052e9d enabled=1 gpgcheck=0 priority=1 [delorean-component-baremetal] name=delorean-python-glean-10df0bd91b9bc5c9fd9cc02d75c0084cd4da29a7 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/baremetal/10/df/10df0bd91b9bc5c9fd9cc02d75c0084cd4da29a7_36137eb3 enabled=1 gpgcheck=0 priority=1 [delorean-component-cinder] name=delorean-openstack-cinder-1c00d6490d88e436f26efb71f2ac96e75252e97c baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/cinder/1c/00/1c00d6490d88e436f26efb71f2ac96e75252e97c_f716f000 enabled=1 gpgcheck=0 priority=1 [delorean-component-clients] name=delorean-python-stevedore-c4acc5639fd2329372142e39464fcca0209b0018 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/clients/c4/ac/c4acc5639fd2329372142e39464fcca0209b0018_d3ef8337 enabled=1 gpgcheck=0 priority=1 [delorean-component-cloudops] name=delorean-python-cloudkitty-tests-tempest-2c80f80e02c5accd099187ea762c8f8389bd7905 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/cloudops/2c/80/2c80f80e02c5accd099187ea762c8f8389bd7905_33e4dd93 enabled=1 gpgcheck=0 priority=1 [delorean-component-common] name=delorean-os-refresh-config-9bfc52b5049be2d8de6134d662fdde9dfa48960f baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/common/9b/fc/9bfc52b5049be2d8de6134d662fdde9dfa48960f_b85780e6 enabled=1 gpgcheck=0 priority=1 [delorean-component-compute] name=delorean-openstack-nova-6f8decf0b4f1aa2e96292b6a2ffc28249fe4af5e baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/compute/6f/8d/6f8decf0b4f1aa2e96292b6a2ffc28249fe4af5e_dc05b899 enabled=1 gpgcheck=0 priority=1 [delorean-component-designate] name=delorean-python-designate-tests-tempest-347fdbc9b4595a10b726526b3c0b5928e5b7fcf2 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/designate/34/7f/347fdbc9b4595a10b726526b3c0b5928e5b7fcf2_3fd39337 enabled=1 gpgcheck=0 priority=1 [delorean-component-glance] name=delorean-openstack-glance-1fd12c29b339f30fe823e2b5beba14b5f241e52a baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/glance/1f/d1/1fd12c29b339f30fe823e2b5beba14b5f241e52a_0d693729 enabled=1 gpgcheck=0 priority=1 [delorean-component-keystone] 
name=delorean-openstack-keystone-e4b40af0ae3698fbbbbfb8c22468b33aae80e6d7 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/keystone/e4/b4/e4b40af0ae3698fbbbbfb8c22468b33aae80e6d7_264c03cc enabled=1 gpgcheck=0 priority=1 [delorean-component-manila] name=delorean-openstack-manila-3c01b7181572c95dac462eb19c3121e36cb0fe95 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/manila/3c/01/3c01b7181572c95dac462eb19c3121e36cb0fe95_912dfd18 enabled=1 gpgcheck=0 priority=1 [delorean-component-network] name=delorean-python-whitebox-neutron-tests-tempest-12cf06ce36a79a584fc757f4c25ff96845573c93 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/network/12/cf/12cf06ce36a79a584fc757f4c25ff96845573c93_3ed3aba3 enabled=1 gpgcheck=0 priority=1 [delorean-component-octavia] name=delorean-openstack-octavia-ba397f07a7331190208c93368ee23826ac4e2707 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/octavia/ba/39/ba397f07a7331190208c93368ee23826ac4e2707_9d6e596a enabled=1 gpgcheck=0 priority=1 [delorean-component-optimize] name=delorean-openstack-watcher-c014f81a8647287f6dcc339321c1256f5a2e82d5 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/optimize/c0/14/c014f81a8647287f6dcc339321c1256f5a2e82d5_bcbfdccc enabled=1 gpgcheck=0 priority=1 [delorean-component-podified] name=delorean-ansible-config_template-5ccaa22121a7ff05620975540d81f6efb077d8db baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/podified/5c/ca/5ccaa22121a7ff05620975540d81f6efb077d8db_83eb7cc2 enabled=1 gpgcheck=0 priority=1 [delorean-component-puppet] name=delorean-puppet-ceph-7352068d7b8c84ded636ab3158dafa6f3851951e baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/puppet/73/52/7352068d7b8c84ded636ab3158dafa6f3851951e_7cde1ad1 enabled=1 gpgcheck=0 priority=1 [delorean-component-swift] name=delorean-openstack-swift-dc98a8463506ac520c469adb0ef47d0f7753905a baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/swift/dc/98/dc98a8463506ac520c469adb0ef47d0f7753905a_9d02f069 enabled=1 gpgcheck=0 priority=1 [delorean-component-tempest] name=delorean-python-tempestconf-8515371b7cceebd4282e09f1d8f0cc842df82855 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/tempest/85/15/8515371b7cceebd4282e09f1d8f0cc842df82855_a1e336c7 enabled=1 gpgcheck=0 priority=1 [delorean-component-ui] name=delorean-openstack-heat-ui-013accbfd179753bc3f0d1f4e5bed07a4fd9f771 baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org:8080/rdo//centos9-antelope/component/ui/01/3a/013accbfd179753bc3f0d1f4e5bed07a4fd9f771_0c88e467 enabled=1 gpgcheck=0 priority=1 home/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/delorean.repo.md50000644000175000017500000000004115134432007027523 0ustar zuulzuulc3923531bcda0b0811b2d5053f189beb home/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/repo-setup-centos-appstream.repo0000644000175000017500000000031615134432007032641 0ustar zuulzuul [repo-setup-centos-appstream] name=repo-setup-centos-appstream 
baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/AppStream/$basearch/os/ gpgcheck=0 enabled=1 home/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/repo-setup-centos-baseos.repo0000644000175000017500000000030415134432007032116 0ustar zuulzuul [repo-setup-centos-baseos] name=repo-setup-centos-baseos baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/BaseOS/$basearch/os/ gpgcheck=0 enabled=1 ././@LongLink0000644000000000000000000000015100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/repo-setup-centos-highavailability.repohome/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/repo-setup-centos-highavailability.0000644000175000017500000000034215134432007033270 0ustar zuulzuul [repo-setup-centos-highavailability] name=repo-setup-centos-highavailability baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/HighAvailability/$basearch/os/ gpgcheck=0 enabled=1 home/zuul/zuul-output/logs/ci-framework-data/artifacts/yum_repos/repo-setup-centos-powertools.repo0000644000175000017500000000031115134432007033055 0ustar zuulzuul [repo-setup-centos-powertools] name=repo-setup-centos-powertools baseurl=http://mirror.regionone.vexxhost-nodepool-tripleo.rdoproject.org/centos-stream/9-stream/CRB/$basearch/os/ gpgcheck=0 enabled=1 home/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible_facts.2026-01-22_15-14/0000777000175000017500000000000015134437343026731 5ustar zuulzuul././@LongLink0000644000000000000000000000015300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible_facts.2026-01-22_15-14/ansible_facts_cache/home/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible_facts.2026-01-22_15-14/ansible_facts_0000755000175000017500000000000015134437343031602 5ustar zuulzuul././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible_facts.2026-01-22_15-14/ansible_facts_cache/localhosthome/zuul/zuul-output/logs/ci-framework-data/artifacts/ansible_facts.2026-01-22_15-14/ansible_facts_0000644000175000017500000021615315134437343031614 0ustar zuulzuul{ "_ansible_facts_gathered": true, "ansible_all_ipv4_addresses": [ "192.168.122.11", "172.19.0.4", "172.17.0.4", "172.20.0.4", "38.102.83.83", "172.18.0.4" ], "ansible_all_ipv6_addresses": [ "fe80::f816:3eff:fe4f:9ce2" ], "ansible_apparmor": { "status": "disabled" }, "ansible_architecture": "x86_64", "ansible_bios_date": "04/01/2014", "ansible_bios_vendor": "SeaBIOS", "ansible_bios_version": "1.15.0-1", "ansible_board_asset_tag": "NA", "ansible_board_name": "NA", "ansible_board_serial": "NA", "ansible_board_vendor": "NA", "ansible_board_version": "NA", "ansible_chassis_asset_tag": "NA", "ansible_chassis_serial": "NA", "ansible_chassis_vendor": "QEMU", "ansible_chassis_version": "pc-i440fx-6.2", "ansible_cmdline": { "BOOT_IMAGE": "(hd0,msdos1)/boot/vmlinuz-5.14.0-661.el9.x86_64", "console": "ttyS0,115200n8", "crashkernel": "1G-2G:192M,2G-64G:256M,64G-:512M", "net.ifnames": "0", "no_timer_check": true, "ro": true, "root": "UUID=22ac9141-3960-4912-b20e-19fc8a328d40" }, "ansible_date_time": { "date": "2026-01-22", "day": "22", "epoch": "1769083487", "epoch_int": "1769083487", "hour": "12", "iso8601": "2026-01-22T12:04:47Z", "iso8601_basic": "20260122T120447978515", "iso8601_basic_short": "20260122T120447", "iso8601_micro": 
"2026-01-22T12:04:47.978515Z", "minute": "04", "month": "01", "second": "47", "time": "12:04:47", "tz": "UTC", "tz_dst": "UTC", "tz_offset": "+0000", "weekday": "Thursday", "weekday_number": "4", "weeknumber": "03", "year": "2026" }, "ansible_default_ipv4": { "address": "38.102.83.83", "alias": "eth0", "broadcast": "38.102.83.255", "gateway": "38.102.83.1", "interface": "eth0", "macaddress": "fa:16:3e:4f:9c:e2", "mtu": 1500, "netmask": "255.255.255.0", "network": "38.102.83.0", "prefix": "24", "type": "ether" }, "ansible_default_ipv6": {}, "ansible_device_links": { "ids": { "sr0": [ "ata-QEMU_DVD-ROM_QM00001" ] }, "labels": { "sr0": [ "config-2" ] }, "masters": {}, "uuids": { "sr0": [ "2026-01-22-11-49-38-00" ], "vda1": [ "22ac9141-3960-4912-b20e-19fc8a328d40" ] } }, "ansible_devices": { "sr0": { "holders": [], "host": "", "links": { "ids": [ "ata-QEMU_DVD-ROM_QM00001" ], "labels": [ "config-2" ], "masters": [], "uuids": [ "2026-01-22-11-49-38-00" ] }, "model": "QEMU DVD-ROM", "partitions": {}, "removable": "1", "rotational": "1", "sas_address": null, "sas_device_handle": null, "scheduler_mode": "mq-deadline", "sectors": "964", "sectorsize": "2048", "size": "482.00 KB", "support_discard": "2048", "vendor": "QEMU", "virtual": 1 }, "vda": { "holders": [], "host": "", "links": { "ids": [], "labels": [], "masters": [], "uuids": [] }, "model": null, "partitions": { "vda1": { "holders": [], "links": { "ids": [], "labels": [], "masters": [], "uuids": [ "22ac9141-3960-4912-b20e-19fc8a328d40" ] }, "sectors": "167770079", "sectorsize": 512, "size": "80.00 GB", "start": "2048", "uuid": "22ac9141-3960-4912-b20e-19fc8a328d40" } }, "removable": "0", "rotational": "1", "sas_address": null, "sas_device_handle": null, "scheduler_mode": "none", "sectors": "167772160", "sectorsize": "512", "size": "80.00 GB", "support_discard": "512", "vendor": "0x1af4", "virtual": 1 } }, "ansible_distribution": "CentOS", "ansible_distribution_file_parsed": true, "ansible_distribution_file_path": "/etc/centos-release", "ansible_distribution_file_variety": "CentOS", "ansible_distribution_major_version": "9", "ansible_distribution_release": "Stream", "ansible_distribution_version": "9", "ansible_dns": { "nameservers": [ "192.168.122.10", "199.204.44.24", "199.204.47.54" ] }, "ansible_domain": "", "ansible_effective_group_id": 1000, "ansible_effective_user_id": 1000, "ansible_env": { "BASH_FUNC_which%%": "() { ( alias;\n eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n}", "DBUS_SESSION_BUS_ADDRESS": "unix:path=/run/user/1000/bus", "DEBUGINFOD_IMA_CERT_PATH": "/etc/keys/ima:", "DEBUGINFOD_URLS": "https://debuginfod.centos.org/ ", "HOME": "/home/zuul", "LANG": "en_US.UTF-8", "LESSOPEN": "||/usr/bin/lesspipe.sh %s", "LOGNAME": "zuul", "MOTD_SHOWN": "pam", "PATH": "/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin", "PWD": "/home/zuul/src/review.rdoproject.org/rdo-jobs/playbooks/data_plane_adoption", "SELINUX_LEVEL_REQUESTED": "", "SELINUX_ROLE_REQUESTED": "", "SELINUX_USE_CURRENT_RANGE": "", "SHELL": "/bin/bash", "SHLVL": "2", "SSH_CLIENT": "38.102.83.114 34212 22", "SSH_CONNECTION": "38.102.83.114 34212 38.102.83.83 22", "USER": "zuul", "XDG_RUNTIME_DIR": "/run/user/1000", "XDG_SESSION_CLASS": "user", "XDG_SESSION_ID": "10", "XDG_SESSION_TYPE": "tty", "_": "/usr/bin/python3", "which_declare": "declare -f" }, "ansible_eth0": { "active": true, "device": "eth0", "features": { "esp_hw_offload": "off [fixed]", "esp_tx_csum_hw_offload": 
"off [fixed]", "generic_receive_offload": "on", "generic_segmentation_offload": "on", "highdma": "on [fixed]", "hsr_dup_offload": "off [fixed]", "hsr_fwd_offload": "off [fixed]", "hsr_tag_ins_offload": "off [fixed]", "hsr_tag_rm_offload": "off [fixed]", "hw_tc_offload": "off [fixed]", "l2_fwd_offload": "off [fixed]", "large_receive_offload": "off [fixed]", "loopback": "off [fixed]", "macsec_hw_offload": "off [fixed]", "ntuple_filters": "off [fixed]", "receive_hashing": "off [fixed]", "rx_all": "off [fixed]", "rx_checksumming": "on [fixed]", "rx_fcs": "off [fixed]", "rx_gro_hw": "on", "rx_gro_list": "off", "rx_udp_gro_forwarding": "off", "rx_udp_tunnel_port_offload": "off [fixed]", "rx_vlan_filter": "on [fixed]", "rx_vlan_offload": "off [fixed]", "rx_vlan_stag_filter": "off [fixed]", "rx_vlan_stag_hw_parse": "off [fixed]", "scatter_gather": "on", "tcp_segmentation_offload": "on", "tls_hw_record": "off [fixed]", "tls_hw_rx_offload": "off [fixed]", "tls_hw_tx_offload": "off [fixed]", "tx_checksum_fcoe_crc": "off [fixed]", "tx_checksum_ip_generic": "on", "tx_checksum_ipv4": "off [fixed]", "tx_checksum_ipv6": "off [fixed]", "tx_checksum_sctp": "off [fixed]", "tx_checksumming": "on", "tx_esp_segmentation": "off [fixed]", "tx_fcoe_segmentation": "off [fixed]", "tx_gre_csum_segmentation": "off [fixed]", "tx_gre_segmentation": "off [fixed]", "tx_gso_list": "off [fixed]", "tx_gso_partial": "off [fixed]", "tx_gso_robust": "on [fixed]", "tx_ipxip4_segmentation": "off [fixed]", "tx_ipxip6_segmentation": "off [fixed]", "tx_nocache_copy": "off", "tx_scatter_gather": "on", "tx_scatter_gather_fraglist": "off [fixed]", "tx_sctp_segmentation": "off [fixed]", "tx_tcp6_segmentation": "on", "tx_tcp_ecn_segmentation": "on", "tx_tcp_mangleid_segmentation": "off", "tx_tcp_segmentation": "on", "tx_tunnel_remcsum_segmentation": "off [fixed]", "tx_udp_segmentation": "off [fixed]", "tx_udp_tnl_csum_segmentation": "off [fixed]", "tx_udp_tnl_segmentation": "off [fixed]", "tx_vlan_offload": "off [fixed]", "tx_vlan_stag_hw_insert": "off [fixed]", "vlan_challenged": "off [fixed]" }, "hw_timestamp_filters": [], "ipv4": { "address": "38.102.83.83", "broadcast": "38.102.83.255", "netmask": "255.255.255.0", "network": "38.102.83.0", "prefix": "24" }, "ipv6": [ { "address": "fe80::f816:3eff:fe4f:9ce2", "prefix": "64", "scope": "link" } ], "macaddress": "fa:16:3e:4f:9c:e2", "module": "virtio_net", "mtu": 1500, "pciid": "virtio1", "promisc": false, "speed": -1, "timestamping": [], "type": "ether" }, "ansible_eth1": { "active": true, "device": "eth1", "features": { "esp_hw_offload": "off [fixed]", "esp_tx_csum_hw_offload": "off [fixed]", "generic_receive_offload": "on", "generic_segmentation_offload": "on", "highdma": "on [fixed]", "hsr_dup_offload": "off [fixed]", "hsr_fwd_offload": "off [fixed]", "hsr_tag_ins_offload": "off [fixed]", "hsr_tag_rm_offload": "off [fixed]", "hw_tc_offload": "off [fixed]", "l2_fwd_offload": "off [fixed]", "large_receive_offload": "off [fixed]", "loopback": "off [fixed]", "macsec_hw_offload": "off [fixed]", "ntuple_filters": "off [fixed]", "receive_hashing": "off [fixed]", "rx_all": "off [fixed]", "rx_checksumming": "on [fixed]", "rx_fcs": "off [fixed]", "rx_gro_hw": "on", "rx_gro_list": "off", "rx_udp_gro_forwarding": "off", "rx_udp_tunnel_port_offload": "off [fixed]", "rx_vlan_filter": "on [fixed]", "rx_vlan_offload": "off [fixed]", "rx_vlan_stag_filter": "off [fixed]", "rx_vlan_stag_hw_parse": "off [fixed]", "scatter_gather": "on", "tcp_segmentation_offload": "on", "tls_hw_record": "off [fixed]", 
"tls_hw_rx_offload": "off [fixed]", "tls_hw_tx_offload": "off [fixed]", "tx_checksum_fcoe_crc": "off [fixed]", "tx_checksum_ip_generic": "on", "tx_checksum_ipv4": "off [fixed]", "tx_checksum_ipv6": "off [fixed]", "tx_checksum_sctp": "off [fixed]", "tx_checksumming": "on", "tx_esp_segmentation": "off [fixed]", "tx_fcoe_segmentation": "off [fixed]", "tx_gre_csum_segmentation": "off [fixed]", "tx_gre_segmentation": "off [fixed]", "tx_gso_list": "off [fixed]", "tx_gso_partial": "off [fixed]", "tx_gso_robust": "on [fixed]", "tx_ipxip4_segmentation": "off [fixed]", "tx_ipxip6_segmentation": "off [fixed]", "tx_nocache_copy": "off", "tx_scatter_gather": "on", "tx_scatter_gather_fraglist": "off [fixed]", "tx_sctp_segmentation": "off [fixed]", "tx_tcp6_segmentation": "on", "tx_tcp_ecn_segmentation": "on", "tx_tcp_mangleid_segmentation": "off", "tx_tcp_segmentation": "on", "tx_tunnel_remcsum_segmentation": "off [fixed]", "tx_udp_segmentation": "off [fixed]", "tx_udp_tnl_csum_segmentation": "off [fixed]", "tx_udp_tnl_segmentation": "off [fixed]", "tx_vlan_offload": "off [fixed]", "tx_vlan_stag_hw_insert": "off [fixed]", "vlan_challenged": "off [fixed]" }, "hw_timestamp_filters": [], "ipv4": { "address": "192.168.122.11", "broadcast": "192.168.122.255", "netmask": "255.255.255.0", "network": "192.168.122.0", "prefix": "24" }, "macaddress": "fa:16:3e:b2:1f:bf", "module": "virtio_net", "mtu": 1500, "pciid": "virtio5", "promisc": false, "speed": -1, "timestamping": [], "type": "ether" }, "ansible_eth1.20": { "active": true, "device": "eth1.20", "features": { "esp_hw_offload": "off [fixed]", "esp_tx_csum_hw_offload": "off [fixed]", "generic_receive_offload": "on", "generic_segmentation_offload": "on", "highdma": "on", "hsr_dup_offload": "off [fixed]", "hsr_fwd_offload": "off [fixed]", "hsr_tag_ins_offload": "off [fixed]", "hsr_tag_rm_offload": "off [fixed]", "hw_tc_offload": "off [fixed]", "l2_fwd_offload": "off [fixed]", "large_receive_offload": "off [fixed]", "loopback": "off [fixed]", "macsec_hw_offload": "off [fixed]", "ntuple_filters": "off [fixed]", "receive_hashing": "off [fixed]", "rx_all": "off [fixed]", "rx_checksumming": "off [fixed]", "rx_fcs": "off [fixed]", "rx_gro_hw": "off [fixed]", "rx_gro_list": "off", "rx_udp_gro_forwarding": "off", "rx_udp_tunnel_port_offload": "off [fixed]", "rx_vlan_filter": "off [fixed]", "rx_vlan_offload": "off [fixed]", "rx_vlan_stag_filter": "off [fixed]", "rx_vlan_stag_hw_parse": "off [fixed]", "scatter_gather": "on", "tcp_segmentation_offload": "on", "tls_hw_record": "off [fixed]", "tls_hw_rx_offload": "off [fixed]", "tls_hw_tx_offload": "off [fixed]", "tx_checksum_fcoe_crc": "off [requested on]", "tx_checksum_ip_generic": "on", "tx_checksum_ipv4": "off [fixed]", "tx_checksum_ipv6": "off [fixed]", "tx_checksum_sctp": "off [requested on]", "tx_checksumming": "on", "tx_esp_segmentation": "off [fixed]", "tx_fcoe_segmentation": "off [requested on]", "tx_gre_csum_segmentation": "off [requested on]", "tx_gre_segmentation": "off [requested on]", "tx_gso_list": "on", "tx_gso_partial": "off [fixed]", "tx_gso_robust": "off [fixed]", "tx_ipxip4_segmentation": "off [requested on]", "tx_ipxip6_segmentation": "off [requested on]", "tx_nocache_copy": "off", "tx_scatter_gather": "on", "tx_scatter_gather_fraglist": "off [requested on]", "tx_sctp_segmentation": "on", "tx_tcp6_segmentation": "on", "tx_tcp_ecn_segmentation": "on", "tx_tcp_mangleid_segmentation": "on", "tx_tcp_segmentation": "on", "tx_tunnel_remcsum_segmentation": "off [fixed]", "tx_udp_segmentation": "on", 
"tx_udp_tnl_csum_segmentation": "off [requested on]", "tx_udp_tnl_segmentation": "off [requested on]", "tx_vlan_offload": "off [fixed]", "tx_vlan_stag_hw_insert": "off [fixed]", "vlan_challenged": "off [fixed]" }, "hw_timestamp_filters": [], "ipv4": { "address": "172.17.0.4", "broadcast": "172.17.0.255", "netmask": "255.255.255.0", "network": "172.17.0.0", "prefix": "24" }, "macaddress": "52:54:00:91:b6:a0", "mtu": 1496, "promisc": false, "speed": -1, "timestamping": [], "type": "ether" }, "ansible_eth1.21": { "active": true, "device": "eth1.21", "features": { "esp_hw_offload": "off [fixed]", "esp_tx_csum_hw_offload": "off [fixed]", "generic_receive_offload": "on", "generic_segmentation_offload": "on", "highdma": "on", "hsr_dup_offload": "off [fixed]", "hsr_fwd_offload": "off [fixed]", "hsr_tag_ins_offload": "off [fixed]", "hsr_tag_rm_offload": "off [fixed]", "hw_tc_offload": "off [fixed]", "l2_fwd_offload": "off [fixed]", "large_receive_offload": "off [fixed]", "loopback": "off [fixed]", "macsec_hw_offload": "off [fixed]", "ntuple_filters": "off [fixed]", "receive_hashing": "off [fixed]", "rx_all": "off [fixed]", "rx_checksumming": "off [fixed]", "rx_fcs": "off [fixed]", "rx_gro_hw": "off [fixed]", "rx_gro_list": "off", "rx_udp_gro_forwarding": "off", "rx_udp_tunnel_port_offload": "off [fixed]", "rx_vlan_filter": "off [fixed]", "rx_vlan_offload": "off [fixed]", "rx_vlan_stag_filter": "off [fixed]", "rx_vlan_stag_hw_parse": "off [fixed]", "scatter_gather": "on", "tcp_segmentation_offload": "on", "tls_hw_record": "off [fixed]", "tls_hw_rx_offload": "off [fixed]", "tls_hw_tx_offload": "off [fixed]", "tx_checksum_fcoe_crc": "off [requested on]", "tx_checksum_ip_generic": "on", "tx_checksum_ipv4": "off [fixed]", "tx_checksum_ipv6": "off [fixed]", "tx_checksum_sctp": "off [requested on]", "tx_checksumming": "on", "tx_esp_segmentation": "off [fixed]", "tx_fcoe_segmentation": "off [requested on]", "tx_gre_csum_segmentation": "off [requested on]", "tx_gre_segmentation": "off [requested on]", "tx_gso_list": "on", "tx_gso_partial": "off [fixed]", "tx_gso_robust": "off [fixed]", "tx_ipxip4_segmentation": "off [requested on]", "tx_ipxip6_segmentation": "off [requested on]", "tx_nocache_copy": "off", "tx_scatter_gather": "on", "tx_scatter_gather_fraglist": "off [requested on]", "tx_sctp_segmentation": "on", "tx_tcp6_segmentation": "on", "tx_tcp_ecn_segmentation": "on", "tx_tcp_mangleid_segmentation": "on", "tx_tcp_segmentation": "on", "tx_tunnel_remcsum_segmentation": "off [fixed]", "tx_udp_segmentation": "on", "tx_udp_tnl_csum_segmentation": "off [requested on]", "tx_udp_tnl_segmentation": "off [requested on]", "tx_vlan_offload": "off [fixed]", "tx_vlan_stag_hw_insert": "off [fixed]", "vlan_challenged": "off [fixed]" }, "hw_timestamp_filters": [], "ipv4": { "address": "172.18.0.4", "broadcast": "172.18.0.255", "netmask": "255.255.255.0", "network": "172.18.0.0", "prefix": "24" }, "macaddress": "52:54:00:8c:4a:57", "mtu": 1496, "promisc": false, "speed": -1, "timestamping": [], "type": "ether" }, "ansible_eth1.22": { "active": true, "device": "eth1.22", "features": { "esp_hw_offload": "off [fixed]", "esp_tx_csum_hw_offload": "off [fixed]", "generic_receive_offload": "on", "generic_segmentation_offload": "on", "highdma": "on", "hsr_dup_offload": "off [fixed]", "hsr_fwd_offload": "off [fixed]", "hsr_tag_ins_offload": "off [fixed]", "hsr_tag_rm_offload": "off [fixed]", "hw_tc_offload": "off [fixed]", "l2_fwd_offload": "off [fixed]", "large_receive_offload": "off [fixed]", "loopback": "off [fixed]", 
"macsec_hw_offload": "off [fixed]", "ntuple_filters": "off [fixed]", "receive_hashing": "off [fixed]", "rx_all": "off [fixed]", "rx_checksumming": "off [fixed]", "rx_fcs": "off [fixed]", "rx_gro_hw": "off [fixed]", "rx_gro_list": "off", "rx_udp_gro_forwarding": "off", "rx_udp_tunnel_port_offload": "off [fixed]", "rx_vlan_filter": "off [fixed]", "rx_vlan_offload": "off [fixed]", "rx_vlan_stag_filter": "off [fixed]", "rx_vlan_stag_hw_parse": "off [fixed]", "scatter_gather": "on", "tcp_segmentation_offload": "on", "tls_hw_record": "off [fixed]", "tls_hw_rx_offload": "off [fixed]", "tls_hw_tx_offload": "off [fixed]", "tx_checksum_fcoe_crc": "off [requested on]", "tx_checksum_ip_generic": "on", "tx_checksum_ipv4": "off [fixed]", "tx_checksum_ipv6": "off [fixed]", "tx_checksum_sctp": "off [requested on]", "tx_checksumming": "on", "tx_esp_segmentation": "off [fixed]", "tx_fcoe_segmentation": "off [requested on]", "tx_gre_csum_segmentation": "off [requested on]", "tx_gre_segmentation": "off [requested on]", "tx_gso_list": "on", "tx_gso_partial": "off [fixed]", "tx_gso_robust": "off [fixed]", "tx_ipxip4_segmentation": "off [requested on]", "tx_ipxip6_segmentation": "off [requested on]", "tx_nocache_copy": "off", "tx_scatter_gather": "on", "tx_scatter_gather_fraglist": "off [requested on]", "tx_sctp_segmentation": "on", "tx_tcp6_segmentation": "on", "tx_tcp_ecn_segmentation": "on", "tx_tcp_mangleid_segmentation": "on", "tx_tcp_segmentation": "on", "tx_tunnel_remcsum_segmentation": "off [fixed]", "tx_udp_segmentation": "on", "tx_udp_tnl_csum_segmentation": "off [requested on]", "tx_udp_tnl_segmentation": "off [requested on]", "tx_vlan_offload": "off [fixed]", "tx_vlan_stag_hw_insert": "off [fixed]", "vlan_challenged": "off [fixed]" }, "hw_timestamp_filters": [], "ipv4": { "address": "172.19.0.4", "broadcast": "172.19.0.255", "netmask": "255.255.255.0", "network": "172.19.0.0", "prefix": "24" }, "macaddress": "52:54:00:34:d6:b6", "mtu": 1496, "promisc": false, "speed": -1, "timestamping": [], "type": "ether" }, "ansible_eth1.23": { "active": true, "device": "eth1.23", "features": { "esp_hw_offload": "off [fixed]", "esp_tx_csum_hw_offload": "off [fixed]", "generic_receive_offload": "on", "generic_segmentation_offload": "on", "highdma": "on", "hsr_dup_offload": "off [fixed]", "hsr_fwd_offload": "off [fixed]", "hsr_tag_ins_offload": "off [fixed]", "hsr_tag_rm_offload": "off [fixed]", "hw_tc_offload": "off [fixed]", "l2_fwd_offload": "off [fixed]", "large_receive_offload": "off [fixed]", "loopback": "off [fixed]", "macsec_hw_offload": "off [fixed]", "ntuple_filters": "off [fixed]", "receive_hashing": "off [fixed]", "rx_all": "off [fixed]", "rx_checksumming": "off [fixed]", "rx_fcs": "off [fixed]", "rx_gro_hw": "off [fixed]", "rx_gro_list": "off", "rx_udp_gro_forwarding": "off", "rx_udp_tunnel_port_offload": "off [fixed]", "rx_vlan_filter": "off [fixed]", "rx_vlan_offload": "off [fixed]", "rx_vlan_stag_filter": "off [fixed]", "rx_vlan_stag_hw_parse": "off [fixed]", "scatter_gather": "on", "tcp_segmentation_offload": "on", "tls_hw_record": "off [fixed]", "tls_hw_rx_offload": "off [fixed]", "tls_hw_tx_offload": "off [fixed]", "tx_checksum_fcoe_crc": "off [requested on]", "tx_checksum_ip_generic": "on", "tx_checksum_ipv4": "off [fixed]", "tx_checksum_ipv6": "off [fixed]", "tx_checksum_sctp": "off [requested on]", "tx_checksumming": "on", "tx_esp_segmentation": "off [fixed]", "tx_fcoe_segmentation": "off [requested on]", "tx_gre_csum_segmentation": "off [requested on]", "tx_gre_segmentation": "off [requested 
on]", "tx_gso_list": "on", "tx_gso_partial": "off [fixed]", "tx_gso_robust": "off [fixed]", "tx_ipxip4_segmentation": "off [requested on]", "tx_ipxip6_segmentation": "off [requested on]", "tx_nocache_copy": "off", "tx_scatter_gather": "on", "tx_scatter_gather_fraglist": "off [requested on]", "tx_sctp_segmentation": "on", "tx_tcp6_segmentation": "on", "tx_tcp_ecn_segmentation": "on", "tx_tcp_mangleid_segmentation": "on", "tx_tcp_segmentation": "on", "tx_tunnel_remcsum_segmentation": "off [fixed]", "tx_udp_segmentation": "on", "tx_udp_tnl_csum_segmentation": "off [requested on]", "tx_udp_tnl_segmentation": "off [requested on]", "tx_vlan_offload": "off [fixed]", "tx_vlan_stag_hw_insert": "off [fixed]", "vlan_challenged": "off [fixed]" }, "hw_timestamp_filters": [], "ipv4": { "address": "172.20.0.4", "broadcast": "172.20.0.255", "netmask": "255.255.255.0", "network": "172.20.0.0", "prefix": "24" }, "macaddress": "52:54:00:e6:85:75", "mtu": 1496, "promisc": false, "speed": -1, "timestamping": [], "type": "ether" }, "ansible_fibre_channel_wwn": [], "ansible_fips": false, "ansible_form_factor": "Other", "ansible_fqdn": "controller", "ansible_hostname": "controller", "ansible_hostnqn": "nqn.2014-08.org.nvmexpress:uuid:5350774e-8b5e-4dba-80a9-92d405981c1d", "ansible_interfaces": [ "lo", "eth1.22", "eth1", "eth1.23", "eth0", "eth1.20", "eth1.21" ], "ansible_is_chroot": false, "ansible_iscsi_iqn": "", "ansible_kernel": "5.14.0-661.el9.x86_64", "ansible_kernel_version": "#1 SMP PREEMPT_DYNAMIC Fri Jan 16 09:19:22 UTC 2026", "ansible_lo": { "active": true, "device": "lo", "features": { "esp_hw_offload": "off [fixed]", "esp_tx_csum_hw_offload": "off [fixed]", "generic_receive_offload": "on", "generic_segmentation_offload": "on", "highdma": "on [fixed]", "hsr_dup_offload": "off [fixed]", "hsr_fwd_offload": "off [fixed]", "hsr_tag_ins_offload": "off [fixed]", "hsr_tag_rm_offload": "off [fixed]", "hw_tc_offload": "off [fixed]", "l2_fwd_offload": "off [fixed]", "large_receive_offload": "off [fixed]", "loopback": "on [fixed]", "macsec_hw_offload": "off [fixed]", "ntuple_filters": "off [fixed]", "receive_hashing": "off [fixed]", "rx_all": "off [fixed]", "rx_checksumming": "on [fixed]", "rx_fcs": "off [fixed]", "rx_gro_hw": "off [fixed]", "rx_gro_list": "off", "rx_udp_gro_forwarding": "off", "rx_udp_tunnel_port_offload": "off [fixed]", "rx_vlan_filter": "off [fixed]", "rx_vlan_offload": "off [fixed]", "rx_vlan_stag_filter": "off [fixed]", "rx_vlan_stag_hw_parse": "off [fixed]", "scatter_gather": "on", "tcp_segmentation_offload": "on", "tls_hw_record": "off [fixed]", "tls_hw_rx_offload": "off [fixed]", "tls_hw_tx_offload": "off [fixed]", "tx_checksum_fcoe_crc": "off [fixed]", "tx_checksum_ip_generic": "on [fixed]", "tx_checksum_ipv4": "off [fixed]", "tx_checksum_ipv6": "off [fixed]", "tx_checksum_sctp": "on [fixed]", "tx_checksumming": "on", "tx_esp_segmentation": "off [fixed]", "tx_fcoe_segmentation": "off [fixed]", "tx_gre_csum_segmentation": "off [fixed]", "tx_gre_segmentation": "off [fixed]", "tx_gso_list": "on", "tx_gso_partial": "off [fixed]", "tx_gso_robust": "off [fixed]", "tx_ipxip4_segmentation": "off [fixed]", "tx_ipxip6_segmentation": "off [fixed]", "tx_nocache_copy": "off [fixed]", "tx_scatter_gather": "on [fixed]", "tx_scatter_gather_fraglist": "on [fixed]", "tx_sctp_segmentation": "on", "tx_tcp6_segmentation": "on", "tx_tcp_ecn_segmentation": "on", "tx_tcp_mangleid_segmentation": "on", "tx_tcp_segmentation": "on", "tx_tunnel_remcsum_segmentation": "off [fixed]", "tx_udp_segmentation": "on", 
"tx_udp_tnl_csum_segmentation": "off [fixed]", "tx_udp_tnl_segmentation": "off [fixed]", "tx_vlan_offload": "off [fixed]", "tx_vlan_stag_hw_insert": "off [fixed]", "vlan_challenged": "on [fixed]" }, "hw_timestamp_filters": [], "ipv4": { "address": "127.0.0.1", "broadcast": "", "netmask": "255.0.0.0", "network": "127.0.0.0", "prefix": "8" }, "ipv6": [ { "address": "::1", "prefix": "128", "scope": "host" } ], "mtu": 65536, "promisc": false, "timestamping": [], "type": "loopback" }, "ansible_loadavg": { "15m": 0.43, "1m": 1.16, "5m": 0.78 }, "ansible_local": {}, "ansible_locally_reachable_ips": { "ipv4": [ "38.102.83.83", "127.0.0.0/8", "127.0.0.1", "172.17.0.4", "172.18.0.4", "172.19.0.4", "172.20.0.4", "192.168.122.11" ], "ipv6": [ "::1", "fe80::f816:3eff:fe4f:9ce2" ] }, "ansible_lsb": {}, "ansible_lvm": "N/A", "ansible_machine": "x86_64", "ansible_machine_id": "85ac68c10a6e7ae08ceb898dbdca0cb5", "ansible_memfree_mb": 5307, "ansible_memory_mb": { "nocache": { "free": 6760, "used": 919 }, "real": { "free": 5307, "total": 7679, "used": 2372 }, "swap": { "cached": 0, "free": 0, "total": 0, "used": 0 } }, "ansible_memtotal_mb": 7679, "ansible_mounts": [ { "block_available": 20001938, "block_size": 4096, "block_total": 20954875, "block_used": 952937, "device": "/dev/vda1", "fstype": "xfs", "inode_available": 41795113, "inode_total": 41942512, "inode_used": 147399, "mount": "/", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 81927938048, "size_total": 85831168000, "uuid": "22ac9141-3960-4912-b20e-19fc8a328d40" } ], "ansible_nodename": "controller", "ansible_os_family": "RedHat", "ansible_pkg_mgr": "dnf", "ansible_proc_cmdline": { "BOOT_IMAGE": "(hd0,msdos1)/boot/vmlinuz-5.14.0-661.el9.x86_64", "console": "ttyS0,115200n8", "crashkernel": "1G-2G:192M,2G-64G:256M,64G-:512M", "net.ifnames": "0", "no_timer_check": true, "ro": true, "root": "UUID=22ac9141-3960-4912-b20e-19fc8a328d40" }, "ansible_processor": [ "0", "AuthenticAMD", "AMD EPYC-Rome Processor", "1", "AuthenticAMD", "AMD EPYC-Rome Processor", "2", "AuthenticAMD", "AMD EPYC-Rome Processor", "3", "AuthenticAMD", "AMD EPYC-Rome Processor", "4", "AuthenticAMD", "AMD EPYC-Rome Processor", "5", "AuthenticAMD", "AMD EPYC-Rome Processor", "6", "AuthenticAMD", "AMD EPYC-Rome Processor", "7", "AuthenticAMD", "AMD EPYC-Rome Processor" ], "ansible_processor_cores": 1, "ansible_processor_count": 8, "ansible_processor_nproc": 8, "ansible_processor_threads_per_core": 1, "ansible_processor_vcpus": 8, "ansible_product_name": "OpenStack Nova", "ansible_product_serial": "NA", "ansible_product_uuid": "NA", "ansible_product_version": "26.3.1", "ansible_python": { "executable": "/usr/bin/python3", "has_sslcontext": true, "type": "cpython", "version": { "major": 3, "micro": 25, "minor": 9, "releaselevel": "final", "serial": 0 }, "version_info": [ 3, 9, 25, "final", 0 ] }, "ansible_python_version": "3.9.25", "ansible_real_group_id": 1000, "ansible_real_user_id": 1000, "ansible_selinux": { "config_mode": "enforcing", "mode": "enforcing", "policyvers": 33, "status": "enabled", "type": "targeted" }, "ansible_selinux_python_present": true, "ansible_service_mgr": "systemd", "ansible_ssh_host_key_ecdsa_public": "AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBMKcpNrJpAx4CrcANXc7Mn4hy3MVnmG3hOgRXXsREK9Nr1ubSroZOpt4L9Fqy2kwyrYygLJWw/0Ub6eRiHYEsC8=", "ansible_ssh_host_key_ecdsa_public_keytype": "ecdsa-sha2-nistp256", "ansible_ssh_host_key_ed25519_public": 
"AAAAC3NzaC1lZDI1NTE5AAAAIFnFPB1/0DZCQsBc2ZzNHDFdrc4p6KHouz9T+kXxP6Mv", "ansible_ssh_host_key_ed25519_public_keytype": "ssh-ed25519", "ansible_ssh_host_key_rsa_public": "AAAAB3NzaC1yc2EAAAADAQABAAABgQC6HMCq1xxuJz4wWSauIns7H1KBFyS3Tk+19hW3WD1ndZ/bhbUbu/Rs5Ec0vOmp7s7EkHiuKXTatx4MjNzbUn6HsmYAZ8KDmxg1tlwm0YfSXEOBPVYxQj4PBNn4QD4CYpKyS1zS8Ely7Yym8eZ095ZwSz/BgCg/D7Tv5gEKNlMDY+Nh3eHp0yZHUCUd5znPesQynCClZjvXcU2ufx+FkKmwymn+yuVvC/scPXO17zyVxad783DV/4CFZEQD7NCTT0eX/8JTKhIBcNZlSHVmJZmRhdbl6ZKos4vLh3TecvWjEXfNuQL/Kco5I0KsbwWlSnBR0WPyVi06rg+gGruQdT2NEqJy9cGWkTLILsm201OrkG2ctFFqBnNqhSSHFCdHr89m2f5+FIx6vuPKo41YBK4Adz0RWxmR2/gZEIz7eVaPTNVtGS173eNxki861y7wowKHtZfqyo5YrgCJvtHDXprxnPqw6uMoOCOwCjfOV6IgjQfNH/R2CrKf9LRyF2/9pys=", "ansible_ssh_host_key_rsa_public_keytype": "ssh-rsa", "ansible_swapfree_mb": 0, "ansible_swaptotal_mb": 0, "ansible_system": "Linux", "ansible_system_capabilities": [ "" ], "ansible_system_capabilities_enforced": "True", "ansible_system_vendor": "OpenStack Foundation", "ansible_uptime_seconds": 899, "ansible_user_dir": "/home/zuul", "ansible_user_gecos": "", "ansible_user_gid": 1000, "ansible_user_id": "zuul", "ansible_user_shell": "/bin/bash", "ansible_user_uid": 1000, "ansible_userspace_architecture": "x86_64", "ansible_userspace_bits": "64", "ansible_virtualization_role": "guest", "ansible_virtualization_tech_guest": [ "openstack" ], "ansible_virtualization_tech_host": [ "kvm" ], "ansible_virtualization_type": "openstack", "cifmw_artifacts_crc_sshkey": "~/.ssh/id_rsa", "cifmw_basedir": "/home/zuul/ci-framework-data", "cifmw_discovered_hash": "6b1f209ecc539dcfd8634a5c7786c6629def62c87865ceb38b6678fdd81d8a90", "cifmw_discovered_hash_algorithm": "sha256", "cifmw_discovered_image_name": "CentOS-Stream-GenericCloud-x86_64-9-latest.x86_64.qcow2", "cifmw_discovered_image_url": "https://cloud.centos.org/centos/9-stream/x86_64/images//CentOS-Stream-GenericCloud-x86_64-9-latest.x86_64.qcow2", "cifmw_install_yamls_defaults": { "ADOPTED_EXTERNAL_NETWORK": "172.21.1.0/24", "ADOPTED_INTERNALAPI_NETWORK": "172.17.1.0/24", "ADOPTED_STORAGEMGMT_NETWORK": "172.20.1.0/24", "ADOPTED_STORAGE_NETWORK": "172.18.1.0/24", "ADOPTED_TENANT_NETWORK": "172.9.1.0/24", "ANSIBLEEE": "config/samples/_v1beta1_ansibleee.yaml", "ANSIBLEEE_BRANCH": "main", "ANSIBLEEE_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/config/samples/_v1beta1_ansibleee.yaml", "ANSIBLEEE_IMG": "quay.io/openstack-k8s-operators/openstack-ansibleee-operator-index:latest", "ANSIBLEEE_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/kuttl-test.yaml", "ANSIBLEEE_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-ansibleee-operator/test/kuttl/tests", "ANSIBLEEE_KUTTL_NAMESPACE": "ansibleee-kuttl-tests", "ANSIBLEEE_REPO": "https://github.com/openstack-k8s-operators/openstack-ansibleee-operator", "ANSIBLEE_COMMIT_HASH": "", "BARBICAN": "config/samples/barbican_v1beta1_barbican.yaml", "BARBICAN_BRANCH": "main", "BARBICAN_COMMIT_HASH": "", "BARBICAN_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/config/samples/barbican_v1beta1_barbican.yaml", "BARBICAN_DEPL_IMG": "unused", "BARBICAN_IMG": "quay.io/openstack-k8s-operators/barbican-operator-index:latest", "BARBICAN_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/kuttl-test.yaml", "BARBICAN_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/barbican-operator/test/kuttl/tests", 
"BARBICAN_KUTTL_NAMESPACE": "barbican-kuttl-tests", "BARBICAN_REPO": "https://github.com/openstack-k8s-operators/barbican-operator.git", "BARBICAN_SERVICE_ENABLED": "true", "BARBICAN_SIMPLE_CRYPTO_ENCRYPTION_KEY": "sEFmdFjDUqRM2VemYslV5yGNWjokioJXsg8Nrlc3drU=", "BAREMETAL_BRANCH": "main", "BAREMETAL_COMMIT_HASH": "", "BAREMETAL_IMG": "quay.io/openstack-k8s-operators/openstack-baremetal-operator-index:latest", "BAREMETAL_OS_CONTAINER_IMG": "", "BAREMETAL_OS_IMG": "", "BAREMETAL_OS_IMG_TYPE": "", "BAREMETAL_REPO": "https://github.com/openstack-k8s-operators/openstack-baremetal-operator.git", "BAREMETAL_TIMEOUT": "20m", "BASH_IMG": "quay.io/openstack-k8s-operators/bash:latest", "BGP_ASN": "64999", "BGP_LEAF_1": "100.65.4.1", "BGP_LEAF_2": "100.64.4.1", "BGP_OVN_ROUTING": "false", "BGP_PEER_ASN": "64999", "BGP_SOURCE_IP": "172.30.4.2", "BGP_SOURCE_IP6": "f00d:f00d:f00d:f00d:f00d:f00d:f00d:42", "BMAAS_BRIDGE_IPV4_PREFIX": "172.20.1.2/24", "BMAAS_BRIDGE_IPV6_PREFIX": "fd00:bbbb::2/64", "BMAAS_INSTANCE_DISK_SIZE": "20", "BMAAS_INSTANCE_MEMORY": "4096", "BMAAS_INSTANCE_NAME_PREFIX": "crc-bmaas", "BMAAS_INSTANCE_NET_MODEL": "virtio", "BMAAS_INSTANCE_OS_VARIANT": "centos-stream9", "BMAAS_INSTANCE_VCPUS": "2", "BMAAS_INSTANCE_VIRT_TYPE": "kvm", "BMAAS_IPV4": "true", "BMAAS_IPV6": "false", "BMAAS_LIBVIRT_USER": "sushyemu", "BMAAS_METALLB_ADDRESS_POOL": "172.20.1.64/26", "BMAAS_METALLB_POOL_NAME": "baremetal", "BMAAS_NETWORK_IPV4_PREFIX": "172.20.1.1/24", "BMAAS_NETWORK_IPV6_PREFIX": "fd00:bbbb::1/64", "BMAAS_NETWORK_NAME": "crc-bmaas", "BMAAS_NODE_COUNT": "1", "BMAAS_OCP_INSTANCE_NAME": "crc", "BMAAS_REDFISH_PASSWORD": "password", "BMAAS_REDFISH_USERNAME": "admin", "BMAAS_ROUTE_LIBVIRT_NETWORKS": "crc-bmaas,crc,default", "BMAAS_SUSHY_EMULATOR_DRIVER": "libvirt", "BMAAS_SUSHY_EMULATOR_IMAGE": "quay.io/metal3-io/sushy-tools:latest", "BMAAS_SUSHY_EMULATOR_NAMESPACE": "sushy-emulator", "BMAAS_SUSHY_EMULATOR_OS_CLIENT_CONFIG_FILE": "/etc/openstack/clouds.yaml", "BMAAS_SUSHY_EMULATOR_OS_CLOUD": "openstack", "BMH_NAMESPACE": "openstack", "BMO_BRANCH": "release-0.9", "BMO_CLEANUP": "true", "BMO_COMMIT_HASH": "", "BMO_IPA_BRANCH": "stable/2024.1", "BMO_IRONIC_HOST": "192.168.122.10", "BMO_PROVISIONING_INTERFACE": "", "BMO_REPO": "https://github.com/metal3-io/baremetal-operator", "BMO_SETUP": false, "BMO_SETUP_ROUTE_REPLACE": "true", "BM_CTLPLANE_INTERFACE": "enp1s0", "BM_INSTANCE_MEMORY": "8192", "BM_INSTANCE_NAME_PREFIX": "edpm-compute-baremetal", "BM_INSTANCE_NAME_SUFFIX": "0", "BM_NETWORK_NAME": "default", "BM_NODE_COUNT": "1", "BM_ROOT_PASSWORD": "", "BM_ROOT_PASSWORD_SECRET": "", "CEILOMETER_CENTRAL_DEPL_IMG": "unused", "CEILOMETER_NOTIFICATION_DEPL_IMG": "unused", "CEPH_BRANCH": "release-1.15", "CEPH_CLIENT": "/home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/toolbox.yaml", "CEPH_COMMON": "/home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/common.yaml", "CEPH_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/cluster-test.yaml", "CEPH_CRDS": "/home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/crds.yaml", "CEPH_IMG": "quay.io/ceph/demo:latest-squid", "CEPH_OP": "/home/zuul/ci-framework-data/artifacts/manifests/operator/rook/deploy/examples/operator-openshift.yaml", "CEPH_REPO": "https://github.com/rook/rook.git", "CERTMANAGER_TIMEOUT": "300s", "CHECKOUT_FROM_OPENSTACK_REF": "true", "CINDER": "config/samples/cinder_v1beta1_cinder.yaml", "CINDERAPI_DEPL_IMG": "unused", "CINDERBKP_DEPL_IMG": 
"unused", "CINDERSCH_DEPL_IMG": "unused", "CINDERVOL_DEPL_IMG": "unused", "CINDER_BRANCH": "main", "CINDER_COMMIT_HASH": "", "CINDER_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/config/samples/cinder_v1beta1_cinder.yaml", "CINDER_IMG": "quay.io/openstack-k8s-operators/cinder-operator-index:latest", "CINDER_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/kuttl-test.yaml", "CINDER_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/cinder-operator/test/kuttl/tests", "CINDER_KUTTL_NAMESPACE": "cinder-kuttl-tests", "CINDER_REPO": "https://github.com/openstack-k8s-operators/cinder-operator.git", "CLEANUP_DIR_CMD": "rm -Rf", "CRC_BGP_NIC_1_MAC": "52:54:00:11:11:11", "CRC_BGP_NIC_2_MAC": "52:54:00:11:11:12", "CRC_HTTPS_PROXY": "", "CRC_HTTP_PROXY": "", "CRC_STORAGE_NAMESPACE": "crc-storage", "CRC_STORAGE_RETRIES": "3", "CRC_URL": "'https://developers.redhat.com/content-gateway/rest/mirror/pub/openshift-v4/clients/crc/latest/crc-linux-amd64.tar.xz'", "CRC_VERSION": "latest", "DATAPLANE_ANSIBLE_SECRET": "dataplane-ansible-ssh-private-key-secret", "DATAPLANE_ANSIBLE_USER": "", "DATAPLANE_COMPUTE_IP": "192.168.122.100", "DATAPLANE_CONTAINER_PREFIX": "openstack", "DATAPLANE_CONTAINER_TAG": "current-podified", "DATAPLANE_CUSTOM_SERVICE_RUNNER_IMG": "quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest", "DATAPLANE_DEFAULT_GW": "192.168.122.1", "DATAPLANE_EXTRA_NOVA_CONFIG_FILE": "/dev/null", "DATAPLANE_GROWVOLS_ARGS": "/=8GB /tmp=1GB /home=1GB /var=100%", "DATAPLANE_KUSTOMIZE_SCENARIO": "preprovisioned", "DATAPLANE_NETWORKER_IP": "192.168.122.200", "DATAPLANE_NETWORK_INTERFACE_NAME": "eth0", "DATAPLANE_NOVA_NFS_PATH": "", "DATAPLANE_NTP_SERVER": "pool.ntp.org", "DATAPLANE_PLAYBOOK": "osp.edpm.download_cache", "DATAPLANE_REGISTRY_URL": "quay.io/podified-antelope-centos9", "DATAPLANE_RUNNER_IMG": "", "DATAPLANE_SERVER_ROLE": "compute", "DATAPLANE_SSHD_ALLOWED_RANGES": "['192.168.122.0/24']", "DATAPLANE_TIMEOUT": "30m", "DATAPLANE_TLS_ENABLED": "true", "DATAPLANE_TOTAL_NETWORKER_NODES": "1", "DATAPLANE_TOTAL_NODES": "1", "DBSERVICE": "galera", "DESIGNATE": "config/samples/designate_v1beta1_designate.yaml", "DESIGNATE_BRANCH": "main", "DESIGNATE_COMMIT_HASH": "", "DESIGNATE_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/config/samples/designate_v1beta1_designate.yaml", "DESIGNATE_IMG": "quay.io/openstack-k8s-operators/designate-operator-index:latest", "DESIGNATE_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/kuttl-test.yaml", "DESIGNATE_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/designate-operator/test/kuttl/tests", "DESIGNATE_KUTTL_NAMESPACE": "designate-kuttl-tests", "DESIGNATE_REPO": "https://github.com/openstack-k8s-operators/designate-operator.git", "DNSDATA": "config/samples/network_v1beta1_dnsdata.yaml", "DNSDATA_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsdata.yaml", "DNSMASQ": "config/samples/network_v1beta1_dnsmasq.yaml", "DNSMASQ_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_dnsmasq.yaml", "DNS_DEPL_IMG": "unused", "DNS_DOMAIN": "localdomain", "DOWNLOAD_TOOLS_SELECTION": "all", "EDPM_ATTACH_EXTNET": "true", "EDPM_COMPUTE_ADDITIONAL_HOST_ROUTES": "'[]'", "EDPM_COMPUTE_ADDITIONAL_NETWORKS": "'[]'", "EDPM_COMPUTE_CELLS": "1", "EDPM_COMPUTE_CEPH_ENABLED": 
"true", "EDPM_COMPUTE_CEPH_NOVA": "true", "EDPM_COMPUTE_DHCP_AGENT_ENABLED": "true", "EDPM_COMPUTE_SRIOV_ENABLED": "true", "EDPM_COMPUTE_SUFFIX": "0", "EDPM_CONFIGURE_DEFAULT_ROUTE": "true", "EDPM_CONFIGURE_HUGEPAGES": "false", "EDPM_CONFIGURE_NETWORKING": "true", "EDPM_FIRSTBOOT_EXTRA": "/tmp/edpm-firstboot-extra", "EDPM_NETWORKER_SUFFIX": "0", "EDPM_TOTAL_NETWORKERS": "1", "EDPM_TOTAL_NODES": "1", "GALERA_REPLICAS": "", "GENERATE_SSH_KEYS": "true", "GIT_CLONE_OPTS": "", "GLANCE": "config/samples/glance_v1beta1_glance.yaml", "GLANCEAPI_DEPL_IMG": "unused", "GLANCE_BRANCH": "main", "GLANCE_COMMIT_HASH": "", "GLANCE_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/config/samples/glance_v1beta1_glance.yaml", "GLANCE_IMG": "quay.io/openstack-k8s-operators/glance-operator-index:latest", "GLANCE_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/kuttl-test.yaml", "GLANCE_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/glance-operator/test/kuttl/tests", "GLANCE_KUTTL_NAMESPACE": "glance-kuttl-tests", "GLANCE_REPO": "https://github.com/openstack-k8s-operators/glance-operator.git", "HEAT": "config/samples/heat_v1beta1_heat.yaml", "HEATAPI_DEPL_IMG": "unused", "HEATCFNAPI_DEPL_IMG": "unused", "HEATENGINE_DEPL_IMG": "unused", "HEAT_AUTH_ENCRYPTION_KEY": "767c3ed056cbaa3b9dfedb8c6f825bf0", "HEAT_BRANCH": "main", "HEAT_COMMIT_HASH": "", "HEAT_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/config/samples/heat_v1beta1_heat.yaml", "HEAT_IMG": "quay.io/openstack-k8s-operators/heat-operator-index:latest", "HEAT_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/kuttl-test.yaml", "HEAT_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/heat-operator/test/kuttl/tests", "HEAT_KUTTL_NAMESPACE": "heat-kuttl-tests", "HEAT_REPO": "https://github.com/openstack-k8s-operators/heat-operator.git", "HEAT_SERVICE_ENABLED": "true", "HORIZON": "config/samples/horizon_v1beta1_horizon.yaml", "HORIZON_BRANCH": "main", "HORIZON_COMMIT_HASH": "", "HORIZON_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/config/samples/horizon_v1beta1_horizon.yaml", "HORIZON_DEPL_IMG": "unused", "HORIZON_IMG": "quay.io/openstack-k8s-operators/horizon-operator-index:latest", "HORIZON_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/kuttl-test.yaml", "HORIZON_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/horizon-operator/test/kuttl/tests", "HORIZON_KUTTL_NAMESPACE": "horizon-kuttl-tests", "HORIZON_REPO": "https://github.com/openstack-k8s-operators/horizon-operator.git", "INFRA_BRANCH": "main", "INFRA_COMMIT_HASH": "", "INFRA_IMG": "quay.io/openstack-k8s-operators/infra-operator-index:latest", "INFRA_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/kuttl-test.yaml", "INFRA_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/test/kuttl/tests", "INFRA_KUTTL_NAMESPACE": "infra-kuttl-tests", "INFRA_REPO": "https://github.com/openstack-k8s-operators/infra-operator.git", "INSTALL_CERT_MANAGER": "true", "INSTALL_NMSTATE": "true || false", "INSTALL_NNCP": "true || false", "INTERNALAPI_HOST_ROUTES": "", "IPV6_LAB_IPV4_NETWORK_IPADDRESS": "172.30.0.1/24", "IPV6_LAB_IPV6_NETWORK_IPADDRESS": "fd00:abcd:abcd:fc00::1/64", "IPV6_LAB_LIBVIRT_STORAGE_POOL": "default", "IPV6_LAB_MANAGE_FIREWALLD": "true", 
"IPV6_LAB_NAT64_HOST_IPV4": "172.30.0.2/24", "IPV6_LAB_NAT64_HOST_IPV6": "fd00:abcd:abcd:fc00::2/64", "IPV6_LAB_NAT64_INSTANCE_NAME": "nat64-router", "IPV6_LAB_NAT64_IPV6_NETWORK": "fd00:abcd:abcd:fc00::/64", "IPV6_LAB_NAT64_TAYGA_DYNAMIC_POOL": "192.168.255.0/24", "IPV6_LAB_NAT64_TAYGA_IPV4": "192.168.255.1", "IPV6_LAB_NAT64_TAYGA_IPV6": "fd00:abcd:abcd:fc00::3", "IPV6_LAB_NAT64_TAYGA_IPV6_PREFIX": "fd00:abcd:abcd:fcff::/96", "IPV6_LAB_NAT64_UPDATE_PACKAGES": "false", "IPV6_LAB_NETWORK_NAME": "nat64", "IPV6_LAB_SNO_CLUSTER_NETWORK": "fd00:abcd:0::/48", "IPV6_LAB_SNO_HOST_IP": "fd00:abcd:abcd:fc00::11", "IPV6_LAB_SNO_HOST_PREFIX": "64", "IPV6_LAB_SNO_INSTANCE_NAME": "sno", "IPV6_LAB_SNO_MACHINE_NETWORK": "fd00:abcd:abcd:fc00::/64", "IPV6_LAB_SNO_OCP_MIRROR_URL": "https://mirror.openshift.com/pub/openshift-v4/clients/ocp", "IPV6_LAB_SNO_OCP_VERSION": "latest-4.14", "IPV6_LAB_SNO_SERVICE_NETWORK": "fd00:abcd:abcd:fc03::/112", "IPV6_LAB_SSH_PUB_KEY": "/home/zuul/.ssh/id_rsa.pub", "IPV6_LAB_WORK_DIR": "/home/zuul/.ipv6lab", "IRONIC": "config/samples/ironic_v1beta1_ironic.yaml", "IRONICAPI_DEPL_IMG": "unused", "IRONICCON_DEPL_IMG": "unused", "IRONICINS_DEPL_IMG": "unused", "IRONICNAG_DEPL_IMG": "unused", "IRONICPXE_DEPL_IMG": "unused", "IRONIC_BRANCH": "main", "IRONIC_COMMIT_HASH": "", "IRONIC_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/config/samples/ironic_v1beta1_ironic.yaml", "IRONIC_IMAGE": "quay.io/metal3-io/ironic", "IRONIC_IMAGE_TAG": "release-24.1", "IRONIC_IMG": "quay.io/openstack-k8s-operators/ironic-operator-index:latest", "IRONIC_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/kuttl-test.yaml", "IRONIC_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ironic-operator/test/kuttl/tests", "IRONIC_KUTTL_NAMESPACE": "ironic-kuttl-tests", "IRONIC_REPO": "https://github.com/openstack-k8s-operators/ironic-operator.git", "KEYSTONEAPI": "config/samples/keystone_v1beta1_keystoneapi.yaml", "KEYSTONEAPI_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/config/samples/keystone_v1beta1_keystoneapi.yaml", "KEYSTONEAPI_DEPL_IMG": "unused", "KEYSTONE_BRANCH": "main", "KEYSTONE_COMMIT_HASH": "", "KEYSTONE_FEDERATION_CLIENT_SECRET": "COX8bmlKAWn56XCGMrKQJj7dgHNAOl6f", "KEYSTONE_FEDERATION_CRYPTO_PASSPHRASE": "openstack", "KEYSTONE_IMG": "quay.io/openstack-k8s-operators/keystone-operator-index:latest", "KEYSTONE_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/kuttl-test.yaml", "KEYSTONE_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/keystone-operator/test/kuttl/tests", "KEYSTONE_KUTTL_NAMESPACE": "keystone-kuttl-tests", "KEYSTONE_REPO": "https://github.com/openstack-k8s-operators/keystone-operator.git", "KUBEADMIN_PWD": "12345678", "LIBVIRT_SECRET": "libvirt-secret", "LOKI_DEPLOY_MODE": "openshift-network", "LOKI_DEPLOY_NAMESPACE": "netobserv", "LOKI_DEPLOY_SIZE": "1x.demo", "LOKI_NAMESPACE": "openshift-operators-redhat", "LOKI_OPERATOR_GROUP": "openshift-operators-redhat-loki", "LOKI_SUBSCRIPTION": "loki-operator", "LVMS_CR": "1", "MANILA": "config/samples/manila_v1beta1_manila.yaml", "MANILAAPI_DEPL_IMG": "unused", "MANILASCH_DEPL_IMG": "unused", "MANILASHARE_DEPL_IMG": "unused", "MANILA_BRANCH": "main", "MANILA_COMMIT_HASH": "", "MANILA_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/config/samples/manila_v1beta1_manila.yaml", "MANILA_IMG": 
"quay.io/openstack-k8s-operators/manila-operator-index:latest", "MANILA_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/kuttl-test.yaml", "MANILA_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/manila-operator/test/kuttl/tests", "MANILA_KUTTL_NAMESPACE": "manila-kuttl-tests", "MANILA_REPO": "https://github.com/openstack-k8s-operators/manila-operator.git", "MANILA_SERVICE_ENABLED": "true", "MARIADB": "config/samples/mariadb_v1beta1_galera.yaml", "MARIADB_BRANCH": "main", "MARIADB_CHAINSAW_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/config.yaml", "MARIADB_CHAINSAW_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/chainsaw/tests", "MARIADB_CHAINSAW_NAMESPACE": "mariadb-chainsaw-tests", "MARIADB_COMMIT_HASH": "", "MARIADB_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/config/samples/mariadb_v1beta1_galera.yaml", "MARIADB_DEPL_IMG": "unused", "MARIADB_IMG": "quay.io/openstack-k8s-operators/mariadb-operator-index:latest", "MARIADB_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/kuttl-test.yaml", "MARIADB_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/mariadb-operator/test/kuttl/tests", "MARIADB_KUTTL_NAMESPACE": "mariadb-kuttl-tests", "MARIADB_REPO": "https://github.com/openstack-k8s-operators/mariadb-operator.git", "MEMCACHED": "config/samples/memcached_v1beta1_memcached.yaml", "MEMCACHED_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/memcached_v1beta1_memcached.yaml", "MEMCACHED_DEPL_IMG": "unused", "METADATA_SHARED_SECRET": "1234567842", "METALLB_IPV6_POOL": "fd00:aaaa::80-fd00:aaaa::90", "METALLB_POOL": "192.168.122.80-192.168.122.90", "MICROSHIFT": "0", "NAMESPACE": "openstack", "NETCONFIG": "config/samples/network_v1beta1_netconfig.yaml", "NETCONFIG_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator/config/samples/network_v1beta1_netconfig.yaml", "NETCONFIG_DEPL_IMG": "unused", "NETOBSERV_DEPLOY_NAMESPACE": "netobserv", "NETOBSERV_NAMESPACE": "openshift-netobserv-operator", "NETOBSERV_OPERATOR_GROUP": "openshift-netobserv-operator-net", "NETOBSERV_SUBSCRIPTION": "netobserv-operator", "NETWORK_BGP": "false", "NETWORK_DESIGNATE_ADDRESS_PREFIX": "172.28.0", "NETWORK_DESIGNATE_EXT_ADDRESS_PREFIX": "172.50.0", "NETWORK_INTERNALAPI_ADDRESS_PREFIX": "172.17.0", "NETWORK_ISOLATION": "true", "NETWORK_ISOLATION_INSTANCE_NAME": "crc", "NETWORK_ISOLATION_IPV4": "true", "NETWORK_ISOLATION_IPV4_ADDRESS": "172.16.1.1/24", "NETWORK_ISOLATION_IPV4_NAT": "true", "NETWORK_ISOLATION_IPV6": "false", "NETWORK_ISOLATION_IPV6_ADDRESS": "fd00:aaaa::1/64", "NETWORK_ISOLATION_IP_ADDRESS": "192.168.122.10", "NETWORK_ISOLATION_MAC": "52:54:00:11:11:10", "NETWORK_ISOLATION_NETWORK_NAME": "net-iso", "NETWORK_ISOLATION_NET_NAME": "default", "NETWORK_ISOLATION_USE_DEFAULT_NETWORK": "true", "NETWORK_MTU": "1500", "NETWORK_STORAGEMGMT_ADDRESS_PREFIX": "172.20.0", "NETWORK_STORAGE_ADDRESS_PREFIX": "172.18.0", "NETWORK_STORAGE_MACVLAN": "", "NETWORK_TENANT_ADDRESS_PREFIX": "172.19.0", "NETWORK_VLAN_START": "20", "NETWORK_VLAN_STEP": "1", "NEUTRONAPI": "config/samples/neutron_v1beta1_neutronapi.yaml", "NEUTRONAPI_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/config/samples/neutron_v1beta1_neutronapi.yaml", "NEUTRONAPI_DEPL_IMG": "unused", "NEUTRON_BRANCH": "main", 
"NEUTRON_COMMIT_HASH": "", "NEUTRON_IMG": "quay.io/openstack-k8s-operators/neutron-operator-index:latest", "NEUTRON_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/kuttl-test.yaml", "NEUTRON_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/neutron-operator/test/kuttl/tests", "NEUTRON_KUTTL_NAMESPACE": "neutron-kuttl-tests", "NEUTRON_REPO": "https://github.com/openstack-k8s-operators/neutron-operator.git", "NFS_HOME": "/home/nfs", "NMSTATE_NAMESPACE": "openshift-nmstate", "NMSTATE_OPERATOR_GROUP": "openshift-nmstate-tn6k8", "NMSTATE_SUBSCRIPTION": "kubernetes-nmstate-operator", "NNCP_ADDITIONAL_HOST_ROUTES": "", "NNCP_BGP_1_INTERFACE": "enp7s0", "NNCP_BGP_1_IP_ADDRESS": "100.65.4.2", "NNCP_BGP_2_INTERFACE": "enp8s0", "NNCP_BGP_2_IP_ADDRESS": "100.64.4.2", "NNCP_BRIDGE": "ospbr", "NNCP_CLEANUP_TIMEOUT": "120s", "NNCP_CTLPLANE_IPV6_ADDRESS_PREFIX": "fd00:aaaa::", "NNCP_CTLPLANE_IPV6_ADDRESS_SUFFIX": "10", "NNCP_CTLPLANE_IP_ADDRESS_PREFIX": "192.168.122", "NNCP_CTLPLANE_IP_ADDRESS_SUFFIX": "10", "NNCP_DNS_SERVER": "192.168.122.1", "NNCP_DNS_SERVER_IPV6": "fd00:aaaa::1", "NNCP_GATEWAY": "192.168.122.1", "NNCP_GATEWAY_IPV6": "fd00:aaaa::1", "NNCP_INTERFACE": "enp6s0", "NNCP_NODES": "", "NNCP_TIMEOUT": "240s", "NOVA": "config/samples/nova_v1beta1_nova_collapsed_cell.yaml", "NOVA_BRANCH": "main", "NOVA_COMMIT_HASH": "", "NOVA_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/nova-operator/config/samples/nova_v1beta1_nova_collapsed_cell.yaml", "NOVA_IMG": "quay.io/openstack-k8s-operators/nova-operator-index:latest", "NOVA_REPO": "https://github.com/openstack-k8s-operators/nova-operator.git", "NUMBER_OF_INSTANCES": "1", "OCP_NETWORK_NAME": "crc", "OCTAVIA": "config/samples/octavia_v1beta1_octavia.yaml", "OCTAVIA_BRANCH": "main", "OCTAVIA_COMMIT_HASH": "", "OCTAVIA_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/config/samples/octavia_v1beta1_octavia.yaml", "OCTAVIA_IMG": "quay.io/openstack-k8s-operators/octavia-operator-index:latest", "OCTAVIA_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/kuttl-test.yaml", "OCTAVIA_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/octavia-operator/test/kuttl/tests", "OCTAVIA_KUTTL_NAMESPACE": "octavia-kuttl-tests", "OCTAVIA_REPO": "https://github.com/openstack-k8s-operators/octavia-operator.git", "OKD": "false", "OPENSTACK_BRANCH": "main", "OPENSTACK_BUNDLE_IMG": "quay.io/openstack-k8s-operators/openstack-operator-bundle:latest", "OPENSTACK_COMMIT_HASH": "", "OPENSTACK_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml", "OPENSTACK_CRDS_DIR": "openstack_crds", "OPENSTACK_CTLPLANE": "config/samples/core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml", "OPENSTACK_IMG": "quay.io/openstack-k8s-operators/openstack-operator-index:latest", "OPENSTACK_K8S_BRANCH": "main", "OPENSTACK_K8S_TAG": "latest", "OPENSTACK_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/kuttl-test.yaml", "OPENSTACK_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/openstack-operator/test/kuttl/tests", "OPENSTACK_KUTTL_NAMESPACE": "openstack-kuttl-tests", "OPENSTACK_NEUTRON_CUSTOM_CONF": "", "OPENSTACK_REPO": "https://github.com/openstack-k8s-operators/openstack-operator.git", "OPENSTACK_STORAGE_BUNDLE_IMG": 
"quay.io/openstack-k8s-operators/openstack-operator-storage-bundle:latest", "OPERATOR_BASE_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator", "OPERATOR_CHANNEL": "", "OPERATOR_NAMESPACE": "openstack-operators", "OPERATOR_SOURCE": "", "OPERATOR_SOURCE_NAMESPACE": "", "OUT": "/home/zuul/ci-framework-data/artifacts/manifests", "OUTPUT_DIR": "/home/zuul/ci-framework-data/artifacts/edpm", "OVNCONTROLLER": "config/samples/ovn_v1beta1_ovncontroller.yaml", "OVNCONTROLLER_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovncontroller.yaml", "OVNCONTROLLER_NMAP": "true", "OVNDBS": "config/samples/ovn_v1beta1_ovndbcluster.yaml", "OVNDBS_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovndbcluster.yaml", "OVNNORTHD": "config/samples/ovn_v1beta1_ovnnorthd.yaml", "OVNNORTHD_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/config/samples/ovn_v1beta1_ovnnorthd.yaml", "OVN_BRANCH": "main", "OVN_COMMIT_HASH": "", "OVN_IMG": "quay.io/openstack-k8s-operators/ovn-operator-index:latest", "OVN_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/kuttl-test.yaml", "OVN_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/ovn-operator/test/kuttl/tests", "OVN_KUTTL_NAMESPACE": "ovn-kuttl-tests", "OVN_REPO": "https://github.com/openstack-k8s-operators/ovn-operator.git", "PASSWORD": "12345678", "PLACEMENTAPI": "config/samples/placement_v1beta1_placementapi.yaml", "PLACEMENTAPI_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/config/samples/placement_v1beta1_placementapi.yaml", "PLACEMENTAPI_DEPL_IMG": "unused", "PLACEMENT_BRANCH": "main", "PLACEMENT_COMMIT_HASH": "", "PLACEMENT_IMG": "quay.io/openstack-k8s-operators/placement-operator-index:latest", "PLACEMENT_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/kuttl-test.yaml", "PLACEMENT_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/placement-operator/test/kuttl/tests", "PLACEMENT_KUTTL_NAMESPACE": "placement-kuttl-tests", "PLACEMENT_REPO": "https://github.com/openstack-k8s-operators/placement-operator.git", "PULL_SECRET": "/home/zuul/src/review.rdoproject.org/rdo-jobs/playbooks/data_plane_adoption/pull-secret.txt", "RABBITMQ": "docs/examples/default-security-context/rabbitmq.yaml", "RABBITMQ_BRANCH": "patches", "RABBITMQ_COMMIT_HASH": "", "RABBITMQ_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/rabbitmq-operator/docs/examples/default-security-context/rabbitmq.yaml", "RABBITMQ_DEPL_IMG": "unused", "RABBITMQ_IMG": "quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest", "RABBITMQ_REPO": "https://github.com/openstack-k8s-operators/rabbitmq-cluster-operator.git", "REDHAT_OPERATORS": "false", "REDIS": "config/samples/redis_v1beta1_redis.yaml", "REDIS_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/infra-operator-redis/config/samples/redis_v1beta1_redis.yaml", "REDIS_DEPL_IMG": "unused", "RH_REGISTRY_PWD": "", "RH_REGISTRY_USER": "", "SECRET": "osp-secret", "SG_CORE_DEPL_IMG": "unused", "STANDALONE_COMPUTE_DRIVER": "libvirt", "STANDALONE_EXTERNAL_NET_PREFFIX": "172.21.0", "STANDALONE_INTERNALAPI_NET_PREFIX": "172.17.0", "STANDALONE_STORAGEMGMT_NET_PREFIX": "172.20.0", "STANDALONE_STORAGE_NET_PREFIX": "172.18.0", "STANDALONE_TENANT_NET_PREFIX": "172.19.0", "STORAGEMGMT_HOST_ROUTES": "", "STORAGE_CLASS": "local-storage", 
"STORAGE_HOST_ROUTES": "", "SWIFT": "config/samples/swift_v1beta1_swift.yaml", "SWIFT_BRANCH": "main", "SWIFT_COMMIT_HASH": "", "SWIFT_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/config/samples/swift_v1beta1_swift.yaml", "SWIFT_IMG": "quay.io/openstack-k8s-operators/swift-operator-index:latest", "SWIFT_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/kuttl-test.yaml", "SWIFT_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/swift-operator/test/kuttl/tests", "SWIFT_KUTTL_NAMESPACE": "swift-kuttl-tests", "SWIFT_REPO": "https://github.com/openstack-k8s-operators/swift-operator.git", "TELEMETRY": "config/samples/telemetry_v1beta1_telemetry.yaml", "TELEMETRY_BRANCH": "main", "TELEMETRY_COMMIT_HASH": "", "TELEMETRY_CR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/config/samples/telemetry_v1beta1_telemetry.yaml", "TELEMETRY_IMG": "quay.io/openstack-k8s-operators/telemetry-operator-index:latest", "TELEMETRY_KUTTL_BASEDIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator", "TELEMETRY_KUTTL_CONF": "/home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/kuttl-test.yaml", "TELEMETRY_KUTTL_DIR": "/home/zuul/ci-framework-data/artifacts/manifests/operator/telemetry-operator/test/kuttl/suites", "TELEMETRY_KUTTL_NAMESPACE": "telemetry-kuttl-tests", "TELEMETRY_KUTTL_RELPATH": "test/kuttl/suites", "TELEMETRY_REPO": "https://github.com/openstack-k8s-operators/telemetry-operator.git", "TENANT_HOST_ROUTES": "", "TIMEOUT": "300s", "TLS_ENABLED": "false", "tripleo_deploy": "export REGISTRY_USER:" }, "cifmw_install_yamls_environment": { "BMO_SETUP": false, "CHECKOUT_FROM_OPENSTACK_REF": "true", "KUBECONFIG": "/home/zuul/.crc/machines/crc/kubeconfig", "OPENSTACK_K8S_BRANCH": "main", "OUT": "/home/zuul/ci-framework-data/artifacts/manifests", "OUTPUT_DIR": "/home/zuul/ci-framework-data/artifacts/edpm" }, "cifmw_install_yamls_vars": { "BMO_SETUP": false }, "cifmw_openshift_api": "https://api.crc.testing:6443", "cifmw_openshift_context": "default/api-crc-testing:6443/kubeadmin", "cifmw_openshift_kubeconfig": "/home/zuul/.crc/machines/crc/kubeconfig", "cifmw_openshift_login_api": "https://api.crc.testing:6443", "cifmw_openshift_login_cert_login": false, "cifmw_openshift_login_context": "default/api-crc-testing:6443/kubeadmin", "cifmw_openshift_login_kubeconfig": "/home/zuul/.crc/machines/crc/kubeconfig", "cifmw_openshift_login_password": 123456789, "cifmw_openshift_login_token": "sha256~DRDkWcBbIdKn0u9Nxm-2md9dPVf-gVXLRNv2O8KcEp4", "cifmw_openshift_login_user": "kubeadmin", "cifmw_openshift_password": 123456789, "cifmw_openshift_setup_skip_internal_registry_tls_verify": true, "cifmw_openshift_skip_tls_verify": true, "cifmw_openshift_token": "sha256~DRDkWcBbIdKn0u9Nxm-2md9dPVf-gVXLRNv2O8KcEp4", "cifmw_openshift_user": "kubeadmin", "cifmw_path": "/home/zuul/.crc/bin:/home/zuul/.crc/bin/oc:/home/zuul/bin:/home/zuul/.local/bin:/home/zuul/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin", "cifmw_repo_setup_commit_hash": null, "cifmw_repo_setup_distro_hash": null, "cifmw_repo_setup_dlrn_api_url": "https://trunk.rdoproject.org/api-centos9-antelope", "cifmw_repo_setup_dlrn_url": "https://trunk.rdoproject.org/centos9-antelope/current-podified/delorean.repo.md5", "cifmw_repo_setup_extended_hash": null, "cifmw_repo_setup_full_hash": "c3923531bcda0b0811b2d5053f189beb", "cifmw_repo_setup_release": "antelope", "cifmw_use_crc": false, 
"cifmw_use_libvirt": false, "discovered_interpreter_python": "/usr/bin/python3", "gather_subset": [ "all" ], "module_setup": true, "pre_deploy": [ { "inventory": "/home/zuul/ci-framework-data/artifacts/zuul_inventory.yml", "name": "Fetch compute facts", "source": "fetch_compute_facts.yml", "type": "playbook" } ], "pre_infra": [ { "connection": "local", "inventory": "localhost,", "name": "Download needed tools", "source": "/home/zuul/src/github.com/openstack-k8s-operators/install_yamls/devsetup/download_tools.yaml", "type": "playbook" } ] }home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci-env/0000755000175000017500000000000015134437263023534 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/ci-env/networking-info.yml0000644000175000017500000000646415134432007027400 0ustar zuulzuulcrc_ci_bootstrap_networks_out: controller: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.11/24 mac: fa:16:3e:b2:1f:bf mtu: '1500' internal-api: connection: ci-private-network-20 iface: eth1.20 ip: 172.17.0.4/24 mac: 52:54:00:91:b6:a0 mtu: '1496' parent_iface: eth1 vlan: 20 storage: connection: ci-private-network-21 iface: eth1.21 ip: 172.18.0.4/24 mac: 52:54:00:8c:4a:57 mtu: '1496' parent_iface: eth1 vlan: 21 storage_mgmt: connection: ci-private-network-23 iface: eth1.23 ip: 172.20.0.4/24 mac: 52:54:00:e6:85:75 mtu: '1496' parent_iface: eth1 vlan: 23 tenant: connection: ci-private-network-22 iface: eth1.22 ip: 172.19.0.4/24 mac: 52:54:00:34:d6:b6 mtu: '1496' parent_iface: eth1 vlan: 22 crc: default: connection: ci-private-network gw: 192.168.122.1 iface: ens7 ip: 192.168.122.10/24 mac: fa:16:3e:ee:8d:ea mtu: '1500' internal-api: connection: ci-private-network-20 iface: ens7.20 ip: 172.17.0.5/24 mac: 52:54:00:98:87:e6 mtu: '1496' parent_iface: ens7 vlan: 20 storage: connection: ci-private-network-21 iface: ens7.21 ip: 172.18.0.5/24 mac: 52:54:00:fe:f9:a5 mtu: '1496' parent_iface: ens7 vlan: 21 storage_mgmt: connection: ci-private-network-23 iface: ens7.23 ip: 172.20.0.5/24 mac: 52:54:00:66:52:20 mtu: '1496' parent_iface: ens7 vlan: 23 tenant: connection: ci-private-network-22 iface: ens7.22 ip: 172.19.0.5/24 mac: 52:54:00:c7:ca:3e mtu: '1496' parent_iface: ens7 vlan: 22 standalone: default: connection: ci-private-network gw: 192.168.122.1 iface: eth1 ip: 192.168.122.100/24 mac: fa:16:3e:b5:53:e5 mtu: '1500' internal-api: iface: eth1.20 ip: 172.17.0.100/24 mac: 52:54:00:87:a9:f6 mtu: '1496' parent_iface: eth1 vlan: 20 storage: iface: eth1.21 ip: 172.18.0.100/24 mac: 52:54:00:4f:71:e3 mtu: '1496' parent_iface: eth1 vlan: 21 storage_mgmt: iface: eth1.23 ip: 172.20.0.100/24 mac: 52:54:00:40:23:1f mtu: '1496' parent_iface: eth1 vlan: 23 tenant: iface: eth1.22 ip: 172.19.0.100/24 mac: 52:54:00:f6:c7:15 mtu: '1496' parent_iface: eth1 vlan: 22 crc_ci_bootstrap_provider_dns: - 199.204.44.24 - 199.204.47.54 home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_gen_kustomize_values/0000755000175000017500000000000015134412737027267 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_gen_kustomize_values/olm-values/0000755000175000017500000000000015134437263031354 5ustar zuulzuul././@LongLink0000644000000000000000000000014600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_gen_kustomize_values/olm-values/values.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_gen_kustomize_values/olm-values/values.yam0000644000175000017500000000041315134412737033360 0ustar zuulzuulapiVersion: v1 data: 
openstack-operator-channel: alpha openstack-operator-image: quay.io/openstack-k8s-operators/openstack-operator-index:latest kind: ConfigMap metadata: annotations: config.kubernetes.io/local-config: 'true' name: olm-values home/zuul/zuul-output/logs/ci-framework-data/artifacts/installed-packages.yml0000644000175000017500000023013215134432023026617 0ustar zuulzuulNetworkManager: - arch: x86_64 epoch: 1 name: NetworkManager release: 2.el9 source: rpm version: 1.54.3 NetworkManager-libnm: - arch: x86_64 epoch: 1 name: NetworkManager-libnm release: 2.el9 source: rpm version: 1.54.3 NetworkManager-team: - arch: x86_64 epoch: 1 name: NetworkManager-team release: 2.el9 source: rpm version: 1.54.3 NetworkManager-tui: - arch: x86_64 epoch: 1 name: NetworkManager-tui release: 2.el9 source: rpm version: 1.54.3 aardvark-dns: - arch: x86_64 epoch: 2 name: aardvark-dns release: 1.el9 source: rpm version: 1.17.0 abattis-cantarell-fonts: - arch: noarch epoch: null name: abattis-cantarell-fonts release: 4.el9 source: rpm version: '0.301' acl: - arch: x86_64 epoch: null name: acl release: 4.el9 source: rpm version: 2.3.1 adobe-source-code-pro-fonts: - arch: noarch epoch: null name: adobe-source-code-pro-fonts release: 12.el9.1 source: rpm version: 2.030.1.050 alternatives: - arch: x86_64 epoch: null name: alternatives release: 2.el9 source: rpm version: '1.24' annobin: - arch: x86_64 epoch: null name: annobin release: 1.el9 source: rpm version: '12.98' ansible-core: - arch: x86_64 epoch: 1 name: ansible-core release: 2.el9 source: rpm version: 2.14.18 apr: - arch: x86_64 epoch: null name: apr release: 12.el9 source: rpm version: 1.7.0 apr-util: - arch: x86_64 epoch: null name: apr-util release: 23.el9 source: rpm version: 1.6.1 apr-util-bdb: - arch: x86_64 epoch: null name: apr-util-bdb release: 23.el9 source: rpm version: 1.6.1 apr-util-openssl: - arch: x86_64 epoch: null name: apr-util-openssl release: 23.el9 source: rpm version: 1.6.1 attr: - arch: x86_64 epoch: null name: attr release: 3.el9 source: rpm version: 2.5.1 audit: - arch: x86_64 epoch: null name: audit release: 8.el9 source: rpm version: 3.1.5 audit-libs: - arch: x86_64 epoch: null name: audit-libs release: 8.el9 source: rpm version: 3.1.5 authselect: - arch: x86_64 epoch: null name: authselect release: 3.el9 source: rpm version: 1.2.6 authselect-compat: - arch: x86_64 epoch: null name: authselect-compat release: 3.el9 source: rpm version: 1.2.6 authselect-libs: - arch: x86_64 epoch: null name: authselect-libs release: 3.el9 source: rpm version: 1.2.6 basesystem: - arch: noarch epoch: null name: basesystem release: 13.el9 source: rpm version: '11' bash: - arch: x86_64 epoch: null name: bash release: 9.el9 source: rpm version: 5.1.8 bash-completion: - arch: noarch epoch: 1 name: bash-completion release: 5.el9 source: rpm version: '2.11' binutils: - arch: x86_64 epoch: null name: binutils release: 69.el9 source: rpm version: 2.35.2 binutils-gold: - arch: x86_64 epoch: null name: binutils-gold release: 69.el9 source: rpm version: 2.35.2 buildah: - arch: x86_64 epoch: 2 name: buildah release: 1.el9 source: rpm version: 1.41.3 bzip2: - arch: x86_64 epoch: null name: bzip2 release: 10.el9 source: rpm version: 1.0.8 bzip2-libs: - arch: x86_64 epoch: null name: bzip2-libs release: 10.el9 source: rpm version: 1.0.8 c-ares: - arch: x86_64 epoch: null name: c-ares release: 2.el9 source: rpm version: 1.19.1 ca-certificates: - arch: noarch epoch: null name: ca-certificates release: 91.el9 source: rpm version: 2025.2.80_v9.0.305 centos-gpg-keys: - arch: 
noarch epoch: null name: centos-gpg-keys release: 34.el9 source: rpm version: '9.0' centos-logos: - arch: x86_64 epoch: null name: centos-logos release: 1.el9 source: rpm version: '90.9' centos-stream-release: - arch: noarch epoch: null name: centos-stream-release release: 34.el9 source: rpm version: '9.0' centos-stream-repos: - arch: noarch epoch: null name: centos-stream-repos release: 34.el9 source: rpm version: '9.0' checkpolicy: - arch: x86_64 epoch: null name: checkpolicy release: 1.el9 source: rpm version: '3.6' chrony: - arch: x86_64 epoch: null name: chrony release: 1.el9 source: rpm version: '4.8' cloud-init: - arch: noarch epoch: null name: cloud-init release: 8.el9 source: rpm version: '24.4' cloud-utils-growpart: - arch: x86_64 epoch: null name: cloud-utils-growpart release: 1.el9 source: rpm version: '0.33' cmake-filesystem: - arch: x86_64 epoch: null name: cmake-filesystem release: 3.el9 source: rpm version: 3.31.8 cockpit-bridge: - arch: noarch epoch: null name: cockpit-bridge release: 1.el9 source: rpm version: '348' cockpit-system: - arch: noarch epoch: null name: cockpit-system release: 1.el9 source: rpm version: '348' cockpit-ws: - arch: x86_64 epoch: null name: cockpit-ws release: 1.el9 source: rpm version: '348' cockpit-ws-selinux: - arch: x86_64 epoch: null name: cockpit-ws-selinux release: 1.el9 source: rpm version: '348' conmon: - arch: x86_64 epoch: 3 name: conmon release: 1.el9 source: rpm version: 2.1.13 container-selinux: - arch: noarch epoch: 4 name: container-selinux release: 1.el9 source: rpm version: 2.244.0 containers-common: - arch: x86_64 epoch: 4 name: containers-common release: 134.el9 source: rpm version: '1' containers-common-extra: - arch: x86_64 epoch: 4 name: containers-common-extra release: 134.el9 source: rpm version: '1' coreutils: - arch: x86_64 epoch: null name: coreutils release: 39.el9 source: rpm version: '8.32' coreutils-common: - arch: x86_64 epoch: null name: coreutils-common release: 39.el9 source: rpm version: '8.32' cpio: - arch: x86_64 epoch: null name: cpio release: 16.el9 source: rpm version: '2.13' cpp: - arch: x86_64 epoch: null name: cpp release: 14.el9 source: rpm version: 11.5.0 cracklib: - arch: x86_64 epoch: null name: cracklib release: 28.el9 source: rpm version: 2.9.6 cracklib-dicts: - arch: x86_64 epoch: null name: cracklib-dicts release: 28.el9 source: rpm version: 2.9.6 createrepo_c: - arch: x86_64 epoch: null name: createrepo_c release: 4.el9 source: rpm version: 0.20.1 createrepo_c-libs: - arch: x86_64 epoch: null name: createrepo_c-libs release: 4.el9 source: rpm version: 0.20.1 criu: - arch: x86_64 epoch: null name: criu release: 3.el9 source: rpm version: '3.19' criu-libs: - arch: x86_64 epoch: null name: criu-libs release: 3.el9 source: rpm version: '3.19' cronie: - arch: x86_64 epoch: null name: cronie release: 14.el9 source: rpm version: 1.5.7 cronie-anacron: - arch: x86_64 epoch: null name: cronie-anacron release: 14.el9 source: rpm version: 1.5.7 crontabs: - arch: noarch epoch: null name: crontabs release: 26.20190603git.el9 source: rpm version: '1.11' crun: - arch: x86_64 epoch: null name: crun release: 1.el9 source: rpm version: '1.24' crypto-policies: - arch: noarch epoch: null name: crypto-policies release: 1.gite9c4db2.el9 source: rpm version: '20251126' crypto-policies-scripts: - arch: noarch epoch: null name: crypto-policies-scripts release: 1.gite9c4db2.el9 source: rpm version: '20251126' cryptsetup-libs: - arch: x86_64 epoch: null name: cryptsetup-libs release: 2.el9 source: rpm version: 2.8.1 curl: 
- arch: x86_64 epoch: null name: curl release: 38.el9 source: rpm version: 7.76.1 cyrus-sasl: - arch: x86_64 epoch: null name: cyrus-sasl release: 21.el9 source: rpm version: 2.1.27 cyrus-sasl-devel: - arch: x86_64 epoch: null name: cyrus-sasl-devel release: 21.el9 source: rpm version: 2.1.27 cyrus-sasl-gssapi: - arch: x86_64 epoch: null name: cyrus-sasl-gssapi release: 21.el9 source: rpm version: 2.1.27 cyrus-sasl-lib: - arch: x86_64 epoch: null name: cyrus-sasl-lib release: 21.el9 source: rpm version: 2.1.27 dbus: - arch: x86_64 epoch: 1 name: dbus release: 8.el9 source: rpm version: 1.12.20 dbus-broker: - arch: x86_64 epoch: null name: dbus-broker release: 7.el9 source: rpm version: '28' dbus-common: - arch: noarch epoch: 1 name: dbus-common release: 8.el9 source: rpm version: 1.12.20 dbus-libs: - arch: x86_64 epoch: 1 name: dbus-libs release: 8.el9 source: rpm version: 1.12.20 dbus-tools: - arch: x86_64 epoch: 1 name: dbus-tools release: 8.el9 source: rpm version: 1.12.20 debugedit: - arch: x86_64 epoch: null name: debugedit release: 11.el9 source: rpm version: '5.0' dejavu-sans-fonts: - arch: noarch epoch: null name: dejavu-sans-fonts release: 18.el9 source: rpm version: '2.37' desktop-file-utils: - arch: x86_64 epoch: null name: desktop-file-utils release: 6.el9 source: rpm version: '0.26' device-mapper: - arch: x86_64 epoch: 9 name: device-mapper release: 2.el9 source: rpm version: 1.02.206 device-mapper-libs: - arch: x86_64 epoch: 9 name: device-mapper-libs release: 2.el9 source: rpm version: 1.02.206 dhcp-client: - arch: x86_64 epoch: 12 name: dhcp-client release: 19.b1.el9 source: rpm version: 4.4.2 dhcp-common: - arch: noarch epoch: 12 name: dhcp-common release: 19.b1.el9 source: rpm version: 4.4.2 diffutils: - arch: x86_64 epoch: null name: diffutils release: 12.el9 source: rpm version: '3.7' dnf: - arch: noarch epoch: null name: dnf release: 31.el9 source: rpm version: 4.14.0 dnf-data: - arch: noarch epoch: null name: dnf-data release: 31.el9 source: rpm version: 4.14.0 dnf-plugins-core: - arch: noarch epoch: null name: dnf-plugins-core release: 25.el9 source: rpm version: 4.3.0 dracut: - arch: x86_64 epoch: null name: dracut release: 102.git20250818.el9 source: rpm version: '057' dracut-config-generic: - arch: x86_64 epoch: null name: dracut-config-generic release: 102.git20250818.el9 source: rpm version: '057' dracut-network: - arch: x86_64 epoch: null name: dracut-network release: 102.git20250818.el9 source: rpm version: '057' dracut-squash: - arch: x86_64 epoch: null name: dracut-squash release: 102.git20250818.el9 source: rpm version: '057' dwz: - arch: x86_64 epoch: null name: dwz release: 1.el9 source: rpm version: '0.16' e2fsprogs: - arch: x86_64 epoch: null name: e2fsprogs release: 8.el9 source: rpm version: 1.46.5 e2fsprogs-libs: - arch: x86_64 epoch: null name: e2fsprogs-libs release: 8.el9 source: rpm version: 1.46.5 ed: - arch: x86_64 epoch: null name: ed release: 12.el9 source: rpm version: 1.14.2 efi-srpm-macros: - arch: noarch epoch: null name: efi-srpm-macros release: 4.el9 source: rpm version: '6' elfutils: - arch: x86_64 epoch: null name: elfutils release: 1.el9 source: rpm version: '0.194' elfutils-debuginfod-client: - arch: x86_64 epoch: null name: elfutils-debuginfod-client release: 1.el9 source: rpm version: '0.194' elfutils-default-yama-scope: - arch: noarch epoch: null name: elfutils-default-yama-scope release: 1.el9 source: rpm version: '0.194' elfutils-libelf: - arch: x86_64 epoch: null name: elfutils-libelf release: 1.el9 source: rpm version: 
'0.194' elfutils-libs: - arch: x86_64 epoch: null name: elfutils-libs release: 1.el9 source: rpm version: '0.194' emacs-filesystem: - arch: noarch epoch: 1 name: emacs-filesystem release: 18.el9 source: rpm version: '27.2' enchant: - arch: x86_64 epoch: 1 name: enchant release: 30.el9 source: rpm version: 1.6.0 ethtool: - arch: x86_64 epoch: 2 name: ethtool release: 2.el9 source: rpm version: '6.15' expat: - arch: x86_64 epoch: null name: expat release: 6.el9 source: rpm version: 2.5.0 expect: - arch: x86_64 epoch: null name: expect release: 16.el9 source: rpm version: 5.45.4 file: - arch: x86_64 epoch: null name: file release: 16.el9 source: rpm version: '5.39' file-libs: - arch: x86_64 epoch: null name: file-libs release: 16.el9 source: rpm version: '5.39' filesystem: - arch: x86_64 epoch: null name: filesystem release: 5.el9 source: rpm version: '3.16' findutils: - arch: x86_64 epoch: 1 name: findutils release: 7.el9 source: rpm version: 4.8.0 fonts-filesystem: - arch: noarch epoch: 1 name: fonts-filesystem release: 7.el9.1 source: rpm version: 2.0.5 fonts-srpm-macros: - arch: noarch epoch: 1 name: fonts-srpm-macros release: 7.el9.1 source: rpm version: 2.0.5 fuse-common: - arch: x86_64 epoch: null name: fuse-common release: 9.el9 source: rpm version: 3.10.2 fuse-libs: - arch: x86_64 epoch: null name: fuse-libs release: 17.el9 source: rpm version: 2.9.9 fuse-overlayfs: - arch: x86_64 epoch: null name: fuse-overlayfs release: 1.el9 source: rpm version: '1.16' fuse3: - arch: x86_64 epoch: null name: fuse3 release: 9.el9 source: rpm version: 3.10.2 fuse3-libs: - arch: x86_64 epoch: null name: fuse3-libs release: 9.el9 source: rpm version: 3.10.2 gawk: - arch: x86_64 epoch: null name: gawk release: 6.el9 source: rpm version: 5.1.0 gawk-all-langpacks: - arch: x86_64 epoch: null name: gawk-all-langpacks release: 6.el9 source: rpm version: 5.1.0 gcc: - arch: x86_64 epoch: null name: gcc release: 14.el9 source: rpm version: 11.5.0 gcc-c++: - arch: x86_64 epoch: null name: gcc-c++ release: 14.el9 source: rpm version: 11.5.0 gcc-plugin-annobin: - arch: x86_64 epoch: null name: gcc-plugin-annobin release: 14.el9 source: rpm version: 11.5.0 gdb-minimal: - arch: x86_64 epoch: null name: gdb-minimal release: 2.el9 source: rpm version: '16.3' gdbm-libs: - arch: x86_64 epoch: 1 name: gdbm-libs release: 1.el9 source: rpm version: '1.23' gdisk: - arch: x86_64 epoch: null name: gdisk release: 5.el9 source: rpm version: 1.0.7 gdk-pixbuf2: - arch: x86_64 epoch: null name: gdk-pixbuf2 release: 6.el9 source: rpm version: 2.42.6 geolite2-city: - arch: noarch epoch: null name: geolite2-city release: 6.el9 source: rpm version: '20191217' geolite2-country: - arch: noarch epoch: null name: geolite2-country release: 6.el9 source: rpm version: '20191217' gettext: - arch: x86_64 epoch: null name: gettext release: 8.el9 source: rpm version: '0.21' gettext-libs: - arch: x86_64 epoch: null name: gettext-libs release: 8.el9 source: rpm version: '0.21' ghc-srpm-macros: - arch: noarch epoch: null name: ghc-srpm-macros release: 6.el9 source: rpm version: 1.5.0 git: - arch: x86_64 epoch: null name: git release: 1.el9 source: rpm version: 2.47.3 git-core: - arch: x86_64 epoch: null name: git-core release: 1.el9 source: rpm version: 2.47.3 git-core-doc: - arch: noarch epoch: null name: git-core-doc release: 1.el9 source: rpm version: 2.47.3 glib-networking: - arch: x86_64 epoch: null name: glib-networking release: 3.el9 source: rpm version: 2.68.3 glib2: - arch: x86_64 epoch: null name: glib2 release: 18.el9 source: rpm 
version: 2.68.4 glibc: - arch: x86_64 epoch: null name: glibc release: 245.el9 source: rpm version: '2.34' glibc-common: - arch: x86_64 epoch: null name: glibc-common release: 245.el9 source: rpm version: '2.34' glibc-devel: - arch: x86_64 epoch: null name: glibc-devel release: 245.el9 source: rpm version: '2.34' glibc-gconv-extra: - arch: x86_64 epoch: null name: glibc-gconv-extra release: 245.el9 source: rpm version: '2.34' glibc-headers: - arch: x86_64 epoch: null name: glibc-headers release: 245.el9 source: rpm version: '2.34' glibc-langpack-en: - arch: x86_64 epoch: null name: glibc-langpack-en release: 245.el9 source: rpm version: '2.34' gmp: - arch: x86_64 epoch: 1 name: gmp release: 13.el9 source: rpm version: 6.2.0 gnupg2: - arch: x86_64 epoch: null name: gnupg2 release: 5.el9 source: rpm version: 2.3.3 gnutls: - arch: x86_64 epoch: null name: gnutls release: 2.el9 source: rpm version: 3.8.10 go-srpm-macros: - arch: noarch epoch: null name: go-srpm-macros release: 1.el9 source: rpm version: 3.8.1 gobject-introspection: - arch: x86_64 epoch: null name: gobject-introspection release: 11.el9 source: rpm version: 1.68.0 gpg-pubkey: - arch: null epoch: null name: gpg-pubkey release: 5ccc5b19 source: rpm version: 8483c65d gpgme: - arch: x86_64 epoch: null name: gpgme release: 6.el9 source: rpm version: 1.15.1 grep: - arch: x86_64 epoch: null name: grep release: 5.el9 source: rpm version: '3.6' groff-base: - arch: x86_64 epoch: null name: groff-base release: 10.el9 source: rpm version: 1.22.4 grub2-common: - arch: noarch epoch: 1 name: grub2-common release: 120.el9 source: rpm version: '2.06' grub2-pc: - arch: x86_64 epoch: 1 name: grub2-pc release: 120.el9 source: rpm version: '2.06' grub2-pc-modules: - arch: noarch epoch: 1 name: grub2-pc-modules release: 120.el9 source: rpm version: '2.06' grub2-tools: - arch: x86_64 epoch: 1 name: grub2-tools release: 120.el9 source: rpm version: '2.06' grub2-tools-minimal: - arch: x86_64 epoch: 1 name: grub2-tools-minimal release: 120.el9 source: rpm version: '2.06' grubby: - arch: x86_64 epoch: null name: grubby release: 69.el9 source: rpm version: '8.40' gsettings-desktop-schemas: - arch: x86_64 epoch: null name: gsettings-desktop-schemas release: 8.el9 source: rpm version: '40.0' gssproxy: - arch: x86_64 epoch: null name: gssproxy release: 7.el9 source: rpm version: 0.8.4 gzip: - arch: x86_64 epoch: null name: gzip release: 1.el9 source: rpm version: '1.12' hostname: - arch: x86_64 epoch: null name: hostname release: 6.el9 source: rpm version: '3.23' httpd-tools: - arch: x86_64 epoch: null name: httpd-tools release: 10.el9 source: rpm version: 2.4.62 hunspell: - arch: x86_64 epoch: null name: hunspell release: 11.el9 source: rpm version: 1.7.0 hunspell-en-GB: - arch: noarch epoch: null name: hunspell-en-GB release: 20.el9 source: rpm version: 0.20140811.1 hunspell-en-US: - arch: noarch epoch: null name: hunspell-en-US release: 20.el9 source: rpm version: 0.20140811.1 hunspell-filesystem: - arch: x86_64 epoch: null name: hunspell-filesystem release: 11.el9 source: rpm version: 1.7.0 hwdata: - arch: noarch epoch: null name: hwdata release: 9.20.el9 source: rpm version: '0.348' ima-evm-utils: - arch: x86_64 epoch: null name: ima-evm-utils release: 2.el9 source: rpm version: 1.6.2 info: - arch: x86_64 epoch: null name: info release: 15.el9 source: rpm version: '6.7' inih: - arch: x86_64 epoch: null name: inih release: 6.el9 source: rpm version: '49' initscripts-rename-device: - arch: x86_64 epoch: null name: initscripts-rename-device release: 4.el9 
source: rpm version: 10.11.8 initscripts-service: - arch: noarch epoch: null name: initscripts-service release: 4.el9 source: rpm version: 10.11.8 ipcalc: - arch: x86_64 epoch: null name: ipcalc release: 5.el9 source: rpm version: 1.0.0 iproute: - arch: x86_64 epoch: null name: iproute release: 1.el9 source: rpm version: 6.17.0 iproute-tc: - arch: x86_64 epoch: null name: iproute-tc release: 1.el9 source: rpm version: 6.17.0 iptables-libs: - arch: x86_64 epoch: null name: iptables-libs release: 11.el9 source: rpm version: 1.8.10 iptables-nft: - arch: x86_64 epoch: null name: iptables-nft release: 11.el9 source: rpm version: 1.8.10 iptables-nft-services: - arch: noarch epoch: null name: iptables-nft-services release: 11.el9 source: rpm version: 1.8.10 iputils: - arch: x86_64 epoch: null name: iputils release: 15.el9 source: rpm version: '20210202' irqbalance: - arch: x86_64 epoch: 2 name: irqbalance release: 5.el9 source: rpm version: 1.9.4 jansson: - arch: x86_64 epoch: null name: jansson release: 1.el9 source: rpm version: '2.14' jq: - arch: x86_64 epoch: null name: jq release: 19.el9 source: rpm version: '1.6' json-c: - arch: x86_64 epoch: null name: json-c release: 11.el9 source: rpm version: '0.14' json-glib: - arch: x86_64 epoch: null name: json-glib release: 1.el9 source: rpm version: 1.6.6 kbd: - arch: x86_64 epoch: null name: kbd release: 11.el9 source: rpm version: 2.4.0 kbd-legacy: - arch: noarch epoch: null name: kbd-legacy release: 11.el9 source: rpm version: 2.4.0 kbd-misc: - arch: noarch epoch: null name: kbd-misc release: 11.el9 source: rpm version: 2.4.0 kernel: - arch: x86_64 epoch: null name: kernel release: 661.el9 source: rpm version: 5.14.0 kernel-core: - arch: x86_64 epoch: null name: kernel-core release: 661.el9 source: rpm version: 5.14.0 kernel-headers: - arch: x86_64 epoch: null name: kernel-headers release: 661.el9 source: rpm version: 5.14.0 kernel-modules: - arch: x86_64 epoch: null name: kernel-modules release: 661.el9 source: rpm version: 5.14.0 kernel-modules-core: - arch: x86_64 epoch: null name: kernel-modules-core release: 661.el9 source: rpm version: 5.14.0 kernel-srpm-macros: - arch: noarch epoch: null name: kernel-srpm-macros release: 14.el9 source: rpm version: '1.0' kernel-tools: - arch: x86_64 epoch: null name: kernel-tools release: 661.el9 source: rpm version: 5.14.0 kernel-tools-libs: - arch: x86_64 epoch: null name: kernel-tools-libs release: 661.el9 source: rpm version: 5.14.0 kexec-tools: - arch: x86_64 epoch: null name: kexec-tools release: 14.el9 source: rpm version: 2.0.29 keyutils: - arch: x86_64 epoch: null name: keyutils release: 1.el9 source: rpm version: 1.6.3 keyutils-libs: - arch: x86_64 epoch: null name: keyutils-libs release: 1.el9 source: rpm version: 1.6.3 kmod: - arch: x86_64 epoch: null name: kmod release: 11.el9 source: rpm version: '28' kmod-libs: - arch: x86_64 epoch: null name: kmod-libs release: 11.el9 source: rpm version: '28' kpartx: - arch: x86_64 epoch: null name: kpartx release: 42.el9 source: rpm version: 0.8.7 krb5-libs: - arch: x86_64 epoch: null name: krb5-libs release: 8.el9 source: rpm version: 1.21.1 langpacks-core-en_GB: - arch: noarch epoch: null name: langpacks-core-en_GB release: 16.el9 source: rpm version: '3.0' langpacks-core-font-en: - arch: noarch epoch: null name: langpacks-core-font-en release: 16.el9 source: rpm version: '3.0' langpacks-en_GB: - arch: noarch epoch: null name: langpacks-en_GB release: 16.el9 source: rpm version: '3.0' less: - arch: x86_64 epoch: null name: less release: 6.el9 source: 
rpm version: '590' libacl: - arch: x86_64 epoch: null name: libacl release: 4.el9 source: rpm version: 2.3.1 libappstream-glib: - arch: x86_64 epoch: null name: libappstream-glib release: 5.el9 source: rpm version: 0.7.18 libarchive: - arch: x86_64 epoch: null name: libarchive release: 6.el9 source: rpm version: 3.5.3 libassuan: - arch: x86_64 epoch: null name: libassuan release: 3.el9 source: rpm version: 2.5.5 libattr: - arch: x86_64 epoch: null name: libattr release: 3.el9 source: rpm version: 2.5.1 libbasicobjects: - arch: x86_64 epoch: null name: libbasicobjects release: 53.el9 source: rpm version: 0.1.1 libblkid: - arch: x86_64 epoch: null name: libblkid release: 21.el9 source: rpm version: 2.37.4 libbpf: - arch: x86_64 epoch: 2 name: libbpf release: 3.el9 source: rpm version: 1.5.0 libbrotli: - arch: x86_64 epoch: null name: libbrotli release: 7.el9 source: rpm version: 1.0.9 libburn: - arch: x86_64 epoch: null name: libburn release: 5.el9 source: rpm version: 1.5.4 libcap: - arch: x86_64 epoch: null name: libcap release: 10.el9 source: rpm version: '2.48' libcap-ng: - arch: x86_64 epoch: null name: libcap-ng release: 7.el9 source: rpm version: 0.8.2 libcbor: - arch: x86_64 epoch: null name: libcbor release: 5.el9 source: rpm version: 0.7.0 libcollection: - arch: x86_64 epoch: null name: libcollection release: 53.el9 source: rpm version: 0.7.0 libcom_err: - arch: x86_64 epoch: null name: libcom_err release: 8.el9 source: rpm version: 1.46.5 libcomps: - arch: x86_64 epoch: null name: libcomps release: 1.el9 source: rpm version: 0.1.18 libcurl: - arch: x86_64 epoch: null name: libcurl release: 38.el9 source: rpm version: 7.76.1 libdaemon: - arch: x86_64 epoch: null name: libdaemon release: 23.el9 source: rpm version: '0.14' libdb: - arch: x86_64 epoch: null name: libdb release: 57.el9 source: rpm version: 5.3.28 libdhash: - arch: x86_64 epoch: null name: libdhash release: 53.el9 source: rpm version: 0.5.0 libdnf: - arch: x86_64 epoch: null name: libdnf release: 16.el9 source: rpm version: 0.69.0 libeconf: - arch: x86_64 epoch: null name: libeconf release: 5.el9 source: rpm version: 0.4.1 libedit: - arch: x86_64 epoch: null name: libedit release: 38.20210216cvs.el9 source: rpm version: '3.1' libestr: - arch: x86_64 epoch: null name: libestr release: 4.el9 source: rpm version: 0.1.11 libev: - arch: x86_64 epoch: null name: libev release: 6.el9 source: rpm version: '4.33' libevent: - arch: x86_64 epoch: null name: libevent release: 8.el9 source: rpm version: 2.1.12 libfastjson: - arch: x86_64 epoch: null name: libfastjson release: 5.el9 source: rpm version: 0.99.9 libfdisk: - arch: x86_64 epoch: null name: libfdisk release: 21.el9 source: rpm version: 2.37.4 libffi: - arch: x86_64 epoch: null name: libffi release: 8.el9 source: rpm version: 3.4.2 libffi-devel: - arch: x86_64 epoch: null name: libffi-devel release: 8.el9 source: rpm version: 3.4.2 libfido2: - arch: x86_64 epoch: null name: libfido2 release: 2.el9 source: rpm version: 1.13.0 libgcc: - arch: x86_64 epoch: null name: libgcc release: 14.el9 source: rpm version: 11.5.0 libgcrypt: - arch: x86_64 epoch: null name: libgcrypt release: 11.el9 source: rpm version: 1.10.0 libgomp: - arch: x86_64 epoch: null name: libgomp release: 14.el9 source: rpm version: 11.5.0 libgpg-error: - arch: x86_64 epoch: null name: libgpg-error release: 5.el9 source: rpm version: '1.42' libgpg-error-devel: - arch: x86_64 epoch: null name: libgpg-error-devel release: 5.el9 source: rpm version: '1.42' libibverbs: - arch: x86_64 epoch: null name: libibverbs 
release: 2.el9 source: rpm version: '57.0' libicu: - arch: x86_64 epoch: null name: libicu release: 10.el9 source: rpm version: '67.1' libidn2: - arch: x86_64 epoch: null name: libidn2 release: 7.el9 source: rpm version: 2.3.0 libini_config: - arch: x86_64 epoch: null name: libini_config release: 53.el9 source: rpm version: 1.3.1 libisoburn: - arch: x86_64 epoch: null name: libisoburn release: 5.el9 source: rpm version: 1.5.4 libisofs: - arch: x86_64 epoch: null name: libisofs release: 4.el9 source: rpm version: 1.5.4 libjpeg-turbo: - arch: x86_64 epoch: null name: libjpeg-turbo release: 7.el9 source: rpm version: 2.0.90 libkcapi: - arch: x86_64 epoch: null name: libkcapi release: 2.el9 source: rpm version: 1.4.0 libkcapi-hmaccalc: - arch: x86_64 epoch: null name: libkcapi-hmaccalc release: 2.el9 source: rpm version: 1.4.0 libksba: - arch: x86_64 epoch: null name: libksba release: 7.el9 source: rpm version: 1.5.1 libldb: - arch: x86_64 epoch: 0 name: libldb release: 2.el9 source: rpm version: 4.23.4 libmaxminddb: - arch: x86_64 epoch: null name: libmaxminddb release: 4.el9 source: rpm version: 1.5.2 libmnl: - arch: x86_64 epoch: null name: libmnl release: 16.el9 source: rpm version: 1.0.4 libmodulemd: - arch: x86_64 epoch: null name: libmodulemd release: 2.el9 source: rpm version: 2.13.0 libmount: - arch: x86_64 epoch: null name: libmount release: 21.el9 source: rpm version: 2.37.4 libmpc: - arch: x86_64 epoch: null name: libmpc release: 4.el9 source: rpm version: 1.2.1 libndp: - arch: x86_64 epoch: null name: libndp release: 1.el9 source: rpm version: '1.9' libnet: - arch: x86_64 epoch: null name: libnet release: 7.el9 source: rpm version: '1.2' libnetfilter_conntrack: - arch: x86_64 epoch: null name: libnetfilter_conntrack release: 1.el9 source: rpm version: 1.0.9 libnfnetlink: - arch: x86_64 epoch: null name: libnfnetlink release: 23.el9 source: rpm version: 1.0.1 libnfsidmap: - arch: x86_64 epoch: 1 name: libnfsidmap release: 41.el9 source: rpm version: 2.5.4 libnftnl: - arch: x86_64 epoch: null name: libnftnl release: 4.el9 source: rpm version: 1.2.6 libnghttp2: - arch: x86_64 epoch: null name: libnghttp2 release: 6.el9 source: rpm version: 1.43.0 libnl3: - arch: x86_64 epoch: null name: libnl3 release: 1.el9 source: rpm version: 3.11.0 libnl3-cli: - arch: x86_64 epoch: null name: libnl3-cli release: 1.el9 source: rpm version: 3.11.0 libosinfo: - arch: x86_64 epoch: null name: libosinfo release: 1.el9 source: rpm version: 1.10.0 libpath_utils: - arch: x86_64 epoch: null name: libpath_utils release: 53.el9 source: rpm version: 0.2.1 libpcap: - arch: x86_64 epoch: 14 name: libpcap release: 4.el9 source: rpm version: 1.10.0 libpipeline: - arch: x86_64 epoch: null name: libpipeline release: 4.el9 source: rpm version: 1.5.3 libpkgconf: - arch: x86_64 epoch: null name: libpkgconf release: 10.el9 source: rpm version: 1.7.3 libpng: - arch: x86_64 epoch: 2 name: libpng release: 12.el9 source: rpm version: 1.6.37 libproxy: - arch: x86_64 epoch: null name: libproxy release: 35.el9 source: rpm version: 0.4.15 libproxy-webkitgtk4: - arch: x86_64 epoch: null name: libproxy-webkitgtk4 release: 35.el9 source: rpm version: 0.4.15 libpsl: - arch: x86_64 epoch: null name: libpsl release: 5.el9 source: rpm version: 0.21.1 libpwquality: - arch: x86_64 epoch: null name: libpwquality release: 8.el9 source: rpm version: 1.4.4 libref_array: - arch: x86_64 epoch: null name: libref_array release: 53.el9 source: rpm version: 0.1.5 librepo: - arch: x86_64 epoch: null name: librepo release: 1.el9 source: rpm 
version: 1.19.0 libreport-filesystem: - arch: noarch epoch: null name: libreport-filesystem release: 6.el9 source: rpm version: 2.15.2 libseccomp: - arch: x86_64 epoch: null name: libseccomp release: 2.el9 source: rpm version: 2.5.2 libselinux: - arch: x86_64 epoch: null name: libselinux release: 3.el9 source: rpm version: '3.6' libselinux-utils: - arch: x86_64 epoch: null name: libselinux-utils release: 3.el9 source: rpm version: '3.6' libsemanage: - arch: x86_64 epoch: null name: libsemanage release: 5.el9 source: rpm version: '3.6' libsepol: - arch: x86_64 epoch: null name: libsepol release: 3.el9 source: rpm version: '3.6' libsigsegv: - arch: x86_64 epoch: null name: libsigsegv release: 4.el9 source: rpm version: '2.13' libslirp: - arch: x86_64 epoch: null name: libslirp release: 8.el9 source: rpm version: 4.4.0 libsmartcols: - arch: x86_64 epoch: null name: libsmartcols release: 21.el9 source: rpm version: 2.37.4 libsolv: - arch: x86_64 epoch: null name: libsolv release: 3.el9 source: rpm version: 0.7.24 libsoup: - arch: x86_64 epoch: null name: libsoup release: 10.el9 source: rpm version: 2.72.0 libss: - arch: x86_64 epoch: null name: libss release: 8.el9 source: rpm version: 1.46.5 libssh: - arch: x86_64 epoch: null name: libssh release: 17.el9 source: rpm version: 0.10.4 libssh-config: - arch: noarch epoch: null name: libssh-config release: 17.el9 source: rpm version: 0.10.4 libsss_certmap: - arch: x86_64 epoch: null name: libsss_certmap release: 5.el9 source: rpm version: 2.9.7 libsss_idmap: - arch: x86_64 epoch: null name: libsss_idmap release: 5.el9 source: rpm version: 2.9.7 libsss_nss_idmap: - arch: x86_64 epoch: null name: libsss_nss_idmap release: 5.el9 source: rpm version: 2.9.7 libsss_sudo: - arch: x86_64 epoch: null name: libsss_sudo release: 5.el9 source: rpm version: 2.9.7 libstdc++: - arch: x86_64 epoch: null name: libstdc++ release: 14.el9 source: rpm version: 11.5.0 libstdc++-devel: - arch: x86_64 epoch: null name: libstdc++-devel release: 14.el9 source: rpm version: 11.5.0 libstemmer: - arch: x86_64 epoch: null name: libstemmer release: 18.585svn.el9 source: rpm version: '0' libsysfs: - arch: x86_64 epoch: null name: libsysfs release: 11.el9 source: rpm version: 2.1.1 libtalloc: - arch: x86_64 epoch: null name: libtalloc release: 1.el9 source: rpm version: 2.4.3 libtasn1: - arch: x86_64 epoch: null name: libtasn1 release: 9.el9 source: rpm version: 4.16.0 libtdb: - arch: x86_64 epoch: null name: libtdb release: 1.el9 source: rpm version: 1.4.14 libteam: - arch: x86_64 epoch: null name: libteam release: 16.el9 source: rpm version: '1.31' libtevent: - arch: x86_64 epoch: null name: libtevent release: 1.el9 source: rpm version: 0.17.1 libtirpc: - arch: x86_64 epoch: null name: libtirpc release: 9.el9 source: rpm version: 1.3.3 libtool-ltdl: - arch: x86_64 epoch: null name: libtool-ltdl release: 46.el9 source: rpm version: 2.4.6 libunistring: - arch: x86_64 epoch: null name: libunistring release: 15.el9 source: rpm version: 0.9.10 liburing: - arch: x86_64 epoch: null name: liburing release: 1.el9 source: rpm version: '2.12' libuser: - arch: x86_64 epoch: null name: libuser release: 17.el9 source: rpm version: '0.63' libutempter: - arch: x86_64 epoch: null name: libutempter release: 6.el9 source: rpm version: 1.2.1 libuuid: - arch: x86_64 epoch: null name: libuuid release: 21.el9 source: rpm version: 2.37.4 libverto: - arch: x86_64 epoch: null name: libverto release: 3.el9 source: rpm version: 0.3.2 libverto-libev: - arch: x86_64 epoch: null name: libverto-libev release: 
3.el9 source: rpm version: 0.3.2 libvirt-client: - arch: x86_64 epoch: null name: libvirt-client release: 2.el9 source: rpm version: 11.10.0 libvirt-libs: - arch: x86_64 epoch: null name: libvirt-libs release: 2.el9 source: rpm version: 11.10.0 libxcrypt: - arch: x86_64 epoch: null name: libxcrypt release: 3.el9 source: rpm version: 4.4.18 libxcrypt-compat: - arch: x86_64 epoch: null name: libxcrypt-compat release: 3.el9 source: rpm version: 4.4.18 libxcrypt-devel: - arch: x86_64 epoch: null name: libxcrypt-devel release: 3.el9 source: rpm version: 4.4.18 libxml2: - arch: x86_64 epoch: null name: libxml2 release: 14.el9 source: rpm version: 2.9.13 libxml2-devel: - arch: x86_64 epoch: null name: libxml2-devel release: 14.el9 source: rpm version: 2.9.13 libxslt: - arch: x86_64 epoch: null name: libxslt release: 12.el9 source: rpm version: 1.1.34 libxslt-devel: - arch: x86_64 epoch: null name: libxslt-devel release: 12.el9 source: rpm version: 1.1.34 libyaml: - arch: x86_64 epoch: null name: libyaml release: 7.el9 source: rpm version: 0.2.5 libzstd: - arch: x86_64 epoch: null name: libzstd release: 1.el9 source: rpm version: 1.5.5 llvm-filesystem: - arch: x86_64 epoch: null name: llvm-filesystem release: 1.el9 source: rpm version: 21.1.7 llvm-libs: - arch: x86_64 epoch: null name: llvm-libs release: 1.el9 source: rpm version: 21.1.7 lmdb-libs: - arch: x86_64 epoch: null name: lmdb-libs release: 3.el9 source: rpm version: 0.9.29 logrotate: - arch: x86_64 epoch: null name: logrotate release: 12.el9 source: rpm version: 3.18.0 lshw: - arch: x86_64 epoch: null name: lshw release: 4.el9 source: rpm version: B.02.20 lsscsi: - arch: x86_64 epoch: null name: lsscsi release: 6.el9 source: rpm version: '0.32' lua-libs: - arch: x86_64 epoch: null name: lua-libs release: 4.el9 source: rpm version: 5.4.4 lua-srpm-macros: - arch: noarch epoch: null name: lua-srpm-macros release: 6.el9 source: rpm version: '1' lz4-libs: - arch: x86_64 epoch: null name: lz4-libs release: 5.el9 source: rpm version: 1.9.3 lzo: - arch: x86_64 epoch: null name: lzo release: 7.el9 source: rpm version: '2.10' make: - arch: x86_64 epoch: 1 name: make release: 8.el9 source: rpm version: '4.3' man-db: - arch: x86_64 epoch: null name: man-db release: 9.el9 source: rpm version: 2.9.3 microcode_ctl: - arch: noarch epoch: 4 name: microcode_ctl release: 1.el9 source: rpm version: '20251111' mpfr: - arch: x86_64 epoch: null name: mpfr release: 8.el9 source: rpm version: 4.1.0 ncurses: - arch: x86_64 epoch: null name: ncurses release: 12.20210508.el9 source: rpm version: '6.2' ncurses-base: - arch: noarch epoch: null name: ncurses-base release: 12.20210508.el9 source: rpm version: '6.2' ncurses-c++-libs: - arch: x86_64 epoch: null name: ncurses-c++-libs release: 12.20210508.el9 source: rpm version: '6.2' ncurses-devel: - arch: x86_64 epoch: null name: ncurses-devel release: 12.20210508.el9 source: rpm version: '6.2' ncurses-libs: - arch: x86_64 epoch: null name: ncurses-libs release: 12.20210508.el9 source: rpm version: '6.2' netavark: - arch: x86_64 epoch: 2 name: netavark release: 1.el9 source: rpm version: 1.16.0 nettle: - arch: x86_64 epoch: null name: nettle release: 1.el9 source: rpm version: 3.10.1 newt: - arch: x86_64 epoch: null name: newt release: 11.el9 source: rpm version: 0.52.21 nfs-utils: - arch: x86_64 epoch: 1 name: nfs-utils release: 41.el9 source: rpm version: 2.5.4 nftables: - arch: x86_64 epoch: 1 name: nftables release: 6.el9 source: rpm version: 1.0.9 npth: - arch: x86_64 epoch: null name: npth release: 8.el9 source: 
rpm version: '1.6' numactl-libs: - arch: x86_64 epoch: null name: numactl-libs release: 3.el9 source: rpm version: 2.0.19 ocaml-srpm-macros: - arch: noarch epoch: null name: ocaml-srpm-macros release: 6.el9 source: rpm version: '6' oddjob: - arch: x86_64 epoch: null name: oddjob release: 7.el9 source: rpm version: 0.34.7 oddjob-mkhomedir: - arch: x86_64 epoch: null name: oddjob-mkhomedir release: 7.el9 source: rpm version: 0.34.7 oniguruma: - arch: x86_64 epoch: null name: oniguruma release: 1.el9.6 source: rpm version: 6.9.6 openblas-srpm-macros: - arch: noarch epoch: null name: openblas-srpm-macros release: 11.el9 source: rpm version: '2' openldap: - arch: x86_64 epoch: null name: openldap release: 4.el9 source: rpm version: 2.6.8 openldap-devel: - arch: x86_64 epoch: null name: openldap-devel release: 4.el9 source: rpm version: 2.6.8 openssh: - arch: x86_64 epoch: null name: openssh release: 3.el9 source: rpm version: 9.9p1 openssh-clients: - arch: x86_64 epoch: null name: openssh-clients release: 3.el9 source: rpm version: 9.9p1 openssh-server: - arch: x86_64 epoch: null name: openssh-server release: 3.el9 source: rpm version: 9.9p1 openssl: - arch: x86_64 epoch: 1 name: openssl release: 6.el9 source: rpm version: 3.5.1 openssl-devel: - arch: x86_64 epoch: 1 name: openssl-devel release: 6.el9 source: rpm version: 3.5.1 openssl-fips-provider: - arch: x86_64 epoch: 1 name: openssl-fips-provider release: 6.el9 source: rpm version: 3.5.1 openssl-libs: - arch: x86_64 epoch: 1 name: openssl-libs release: 6.el9 source: rpm version: 3.5.1 os-prober: - arch: x86_64 epoch: null name: os-prober release: 12.el9 source: rpm version: '1.77' osinfo-db: - arch: noarch epoch: null name: osinfo-db release: 1.el9 source: rpm version: '20250606' osinfo-db-tools: - arch: x86_64 epoch: null name: osinfo-db-tools release: 1.el9 source: rpm version: 1.10.0 p11-kit: - arch: x86_64 epoch: null name: p11-kit release: 1.el9 source: rpm version: 0.25.10 p11-kit-trust: - arch: x86_64 epoch: null name: p11-kit-trust release: 1.el9 source: rpm version: 0.25.10 pam: - arch: x86_64 epoch: null name: pam release: 28.el9 source: rpm version: 1.5.1 parted: - arch: x86_64 epoch: null name: parted release: 3.el9 source: rpm version: '3.5' passt: - arch: x86_64 epoch: null name: passt release: 2.el9 source: rpm version: 0^20251210.gd04c480 passt-selinux: - arch: noarch epoch: null name: passt-selinux release: 2.el9 source: rpm version: 0^20251210.gd04c480 passwd: - arch: x86_64 epoch: null name: passwd release: 12.el9 source: rpm version: '0.80' patch: - arch: x86_64 epoch: null name: patch release: 16.el9 source: rpm version: 2.7.6 pciutils-libs: - arch: x86_64 epoch: null name: pciutils-libs release: 7.el9 source: rpm version: 3.7.0 pcre: - arch: x86_64 epoch: null name: pcre release: 4.el9 source: rpm version: '8.44' pcre2: - arch: x86_64 epoch: null name: pcre2 release: 6.el9 source: rpm version: '10.40' pcre2-syntax: - arch: noarch epoch: null name: pcre2-syntax release: 6.el9 source: rpm version: '10.40' perl-AutoLoader: - arch: noarch epoch: 0 name: perl-AutoLoader release: 483.el9 source: rpm version: '5.74' perl-B: - arch: x86_64 epoch: 0 name: perl-B release: 483.el9 source: rpm version: '1.80' perl-Carp: - arch: noarch epoch: null name: perl-Carp release: 460.el9 source: rpm version: '1.50' perl-Class-Struct: - arch: noarch epoch: 0 name: perl-Class-Struct release: 483.el9 source: rpm version: '0.66' perl-Data-Dumper: - arch: x86_64 epoch: null name: perl-Data-Dumper release: 462.el9 source: rpm version: '2.174' 
perl-Digest: - arch: noarch epoch: null name: perl-Digest release: 4.el9 source: rpm version: '1.19' perl-Digest-MD5: - arch: x86_64 epoch: null name: perl-Digest-MD5 release: 4.el9 source: rpm version: '2.58' perl-DynaLoader: - arch: x86_64 epoch: 0 name: perl-DynaLoader release: 483.el9 source: rpm version: '1.47' perl-Encode: - arch: x86_64 epoch: 4 name: perl-Encode release: 462.el9 source: rpm version: '3.08' perl-Errno: - arch: x86_64 epoch: 0 name: perl-Errno release: 483.el9 source: rpm version: '1.30' perl-Error: - arch: noarch epoch: 1 name: perl-Error release: 7.el9 source: rpm version: '0.17029' perl-Exporter: - arch: noarch epoch: null name: perl-Exporter release: 461.el9 source: rpm version: '5.74' perl-Fcntl: - arch: x86_64 epoch: 0 name: perl-Fcntl release: 483.el9 source: rpm version: '1.13' perl-File-Basename: - arch: noarch epoch: 0 name: perl-File-Basename release: 483.el9 source: rpm version: '2.85' perl-File-Find: - arch: noarch epoch: 0 name: perl-File-Find release: 483.el9 source: rpm version: '1.37' perl-File-Path: - arch: noarch epoch: null name: perl-File-Path release: 4.el9 source: rpm version: '2.18' perl-File-Temp: - arch: noarch epoch: 1 name: perl-File-Temp release: 4.el9 source: rpm version: 0.231.100 perl-File-stat: - arch: noarch epoch: 0 name: perl-File-stat release: 483.el9 source: rpm version: '1.09' perl-FileHandle: - arch: noarch epoch: 0 name: perl-FileHandle release: 483.el9 source: rpm version: '2.03' perl-Getopt-Long: - arch: noarch epoch: 1 name: perl-Getopt-Long release: 4.el9 source: rpm version: '2.52' perl-Getopt-Std: - arch: noarch epoch: 0 name: perl-Getopt-Std release: 483.el9 source: rpm version: '1.12' perl-Git: - arch: noarch epoch: null name: perl-Git release: 1.el9 source: rpm version: 2.47.3 perl-HTTP-Tiny: - arch: noarch epoch: null name: perl-HTTP-Tiny release: 462.el9 source: rpm version: '0.076' perl-IO: - arch: x86_64 epoch: 0 name: perl-IO release: 483.el9 source: rpm version: '1.43' perl-IO-Socket-IP: - arch: noarch epoch: null name: perl-IO-Socket-IP release: 5.el9 source: rpm version: '0.41' perl-IO-Socket-SSL: - arch: noarch epoch: null name: perl-IO-Socket-SSL release: 2.el9 source: rpm version: '2.073' perl-IPC-Open3: - arch: noarch epoch: 0 name: perl-IPC-Open3 release: 483.el9 source: rpm version: '1.21' perl-MIME-Base64: - arch: x86_64 epoch: null name: perl-MIME-Base64 release: 4.el9 source: rpm version: '3.16' perl-Mozilla-CA: - arch: noarch epoch: null name: perl-Mozilla-CA release: 6.el9 source: rpm version: '20200520' perl-NDBM_File: - arch: x86_64 epoch: 0 name: perl-NDBM_File release: 483.el9 source: rpm version: '1.15' perl-Net-SSLeay: - arch: x86_64 epoch: null name: perl-Net-SSLeay release: 3.el9 source: rpm version: '1.94' perl-POSIX: - arch: x86_64 epoch: 0 name: perl-POSIX release: 483.el9 source: rpm version: '1.94' perl-PathTools: - arch: x86_64 epoch: null name: perl-PathTools release: 461.el9 source: rpm version: '3.78' perl-Pod-Escapes: - arch: noarch epoch: 1 name: perl-Pod-Escapes release: 460.el9 source: rpm version: '1.07' perl-Pod-Perldoc: - arch: noarch epoch: null name: perl-Pod-Perldoc release: 461.el9 source: rpm version: 3.28.01 perl-Pod-Simple: - arch: noarch epoch: 1 name: perl-Pod-Simple release: 4.el9 source: rpm version: '3.42' perl-Pod-Usage: - arch: noarch epoch: 4 name: perl-Pod-Usage release: 4.el9 source: rpm version: '2.01' perl-Scalar-List-Utils: - arch: x86_64 epoch: 4 name: perl-Scalar-List-Utils release: 462.el9 source: rpm version: '1.56' perl-SelectSaver: - arch: noarch 
epoch: 0 name: perl-SelectSaver release: 483.el9 source: rpm version: '1.02' perl-Socket: - arch: x86_64 epoch: 4 name: perl-Socket release: 4.el9 source: rpm version: '2.031' perl-Storable: - arch: x86_64 epoch: 1 name: perl-Storable release: 460.el9 source: rpm version: '3.21' perl-Symbol: - arch: noarch epoch: 0 name: perl-Symbol release: 483.el9 source: rpm version: '1.08' perl-Term-ANSIColor: - arch: noarch epoch: null name: perl-Term-ANSIColor release: 461.el9 source: rpm version: '5.01' perl-Term-Cap: - arch: noarch epoch: null name: perl-Term-Cap release: 460.el9 source: rpm version: '1.17' perl-TermReadKey: - arch: x86_64 epoch: null name: perl-TermReadKey release: 11.el9 source: rpm version: '2.38' perl-Text-ParseWords: - arch: noarch epoch: null name: perl-Text-ParseWords release: 460.el9 source: rpm version: '3.30' perl-Text-Tabs+Wrap: - arch: noarch epoch: null name: perl-Text-Tabs+Wrap release: 460.el9 source: rpm version: '2013.0523' perl-Time-Local: - arch: noarch epoch: 2 name: perl-Time-Local release: 7.el9 source: rpm version: '1.300' perl-URI: - arch: noarch epoch: null name: perl-URI release: 3.el9 source: rpm version: '5.09' perl-base: - arch: noarch epoch: 0 name: perl-base release: 483.el9 source: rpm version: '2.27' perl-constant: - arch: noarch epoch: null name: perl-constant release: 461.el9 source: rpm version: '1.33' perl-if: - arch: noarch epoch: 0 name: perl-if release: 483.el9 source: rpm version: 0.60.800 perl-interpreter: - arch: x86_64 epoch: 4 name: perl-interpreter release: 483.el9 source: rpm version: 5.32.1 perl-lib: - arch: x86_64 epoch: 0 name: perl-lib release: 483.el9 source: rpm version: '0.65' perl-libnet: - arch: noarch epoch: null name: perl-libnet release: 4.el9 source: rpm version: '3.13' perl-libs: - arch: x86_64 epoch: 4 name: perl-libs release: 483.el9 source: rpm version: 5.32.1 perl-mro: - arch: x86_64 epoch: 0 name: perl-mro release: 483.el9 source: rpm version: '1.23' perl-overload: - arch: noarch epoch: 0 name: perl-overload release: 483.el9 source: rpm version: '1.31' perl-overloading: - arch: noarch epoch: 0 name: perl-overloading release: 483.el9 source: rpm version: '0.02' perl-parent: - arch: noarch epoch: 1 name: perl-parent release: 460.el9 source: rpm version: '0.238' perl-podlators: - arch: noarch epoch: 1 name: perl-podlators release: 460.el9 source: rpm version: '4.14' perl-srpm-macros: - arch: noarch epoch: null name: perl-srpm-macros release: 41.el9 source: rpm version: '1' perl-subs: - arch: noarch epoch: 0 name: perl-subs release: 483.el9 source: rpm version: '1.03' perl-vars: - arch: noarch epoch: 0 name: perl-vars release: 483.el9 source: rpm version: '1.05' pigz: - arch: x86_64 epoch: null name: pigz release: 4.el9 source: rpm version: '2.5' pkgconf: - arch: x86_64 epoch: null name: pkgconf release: 10.el9 source: rpm version: 1.7.3 pkgconf-m4: - arch: noarch epoch: null name: pkgconf-m4 release: 10.el9 source: rpm version: 1.7.3 pkgconf-pkg-config: - arch: x86_64 epoch: null name: pkgconf-pkg-config release: 10.el9 source: rpm version: 1.7.3 podman: - arch: x86_64 epoch: 6 name: podman release: 2.el9 source: rpm version: 5.6.0 policycoreutils: - arch: x86_64 epoch: null name: policycoreutils release: 4.el9 source: rpm version: '3.6' policycoreutils-python-utils: - arch: noarch epoch: null name: policycoreutils-python-utils release: 4.el9 source: rpm version: '3.6' polkit: - arch: x86_64 epoch: null name: polkit release: 14.el9 source: rpm version: '0.117' polkit-libs: - arch: x86_64 epoch: null name: polkit-libs 
release: 14.el9 source: rpm version: '0.117' polkit-pkla-compat: - arch: x86_64 epoch: null name: polkit-pkla-compat release: 21.el9 source: rpm version: '0.1' popt: - arch: x86_64 epoch: null name: popt release: 8.el9 source: rpm version: '1.18' prefixdevname: - arch: x86_64 epoch: null name: prefixdevname release: 8.el9 source: rpm version: 0.1.0 procps-ng: - arch: x86_64 epoch: null name: procps-ng release: 14.el9 source: rpm version: 3.3.17 protobuf-c: - arch: x86_64 epoch: null name: protobuf-c release: 13.el9 source: rpm version: 1.3.3 psmisc: - arch: x86_64 epoch: null name: psmisc release: 3.el9 source: rpm version: '23.4' publicsuffix-list-dafsa: - arch: noarch epoch: null name: publicsuffix-list-dafsa release: 3.el9 source: rpm version: '20210518' pyproject-srpm-macros: - arch: noarch epoch: null name: pyproject-srpm-macros release: 1.el9 source: rpm version: 1.18.5 python-rpm-macros: - arch: noarch epoch: null name: python-rpm-macros release: 54.el9 source: rpm version: '3.9' python-srpm-macros: - arch: noarch epoch: null name: python-srpm-macros release: 54.el9 source: rpm version: '3.9' python-unversioned-command: - arch: noarch epoch: null name: python-unversioned-command release: 3.el9 source: rpm version: 3.9.25 python3: - arch: x86_64 epoch: null name: python3 release: 3.el9 source: rpm version: 3.9.25 python3-argcomplete: - arch: noarch epoch: null name: python3-argcomplete release: 5.el9 source: rpm version: 1.12.0 python3-attrs: - arch: noarch epoch: null name: python3-attrs release: 7.el9 source: rpm version: 20.3.0 python3-audit: - arch: x86_64 epoch: null name: python3-audit release: 8.el9 source: rpm version: 3.1.5 python3-babel: - arch: noarch epoch: null name: python3-babel release: 2.el9 source: rpm version: 2.9.1 python3-cffi: - arch: x86_64 epoch: null name: python3-cffi release: 5.el9 source: rpm version: 1.14.5 python3-chardet: - arch: noarch epoch: null name: python3-chardet release: 5.el9 source: rpm version: 4.0.0 python3-configobj: - arch: noarch epoch: null name: python3-configobj release: 25.el9 source: rpm version: 5.0.6 python3-cryptography: - arch: x86_64 epoch: null name: python3-cryptography release: 5.el9 source: rpm version: 36.0.1 python3-dasbus: - arch: noarch epoch: null name: python3-dasbus release: 1.el9 source: rpm version: '1.7' python3-dateutil: - arch: noarch epoch: 1 name: python3-dateutil release: 1.el9 source: rpm version: 2.9.0.post0 python3-dbus: - arch: x86_64 epoch: null name: python3-dbus release: 2.el9 source: rpm version: 1.2.18 python3-devel: - arch: x86_64 epoch: null name: python3-devel release: 3.el9 source: rpm version: 3.9.25 python3-distro: - arch: noarch epoch: null name: python3-distro release: 7.el9 source: rpm version: 1.5.0 python3-dnf: - arch: noarch epoch: null name: python3-dnf release: 31.el9 source: rpm version: 4.14.0 python3-dnf-plugins-core: - arch: noarch epoch: null name: python3-dnf-plugins-core release: 25.el9 source: rpm version: 4.3.0 python3-enchant: - arch: noarch epoch: null name: python3-enchant release: 5.el9 source: rpm version: 3.2.0 python3-file-magic: - arch: noarch epoch: null name: python3-file-magic release: 16.el9 source: rpm version: '5.39' python3-gobject-base: - arch: x86_64 epoch: null name: python3-gobject-base release: 6.el9 source: rpm version: 3.40.1 python3-gobject-base-noarch: - arch: noarch epoch: null name: python3-gobject-base-noarch release: 6.el9 source: rpm version: 3.40.1 python3-gpg: - arch: x86_64 epoch: null name: python3-gpg release: 6.el9 source: rpm version: 1.15.1 
python3-hawkey: - arch: x86_64 epoch: null name: python3-hawkey release: 16.el9 source: rpm version: 0.69.0 python3-idna: - arch: noarch epoch: null name: python3-idna release: 7.el9.1 source: rpm version: '2.10' python3-jinja2: - arch: noarch epoch: null name: python3-jinja2 release: 8.el9 source: rpm version: 2.11.3 python3-jmespath: - arch: noarch epoch: null name: python3-jmespath release: 1.el9 source: rpm version: 1.0.1 python3-jsonpatch: - arch: noarch epoch: null name: python3-jsonpatch release: 16.el9 source: rpm version: '1.21' python3-jsonpointer: - arch: noarch epoch: null name: python3-jsonpointer release: 4.el9 source: rpm version: '2.0' python3-jsonschema: - arch: noarch epoch: null name: python3-jsonschema release: 13.el9 source: rpm version: 3.2.0 python3-libcomps: - arch: x86_64 epoch: null name: python3-libcomps release: 1.el9 source: rpm version: 0.1.18 python3-libdnf: - arch: x86_64 epoch: null name: python3-libdnf release: 16.el9 source: rpm version: 0.69.0 python3-libs: - arch: x86_64 epoch: null name: python3-libs release: 3.el9 source: rpm version: 3.9.25 python3-libselinux: - arch: x86_64 epoch: null name: python3-libselinux release: 3.el9 source: rpm version: '3.6' python3-libsemanage: - arch: x86_64 epoch: null name: python3-libsemanage release: 5.el9 source: rpm version: '3.6' python3-libvirt: - arch: x86_64 epoch: null name: python3-libvirt release: 1.el9 source: rpm version: 11.10.0 python3-libxml2: - arch: x86_64 epoch: null name: python3-libxml2 release: 14.el9 source: rpm version: 2.9.13 python3-lxml: - arch: x86_64 epoch: null name: python3-lxml release: 3.el9 source: rpm version: 4.6.5 python3-markupsafe: - arch: x86_64 epoch: null name: python3-markupsafe release: 12.el9 source: rpm version: 1.1.1 python3-netaddr: - arch: noarch epoch: null name: python3-netaddr release: 3.el9 source: rpm version: 0.10.1 python3-netifaces: - arch: x86_64 epoch: null name: python3-netifaces release: 15.el9 source: rpm version: 0.10.6 python3-oauthlib: - arch: noarch epoch: null name: python3-oauthlib release: 5.el9 source: rpm version: 3.1.1 python3-packaging: - arch: noarch epoch: null name: python3-packaging release: 5.el9 source: rpm version: '20.9' python3-pexpect: - arch: noarch epoch: null name: python3-pexpect release: 7.el9 source: rpm version: 4.8.0 python3-pip: - arch: noarch epoch: null name: python3-pip release: 1.el9 source: rpm version: 21.3.1 python3-pip-wheel: - arch: noarch epoch: null name: python3-pip-wheel release: 1.el9 source: rpm version: 21.3.1 python3-ply: - arch: noarch epoch: null name: python3-ply release: 14.el9 source: rpm version: '3.11' python3-policycoreutils: - arch: noarch epoch: null name: python3-policycoreutils release: 4.el9 source: rpm version: '3.6' python3-prettytable: - arch: noarch epoch: null name: python3-prettytable release: 27.el9 source: rpm version: 0.7.2 python3-ptyprocess: - arch: noarch epoch: null name: python3-ptyprocess release: 12.el9 source: rpm version: 0.6.0 python3-pycparser: - arch: noarch epoch: null name: python3-pycparser release: 6.el9 source: rpm version: '2.20' python3-pyparsing: - arch: noarch epoch: null name: python3-pyparsing release: 9.el9 source: rpm version: 2.4.7 python3-pyrsistent: - arch: x86_64 epoch: null name: python3-pyrsistent release: 8.el9 source: rpm version: 0.17.3 python3-pyserial: - arch: noarch epoch: null name: python3-pyserial release: 12.el9 source: rpm version: '3.4' python3-pysocks: - arch: noarch epoch: null name: python3-pysocks release: 12.el9 source: rpm version: 1.7.1 
python3-pytz: - arch: noarch epoch: null name: python3-pytz release: 5.el9 source: rpm version: '2021.1' python3-pyyaml: - arch: x86_64 epoch: null name: python3-pyyaml release: 6.el9 source: rpm version: 5.4.1 python3-requests: - arch: noarch epoch: null name: python3-requests release: 10.el9 source: rpm version: 2.25.1 python3-resolvelib: - arch: noarch epoch: null name: python3-resolvelib release: 5.el9 source: rpm version: 0.5.4 python3-rpm: - arch: x86_64 epoch: null name: python3-rpm release: 40.el9 source: rpm version: 4.16.1.3 python3-rpm-generators: - arch: noarch epoch: null name: python3-rpm-generators release: 9.el9 source: rpm version: '12' python3-rpm-macros: - arch: noarch epoch: null name: python3-rpm-macros release: 54.el9 source: rpm version: '3.9' python3-setools: - arch: x86_64 epoch: null name: python3-setools release: 1.el9 source: rpm version: 4.4.4 python3-setuptools: - arch: noarch epoch: null name: python3-setuptools release: 15.el9 source: rpm version: 53.0.0 python3-setuptools-wheel: - arch: noarch epoch: null name: python3-setuptools-wheel release: 15.el9 source: rpm version: 53.0.0 python3-six: - arch: noarch epoch: null name: python3-six release: 9.el9 source: rpm version: 1.15.0 python3-systemd: - arch: x86_64 epoch: null name: python3-systemd release: 19.el9 source: rpm version: '234' python3-urllib3: - arch: noarch epoch: null name: python3-urllib3 release: 6.el9 source: rpm version: 1.26.5 qemu-guest-agent: - arch: x86_64 epoch: 17 name: qemu-guest-agent release: 10.el9 source: rpm version: 10.1.0 qt5-srpm-macros: - arch: noarch epoch: null name: qt5-srpm-macros release: 1.el9 source: rpm version: 5.15.9 quota: - arch: x86_64 epoch: 1 name: quota release: 4.el9 source: rpm version: '4.09' quota-nls: - arch: noarch epoch: 1 name: quota-nls release: 4.el9 source: rpm version: '4.09' readline: - arch: x86_64 epoch: null name: readline release: 4.el9 source: rpm version: '8.1' readline-devel: - arch: x86_64 epoch: null name: readline-devel release: 4.el9 source: rpm version: '8.1' redhat-rpm-config: - arch: noarch epoch: null name: redhat-rpm-config release: 1.el9 source: rpm version: '210' rootfiles: - arch: noarch epoch: null name: rootfiles release: 35.el9 source: rpm version: '8.1' rpcbind: - arch: x86_64 epoch: null name: rpcbind release: 7.el9 source: rpm version: 1.2.6 rpm: - arch: x86_64 epoch: null name: rpm release: 40.el9 source: rpm version: 4.16.1.3 rpm-build: - arch: x86_64 epoch: null name: rpm-build release: 40.el9 source: rpm version: 4.16.1.3 rpm-build-libs: - arch: x86_64 epoch: null name: rpm-build-libs release: 40.el9 source: rpm version: 4.16.1.3 rpm-libs: - arch: x86_64 epoch: null name: rpm-libs release: 40.el9 source: rpm version: 4.16.1.3 rpm-plugin-audit: - arch: x86_64 epoch: null name: rpm-plugin-audit release: 40.el9 source: rpm version: 4.16.1.3 rpm-plugin-selinux: - arch: x86_64 epoch: null name: rpm-plugin-selinux release: 40.el9 source: rpm version: 4.16.1.3 rpm-plugin-systemd-inhibit: - arch: x86_64 epoch: null name: rpm-plugin-systemd-inhibit release: 40.el9 source: rpm version: 4.16.1.3 rpm-sign: - arch: x86_64 epoch: null name: rpm-sign release: 40.el9 source: rpm version: 4.16.1.3 rpm-sign-libs: - arch: x86_64 epoch: null name: rpm-sign-libs release: 40.el9 source: rpm version: 4.16.1.3 rpmlint: - arch: noarch epoch: null name: rpmlint release: 19.el9 source: rpm version: '1.11' rsync: - arch: x86_64 epoch: null name: rsync release: 4.el9 source: rpm version: 3.2.5 rsyslog: - arch: x86_64 epoch: null name: rsyslog 
release: 2.el9 source: rpm version: 8.2510.0 rsyslog-logrotate: - arch: x86_64 epoch: null name: rsyslog-logrotate release: 2.el9 source: rpm version: 8.2510.0 ruby: - arch: x86_64 epoch: null name: ruby release: 165.el9 source: rpm version: 3.0.7 ruby-default-gems: - arch: noarch epoch: null name: ruby-default-gems release: 165.el9 source: rpm version: 3.0.7 ruby-devel: - arch: x86_64 epoch: null name: ruby-devel release: 165.el9 source: rpm version: 3.0.7 ruby-libs: - arch: x86_64 epoch: null name: ruby-libs release: 165.el9 source: rpm version: 3.0.7 rubygem-bigdecimal: - arch: x86_64 epoch: null name: rubygem-bigdecimal release: 165.el9 source: rpm version: 3.0.0 rubygem-bundler: - arch: noarch epoch: null name: rubygem-bundler release: 165.el9 source: rpm version: 2.2.33 rubygem-io-console: - arch: x86_64 epoch: null name: rubygem-io-console release: 165.el9 source: rpm version: 0.5.7 rubygem-json: - arch: x86_64 epoch: null name: rubygem-json release: 165.el9 source: rpm version: 2.5.1 rubygem-psych: - arch: x86_64 epoch: null name: rubygem-psych release: 165.el9 source: rpm version: 3.3.2 rubygem-rdoc: - arch: noarch epoch: null name: rubygem-rdoc release: 165.el9 source: rpm version: 6.3.4.1 rubygems: - arch: noarch epoch: null name: rubygems release: 165.el9 source: rpm version: 3.2.33 rust-srpm-macros: - arch: noarch epoch: null name: rust-srpm-macros release: 4.el9 source: rpm version: '17' sed: - arch: x86_64 epoch: null name: sed release: 9.el9 source: rpm version: '4.8' selinux-policy: - arch: noarch epoch: null name: selinux-policy release: 1.el9 source: rpm version: 38.1.71 selinux-policy-targeted: - arch: noarch epoch: null name: selinux-policy-targeted release: 1.el9 source: rpm version: 38.1.71 setroubleshoot-plugins: - arch: noarch epoch: null name: setroubleshoot-plugins release: 4.el9 source: rpm version: 3.3.14 setroubleshoot-server: - arch: x86_64 epoch: null name: setroubleshoot-server release: 2.el9 source: rpm version: 3.3.35 setup: - arch: noarch epoch: null name: setup release: 10.el9 source: rpm version: 2.13.7 sg3_utils: - arch: x86_64 epoch: null name: sg3_utils release: 10.el9 source: rpm version: '1.47' sg3_utils-libs: - arch: x86_64 epoch: null name: sg3_utils-libs release: 10.el9 source: rpm version: '1.47' shadow-utils: - arch: x86_64 epoch: 2 name: shadow-utils release: 16.el9 source: rpm version: '4.9' shadow-utils-subid: - arch: x86_64 epoch: 2 name: shadow-utils-subid release: 16.el9 source: rpm version: '4.9' shared-mime-info: - arch: x86_64 epoch: null name: shared-mime-info release: 5.el9 source: rpm version: '2.1' skopeo: - arch: x86_64 epoch: 2 name: skopeo release: 2.el9 source: rpm version: 1.20.0 slang: - arch: x86_64 epoch: null name: slang release: 11.el9 source: rpm version: 2.3.2 slirp4netns: - arch: x86_64 epoch: null name: slirp4netns release: 1.el9 source: rpm version: 1.3.3 snappy: - arch: x86_64 epoch: null name: snappy release: 8.el9 source: rpm version: 1.1.8 sos: - arch: noarch epoch: null name: sos release: 2.el9 source: rpm version: 4.10.1 sqlite: - arch: x86_64 epoch: null name: sqlite release: 9.el9 source: rpm version: 3.34.1 sqlite-libs: - arch: x86_64 epoch: null name: sqlite-libs release: 9.el9 source: rpm version: 3.34.1 squashfs-tools: - arch: x86_64 epoch: null name: squashfs-tools release: 10.git1.el9 source: rpm version: '4.4' sscg: - arch: x86_64 epoch: null name: sscg release: 2.el9 source: rpm version: 4.0.3 sshpass: - arch: x86_64 epoch: null name: sshpass release: 4.el9 source: rpm version: '1.09' sssd-client: - 
arch: x86_64 epoch: null name: sssd-client release: 5.el9 source: rpm version: 2.9.7 sssd-common: - arch: x86_64 epoch: null name: sssd-common release: 5.el9 source: rpm version: 2.9.7 sssd-kcm: - arch: x86_64 epoch: null name: sssd-kcm release: 5.el9 source: rpm version: 2.9.7 sssd-nfs-idmap: - arch: x86_64 epoch: null name: sssd-nfs-idmap release: 5.el9 source: rpm version: 2.9.7 sudo: - arch: x86_64 epoch: null name: sudo release: 13.el9 source: rpm version: 1.9.5p2 systemd: - arch: x86_64 epoch: null name: systemd release: 64.el9 source: rpm version: '252' systemd-devel: - arch: x86_64 epoch: null name: systemd-devel release: 64.el9 source: rpm version: '252' systemd-libs: - arch: x86_64 epoch: null name: systemd-libs release: 64.el9 source: rpm version: '252' systemd-pam: - arch: x86_64 epoch: null name: systemd-pam release: 64.el9 source: rpm version: '252' systemd-rpm-macros: - arch: noarch epoch: null name: systemd-rpm-macros release: 64.el9 source: rpm version: '252' systemd-udev: - arch: x86_64 epoch: null name: systemd-udev release: 64.el9 source: rpm version: '252' tar: - arch: x86_64 epoch: 2 name: tar release: 9.el9 source: rpm version: '1.34' tcl: - arch: x86_64 epoch: 1 name: tcl release: 7.el9 source: rpm version: 8.6.10 tcpdump: - arch: x86_64 epoch: 14 name: tcpdump release: 9.el9 source: rpm version: 4.99.0 teamd: - arch: x86_64 epoch: null name: teamd release: 16.el9 source: rpm version: '1.31' time: - arch: x86_64 epoch: null name: time release: 18.el9 source: rpm version: '1.9' tmux: - arch: x86_64 epoch: null name: tmux release: 5.el9 source: rpm version: 3.2a tpm2-tss: - arch: x86_64 epoch: null name: tpm2-tss release: 1.el9 source: rpm version: 3.2.3 traceroute: - arch: x86_64 epoch: 3 name: traceroute release: 1.el9 source: rpm version: 2.1.1 tzdata: - arch: noarch epoch: null name: tzdata release: 1.el9 source: rpm version: 2025c unzip: - arch: x86_64 epoch: null name: unzip release: 59.el9 source: rpm version: '6.0' userspace-rcu: - arch: x86_64 epoch: null name: userspace-rcu release: 6.el9 source: rpm version: 0.12.1 util-linux: - arch: x86_64 epoch: null name: util-linux release: 21.el9 source: rpm version: 2.37.4 util-linux-core: - arch: x86_64 epoch: null name: util-linux-core release: 21.el9 source: rpm version: 2.37.4 vim-minimal: - arch: x86_64 epoch: 2 name: vim-minimal release: 23.el9 source: rpm version: 8.2.2637 virt-install: - arch: noarch epoch: null name: virt-install release: 1.el9 source: rpm version: 5.0.0 virt-manager-common: - arch: noarch epoch: null name: virt-manager-common release: 1.el9 source: rpm version: 5.0.0 webkit2gtk3-jsc: - arch: x86_64 epoch: null name: webkit2gtk3-jsc release: 1.el9 source: rpm version: 2.50.4 wget: - arch: x86_64 epoch: null name: wget release: 8.el9 source: rpm version: 1.21.1 which: - arch: x86_64 epoch: null name: which release: 30.el9 source: rpm version: '2.21' xfsprogs: - arch: x86_64 epoch: null name: xfsprogs release: 7.el9 source: rpm version: 6.4.0 xmlstarlet: - arch: x86_64 epoch: null name: xmlstarlet release: 20.el9 source: rpm version: 1.6.1 xorriso: - arch: x86_64 epoch: null name: xorriso release: 5.el9 source: rpm version: 1.5.4 xz: - arch: x86_64 epoch: null name: xz release: 8.el9 source: rpm version: 5.2.5 xz-devel: - arch: x86_64 epoch: null name: xz-devel release: 8.el9 source: rpm version: 5.2.5 xz-libs: - arch: x86_64 epoch: null name: xz-libs release: 8.el9 source: rpm version: 5.2.5 yajl: - arch: x86_64 epoch: null name: yajl release: 25.el9 source: rpm version: 2.1.0 yum: - arch: 
noarch epoch: null name: yum release: 31.el9 source: rpm version: 4.14.0 yum-utils: - arch: noarch epoch: null name: yum-utils release: 25.el9 source: rpm version: 4.3.0 zip: - arch: x86_64 epoch: null name: zip release: 35.el9 source: rpm version: '3.0' zlib: - arch: x86_64 epoch: null name: zlib release: 41.el9 source: rpm version: 1.2.11 zlib-devel: - arch: x86_64 epoch: null name: zlib-devel release: 41.el9 source: rpm version: 1.2.11 zstd: - arch: x86_64 epoch: null name: zstd release: 1.el9 source: rpm version: 1.5.5 home/zuul/zuul-output/logs/ci-framework-data/artifacts/pre_deploy_fetch_compute_facts.yml0000644000175000017500000000022315134411344031312 0ustar zuulzuulcifmw_edpm_deploy_extra_vars: {} cifmw_edpm_prepare_extra_vars: NETWORK_MTU: 1500 NNCP_DNS_SERVER: 192.168.122.10 NNCP_INTERFACE: ens7 home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/0000755000175000017500000000000015134432023024331 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/metallb-system/0000755000175000017500000000000015134437263027306 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/metallb-system/namespace.yaml0000644000175000017500000000030115134411477032117 0ustar zuulzuulapiVersion: v1 kind: Namespace metadata: name: metallb-system labels: pod-security.kubernetes.io/enforce: privileged security.openshift.io/scc.podSecurityLabelSync: "false" home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/metallb-system/metallb/0000755000175000017500000000000015134411500030710 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/0000755000175000017500000000000015134437263031332 5ustar zuulzuul././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/deploy_operator.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/deploy_op0000644000175000017500000000026215134411500033231 0ustar zuulzuulapiVersion: metallb.io/v1beta1 kind: MetalLB metadata: name: metallb namespace: metallb-system spec: logLevel: debug nodeSelector: node-role.kubernetes.io/worker: "" ././@LongLink0000644000000000000000000000015700000000000011606 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/ipaddresspools.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/ipaddress0000644000175000017500000000145715134411701033227 0ustar zuulzuul--- apiVersion: metallb.io/v1beta1 kind: IPAddressPool metadata: namespace: metallb-system name: ctlplane spec: addresses: - 192.168.122.80-192.168.122.90 --- apiVersion: metallb.io/v1beta1 kind: IPAddressPool metadata: namespace: metallb-system name: internalapi spec: addresses: - 172.17.0.80-172.17.0.90 --- apiVersion: metallb.io/v1beta1 kind: IPAddressPool metadata: namespace: metallb-system name: storage spec: addresses: - 172.18.0.80-172.18.0.90 --- apiVersion: metallb.io/v1beta1 kind: IPAddressPool metadata: namespace: metallb-system name: tenant spec: addresses: - 172.19.0.80-172.19.0.90 --- apiVersion: metallb.io/v1beta1 kind: IPAddressPool metadata: namespace: metallb-system name: designateext spec: autoAssign: false addresses: - 172.50.0.80-172.50.0.90 ././@LongLink0000644000000000000000000000016000000000000011600 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/l2advertisement.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/l2adverti0000644000175000017500000000156115134411701033141 0ustar zuulzuul--- apiVersion: metallb.io/v1beta1 kind: L2Advertisement metadata: name: ctlplane namespace: metallb-system spec: ipAddressPools: - ctlplane interfaces: - ospbr --- apiVersion: metallb.io/v1beta1 kind: L2Advertisement metadata: name: internalapi namespace: metallb-system spec: ipAddressPools: - internalapi interfaces: - ens7.20 --- apiVersion: metallb.io/v1beta1 kind: L2Advertisement metadata: name: storage namespace: metallb-system spec: ipAddressPools: - storage interfaces: - ens7.21 --- apiVersion: metallb.io/v1beta1 kind: L2Advertisement metadata: name: tenant namespace: metallb-system spec: ipAddressPools: - tenant interfaces: - ens7.22 --- apiVersion: metallb.io/v1beta1 kind: L2Advertisement metadata: name: designateext namespace: metallb-system spec: ipAddressPools: - designateext interfaces: - ens7.26 ././@LongLink0000644000000000000000000000016100000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/bgpadvertisement.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/bgpadvert0000644000175000017500000000037315134411701033223 0ustar zuulzuul--- apiVersion: metallb.io/v1beta1 kind: BGPAdvertisement metadata: name: bgpadvertisement namespace: metallb-system spec: ipAddressPools: - ctlplane - internalapi - storage - tenant - designateext peers: - bgp-peer - bgp-peer-2 ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/bgpextras.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/bgpextras0000644000175000017500000000144315134411701033243 0ustar zuulzuul--- apiVersion: v1 kind: ConfigMap metadata: namespace: metallb-system name: bgpextras data: extras: | router bgp 64999 network 172.30.4.2/32 neighbor 100.65.4.1 allowas-in origin neighbor 100.64.4.1 allowas-in origin ! ip prefix-list osp permit 172.16.0.0/16 le 32 route-map 100.65.4.1-in permit 20 ! match ip address prefix-list osp set src 172.30.4.2 route-map 100.64.4.1-in permit 20 ! 
match ip address prefix-list osp set src 172.30.4.2 ip protocol bgp route-map 100.65.4.1-in ip protocol bgp route-map 100.64.4.1-in ip prefix-list ocp-lo permit 172.30.4.2/32 route-map 100.65.4.1-out permit 3 match ip address prefix-list ocp-lo route-map 100.64.4.1-out permit 3 match ip address prefix-list ocp-lo ././@LongLink0000644000000000000000000000015100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/bgppeers.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr/bgppeers.0000644000175000017500000000066615134437263033152 0ustar zuulzuul--- apiVersion: metallb.io/v1beta2 kind: BGPPeer metadata: name: bgp-peer namespace: metallb-system spec: myASN: 64999 peerASN: 64999 peerAddress: 100.65.4.1 password: f0**********rZ routerID: 172.30.4.2 --- apiVersion: metallb.io/v1beta2 kind: BGPPeer metadata: name: bgp-peer-2 namespace: metallb-system spec: myASN: 64999 peerASN: 64999 peerAddress: 100.64.4.1 password: f0**********rZ routerID: 172.30.4.2 home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/cert-manager-operator/0000755000175000017500000000000015134437263030542 5ustar zuulzuul././@LongLink0000644000000000000000000000014600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/cert-manager-operator/namespace.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/cert-manager-operator/namespace.yam0000644000175000017500000000031015134411605033170 0ustar zuulzuulapiVersion: v1 kind: Namespace metadata: name: cert-manager-operator labels: pod-security.kubernetes.io/enforce: privileged security.openshift.io/scc.podSecurityLabelSync: "false" home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/cert-manager-operator/cert-manager/0000755000175000017500000000000015134411605033077 5ustar zuulzuul././@LongLink0000644000000000000000000000015000000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/cert-manager-operator/cert-manager/op/home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/cert-manager-operator/cert-manager/0000755000175000017500000000000015134437263033107 5ustar zuulzuul././@LongLink0000644000000000000000000000017200000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/cert-manager-operator/cert-manager/op/operatorgroup.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/cert-manager-operator/cert-manager/0000644000175000017500000000101715134411605033100 0ustar zuulzuulapiVersion: operators.coreos.com/v1 kind: OperatorGroup metadata: annotations: olm.providedAPIs: CertManager.v1alpha1.operator.openshift.io,Certificate.v1.cert-manager.io,CertificateRequest.v1.cert-manager.io,Challenge.v1.acme.cert-manager.io,ClusterIssuer.v1.cert-manager.io,Issuer.v1.cert-manager.io,Order.v1.acme.cert-manager.io generateName: cert-manager-operator- name: cert-manager-operator-bccwx namespace: cert-manager-operator spec: targetNamespaces: - cert-manager-operator upgradeStrategy: Default ././@LongLink0000644000000000000000000000017100000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/cert-manager-operator/cert-manager/op/subscription.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/cert-manager-operator/cert-manager/0000644000175000017500000000062715134411605033106 0ustar zuulzuulapiVersion: operators.coreos.com/v1alpha1 kind: 
Subscription metadata: labels: operators.coreos.com/openshift-cert-manager-operator.cert-manager-operator: "" name: openshift-cert-manager-operator namespace: cert-manager-operator spec: channel: stable-v1 installPlanApproval: Automatic name: openshift-cert-manager-operator source: redhat-operators sourceNamespace: openshift-marketplace home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/kustomizations/0000755000175000017500000000000015134411342027435 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/kustomizations/dataplane/0000755000175000017500000000000015134411342031366 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/kustomizations/controlplane/0000755000175000017500000000000015134437263032147 5ustar zuulzuul././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/kustomizations/controlplane/99-kustomization.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/kustomizations/controlplane/99-kust0000644000175000017500000000062215134411343033306 0ustar zuulzuulapiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization resources: namespace: openstack patches: - target: kind: OpenStackControlPlane patch: |- - op: replace path: /spec/dns/template/options value: [ { "key": "server", "values": [ "192.168.122.10" ] }, { "key": "no-negcache", "values": [] } ]home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/crc-storage/0000755000175000017500000000000015134437263026555 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/crc-storage/namespace.yaml0000644000175000017500000000027615134411362031372 0ustar zuulzuulapiVersion: v1 kind: Namespace metadata: name: crc-storage labels: pod-security.kubernetes.io/enforce: privileged security.openshift.io/scc.podSecurityLabelSync: "false" home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/crc/0000755000175000017500000000000015134437263025113 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/crc/storage.yaml0000644000175000017500000001773615134411406027450 0ustar zuulzuulkind: StorageClass apiVersion: storage.k8s.io/v1 metadata: name: "local-storage" provisioner: kubernetes.io/no-provisioner volumeBindingMode: WaitForFirstConsumer allowVolumeExpansion: true --- kind: PersistentVolume apiVersion: v1 metadata: name: "local-storage01-crc" annotations: pv.kubernetes.io/provisioned-by: crc-devsetup labels: provisioned-by: crc-devsetup spec: storageClassName: "local-storage" capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: "/mnt/openstack/pv01" type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: "local-storage02-crc" annotations: pv.kubernetes.io/provisioned-by: crc-devsetup labels: provisioned-by: crc-devsetup spec: storageClassName: "local-storage" capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: "/mnt/openstack/pv02" type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: "local-storage03-crc" 
annotations: pv.kubernetes.io/provisioned-by: crc-devsetup labels: provisioned-by: crc-devsetup spec: storageClassName: "local-storage" capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: "/mnt/openstack/pv03" type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: "local-storage04-crc" annotations: pv.kubernetes.io/provisioned-by: crc-devsetup labels: provisioned-by: crc-devsetup spec: storageClassName: "local-storage" capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: "/mnt/openstack/pv04" type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: "local-storage05-crc" annotations: pv.kubernetes.io/provisioned-by: crc-devsetup labels: provisioned-by: crc-devsetup spec: storageClassName: "local-storage" capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: "/mnt/openstack/pv05" type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: "local-storage06-crc" annotations: pv.kubernetes.io/provisioned-by: crc-devsetup labels: provisioned-by: crc-devsetup spec: storageClassName: "local-storage" capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: "/mnt/openstack/pv06" type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: "local-storage07-crc" annotations: pv.kubernetes.io/provisioned-by: crc-devsetup labels: provisioned-by: crc-devsetup spec: storageClassName: "local-storage" capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: "/mnt/openstack/pv07" type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: "local-storage08-crc" annotations: pv.kubernetes.io/provisioned-by: crc-devsetup labels: provisioned-by: crc-devsetup spec: storageClassName: "local-storage" capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: "/mnt/openstack/pv08" type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: "local-storage09-crc" annotations: pv.kubernetes.io/provisioned-by: crc-devsetup labels: provisioned-by: crc-devsetup spec: storageClassName: "local-storage" capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: "/mnt/openstack/pv09" type: 
DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: "local-storage10-crc" annotations: pv.kubernetes.io/provisioned-by: crc-devsetup labels: provisioned-by: crc-devsetup spec: storageClassName: "local-storage" capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: "/mnt/openstack/pv10" type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: "local-storage11-crc" annotations: pv.kubernetes.io/provisioned-by: crc-devsetup labels: provisioned-by: crc-devsetup spec: storageClassName: "local-storage" capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: "/mnt/openstack/pv11" type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: "local-storage12-crc" annotations: pv.kubernetes.io/provisioned-by: crc-devsetup labels: provisioned-by: crc-devsetup spec: storageClassName: "local-storage" capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: "/mnt/openstack/pv12" type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- apiVersion: v1 kind: PersistentVolumeClaim metadata: name: ansible-ee-logs namespace: crc-storage annotations: pv.kubernetes.io/provisioned-by: crc-devsetup spec: resources: requests: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany storageClassName: "local-storage" home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openshift-nmstate/0000755000175000017500000000000015134437263030014 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openshift-nmstate/namespace.yaml0000644000175000017500000000030415134411412032615 0ustar zuulzuulapiVersion: v1 kind: Namespace metadata: name: openshift-nmstate labels: pod-security.kubernetes.io/enforce: privileged security.openshift.io/scc.podSecurityLabelSync: "false" home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openshift-nmstate/nmstate/0000755000175000017500000000000015134411412031453 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openshift-nmstate/nmstate/cr/0000755000175000017500000000000015134437263032073 5ustar zuulzuul././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openshift-nmstate/nmstate/cr/deploy_operator.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openshift-nmstate/nmstate/cr/deploy0000644000175000017500000000010215134411412033267 0ustar zuulzuulapiVersion: nmstate.io/v1 kind: NMState metadata: name: nmstate home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/0000755000175000017500000000000015134437263030347 5ustar 
zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/metallb/0000755000175000017500000000000015134411500031751 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/metallb/op/0000755000175000017500000000000015134437263032405 5ustar zuulzuul././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/metallb/op/operatorgroup.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/metallb/op/oper0000644000175000017500000000016715134411500033263 0ustar zuulzuulapiVersion: operators.coreos.com/v1 kind: OperatorGroup metadata: name: metallb-operator namespace: metallb-system ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/metallb/op/subscription.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/metallb/op/subs0000644000175000017500000000036515134411500033272 0ustar zuulzuulapiVersion: operators.coreos.com/v1alpha1 kind: Subscription metadata: name: metallb-operator-sub namespace: metallb-system spec: channel: stable name: metallb-operator source: redhat-operators sourceNamespace: openshift-marketplace home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/openstack/0000755000175000017500000000000015134411701032323 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/openstack/op/0000755000175000017500000000000015134437263032754 5ustar zuulzuul././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/openstack/op/operatorgroup.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/openstack/op/op0000644000175000017500000000016515134411701033304 0ustar zuulzuulapiVersion: operators.coreos.com/v1 kind: OperatorGroup metadata: name: openstack namespace: openstack-operators ././@LongLink0000644000000000000000000000016500000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/openstack/op/catalogsource.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/openstack/op/ca0000644000175000017500000000035415134411701033251 0ustar zuulzuulapiVersion: operators.coreos.com/v1alpha1 kind: CatalogSource metadata: name: openstack-operator-index namespace: openstack-operators spec: image: quay.io/openstack-k8s-operators/openstack-operator-index:latest sourceType: grpc ././@LongLink0000644000000000000000000000016400000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/openstack/op/subscription.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/openstack/op/su0000644000175000017500000000037715134411701033322 0ustar zuulzuulapiVersion: operators.coreos.com/v1alpha1 kind: Subscription metadata: name: openstack-operator namespace: openstack-operators spec: name: openstack-operator channel: alpha source: openstack-operator-index sourceNamespace: openstack-operators home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/namespace.yaml0000644000175000017500000000030615134411701033153 0ustar zuulzuulapiVersion: v1 
kind: Namespace metadata: name: openstack-operators labels: pod-security.kubernetes.io/enforce: privileged security.openshift.io/scc.podSecurityLabelSync: "false" home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/nmstate/0000755000175000017500000000000015134411412032006 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/nmstate/op/0000755000175000017500000000000015134437263032440 5ustar zuulzuul././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/nmstate/op/operatorgroup.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/nmstate/op/oper0000644000175000017500000000041715134411412033316 0ustar zuulzuulapiVersion: operators.coreos.com/v1 kind: OperatorGroup metadata: annotations: olm.providedAPIs: NMState.v1.nmstate.io generateName: openshift-nmstate- name: openshift-nmstate-tn6k8 namespace: openshift-nmstate spec: targetNamespaces: - openshift-nmstate ././@LongLink0000644000000000000000000000016200000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/nmstate/op/subscription.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/nmstate/op/subs0000644000175000017500000000060015134411412033317 0ustar zuulzuulapiVersion: operators.coreos.com/v1alpha1 kind: Subscription metadata: labels: operators.coreos.com/kubernetes-nmstate-operator.openshift-nmstate: "" name: kubernetes-nmstate-operator namespace: openshift-nmstate spec: channel: stable installPlanApproval: Automatic name: kubernetes-nmstate-operator source: redhat-operators sourceNamespace: openshift-marketplace home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/infra/0000755000175000017500000000000015134412154031436 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack-operators/infra/op/0000755000175000017500000000000015134412154032054 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/0000755000175000017500000000000015134437263026333 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/netattach/0000755000175000017500000000000015134411700030272 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/netattach/cr/0000755000175000017500000000000015134437263030712 5ustar zuulzuul././@LongLink0000644000000000000000000000014600000000000011604 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/netattach/cr/ctlplane.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/netattach/cr/ctlplane.yam0000644000175000017500000000064515134411700033215 0ustar zuulzuulapiVersion: k8s.cni.cncf.io/v1 kind: NetworkAttachmentDefinition metadata: name: ctlplane namespace: openstack spec: config: | { "cniVersion": "0.3.1", "name": "ctlplane", "type": "macvlan", "master": "ospbr", "ipam": { "type": "whereabouts", "range": "192.168.122.0/24", "range_start": "192.168.122.30", "range_end": "192.168.122.70" } } ././@LongLink0000644000000000000000000000015100000000000011600 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/netattach/cr/internalapi.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/netattach/cr/internalapi.0000644000175000017500000000064415134411700033211 0ustar zuulzuulapiVersion: k8s.cni.cncf.io/v1 kind: NetworkAttachmentDefinition metadata: name: internalapi namespace: openstack spec: config: | { "cniVersion": "0.3.1", "name": "internalapi", "type": "macvlan", "master": "ens7.20", "ipam": { "type": "whereabouts", "range": "172.17.0.0/24", "range_start": "172.17.0.30", "range_end": "172.17.0.70" } } home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/netattach/cr/storage.yaml0000644000175000017500000000063415134411700033231 0ustar zuulzuulapiVersion: k8s.cni.cncf.io/v1 kind: NetworkAttachmentDefinition metadata: name: storage namespace: openstack spec: config: | { "cniVersion": "0.3.1", "name": "storage", "type": "macvlan", "master": "ens7.21", "ipam": { "type": "whereabouts", "range": "172.18.0.0/24", "range_start": "172.18.0.30", "range_end": "172.18.0.70" } } home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/netattach/cr/tenant.yaml0000644000175000017500000000063215134411700033054 0ustar zuulzuulapiVersion: k8s.cni.cncf.io/v1 kind: NetworkAttachmentDefinition metadata: name: tenant namespace: openstack spec: config: | { "cniVersion": "0.3.1", "name": "tenant", "type": "macvlan", "master": "ens7.22", "ipam": { "type": "whereabouts", "range": "172.19.0.0/24", "range_start": "172.19.0.30", "range_end": "172.19.0.70" } } ././@LongLink0000644000000000000000000000015000000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/netattach/cr/datacentre.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/netattach/cr/datacentre.y0000644000175000017500000000041615134411700033203 0ustar zuulzuulapiVersion: k8s.cni.cncf.io/v1 kind: NetworkAttachmentDefinition metadata: name: datacentre namespace: openstack spec: config: | { "cniVersion": "0.3.1", "name": "datacentre", "type": "bridge", "bridge": "ospbr", "ipam": {} } ././@LongLink0000644000000000000000000000015100000000000011600 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/netattach/cr/storagemgmt.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/netattach/cr/storagemgmt.0000644000175000017500000000064415134411700033234 0ustar zuulzuulapiVersion: k8s.cni.cncf.io/v1 kind: NetworkAttachmentDefinition metadata: name: storagemgmt namespace: openstack spec: config: | { "cniVersion": "0.3.1", "name": "storagemgmt", "type": "macvlan", "master": "ens7.23", "ipam": { "type": "whereabouts", "range": "172.20.0.0/24", "range_start": "172.20.0.30", "range_end": "172.20.0.70" } } home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/netattach/cr/octavia.yaml0000644000175000017500000000103315134411700033205 0ustar zuulzuulapiVersion: k8s.cni.cncf.io/v1 kind: NetworkAttachmentDefinition metadata: name: octavia namespace: openstack spec: config: | { "cniVersion": "0.3.1", "name": "octavia", "type": "bridge", "bridge": "octbr", "ipam": { "type": "whereabouts", "range": "172.23.0.0/24", "range_start": "172.23.0.30", "range_end": "172.23.0.70", "routes": [ { "dst": "172.24.0.0/16", "gw" : "172.23.0.150" } ] } } ././@LongLink0000644000000000000000000000014700000000000011605 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/netattach/cr/designate.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/netattach/cr/designate.ya0000644000175000017500000000064015134411700033174 0ustar zuulzuulapiVersion: k8s.cni.cncf.io/v1 kind: NetworkAttachmentDefinition metadata: name: designate namespace: openstack spec: config: | { "cniVersion": "0.3.1", "name": "designate", "type": "macvlan", "master": "ens7.25", "ipam": { "type": "whereabouts", "range": "172.28.0.0/24", "range_start": "172.28.0.30", "range_end": "172.28.0.70" } } ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/netattach/cr/designateext.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/netattach/cr/designateext0000644000175000017500000000064515134411700033312 0ustar zuulzuulapiVersion: k8s.cni.cncf.io/v1 kind: NetworkAttachmentDefinition metadata: name: designateext namespace: openstack spec: config: | { "cniVersion": "0.3.1", "name": "designatext", "type": "macvlan", "master": "ens7.26", "ipam": { "type": "whereabouts", "range": "172.50.0.0/24", "range_start": "172.50.0.30", "range_end": "172.50.0.70" } } home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/input/0000755000175000017500000000000015134437263027472 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/input/kustomization.yaml0000644000175000017500000000256115134437263033302 0ustar zuulzuulapiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization resources: labels: - pairs: created-by: install_yamls secretGenerator: - name: osp-secret literals: - AdminPassword=12**********78 - AodhPassword=12**********78 - BarbicanPassword=12**********78 - BarbicanSimpleCryptoKEK=sE**********U= - CeilometerPassword=12**********78 - CloudKittyPassword=12345678 - DbRootPassword=12**********78 - DatabasePassword=12**********78 - DesignatePassword=12**********78 - PlacementPassword=12**********78 - GlancePassword=12**********78 - NeutronPassword=12**********78 - CinderPassword=12**********78 - IronicPassword=12**********78 - IronicInspectorPassword=12**********78 - KeystoneOIDCClientSecret=COX8bmlKAWn56XCGMrKQJj7dgHNAOl6f - KeystoneOIDCCryptoPassphrase=openstack - OctaviaPassword=12**********78 - OctaviaHeartbeatKey=12**********78 - NovaPassword=12**********78 - ManilaPassword=12**********78 - MetadataSecret=12**********42 - HeatPassword=12**********78 - HeatAuthEncryptionKey=76**********f0 - HeatStackDomainAdminPassword=12345678 - SwiftPassword=12345678 - WatcherPassword=12345678 - name: libvirt-secret literals: - LibvirtPassword=12**********78 - name: octavia-ca-passphrase literals: - server-ca-passphrase=12**********78 generatorOptions: disableNameSuffixHash: true labels: type: osp-secret home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/openstack/0000755000175000017500000000000015134412002030302 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/openstack/cr/0000755000175000017500000000000015134437263030726 5ustar zuulzuul././@LongLink0000644000000000000000000000017000000000000011601 Lustar 
rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/openstack/cr/cifmw-kustomization-result.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/openstack/cr/cifmw-kustom0000644000175000017500000003225715134437263033307 0ustar zuulzuulapiVersion: core.openstack.org/v1beta1 kind: OpenStackControlPlane metadata: labels: created-by: install_yamls name: controlplane namespace: openstack spec: barbican: apiOverride: route: {} template: barbicanAPI: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 barbicanKeystoneListener: replicas: 1 barbicanWorker: replicas: 1 databaseInstance: openstack secret: os**********et cinder: apiOverride: route: {} template: cinderAPI: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer cinderBackup: networkAttachments: - storage replicas: 0 cinderScheduler: replicas: 1 cinderVolumes: volume1: networkAttachments: - storage replicas: 0 databaseInstance: openstack secret: os**********et designate: apiOverride: route: {} enabled: false template: databaseInstance: openstack designateAPI: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer designateBackendbind9: networkAttachments: - designate replicas: 1 storageClass: local-storage storageRequest: 10G designateCentral: replicas: 1 designateMdns: networkAttachments: - designate replicas: 1 designateProducer: replicas: 1 designateWorker: networkAttachments: - designate replicas: 1 secret: os**********et dns: template: options: - key: server values: - 192.168.122.10 - key: no-negcache values: [] override: service: metadata: annotations: metallb.universe.tf/address-pool: ctlplane metallb.universe.tf/allow-shared-ip: ctlplane metallb.universe.tf/loadBalancerIPs: 192.168.122.80 spec: type: LoadBalancer replicas: 1 galera: templates: openstack: replicas: 1 secret: os**********et storageRequest: 10G openstack-cell1: replicas: 1 secret: os**********et storageRequest: 10G glance: apiOverrides: default: route: {} template: customServiceConfig: | [DEFAULT] enabled_backends = default_backend:swift [glance_store] default_backend = default_backend [default_backend] swift_store_create_container_on_put = True swift_store_auth_version = 3 swift_store_auth_address = {{ .KeystoneInternalURL }} swift_store_endpoint_type = internalURL swift_store_user = service:glance swift_store_key = {{ .ServicePassword }} databaseInstance: openstack glanceAPIs: default: networkAttachments: - storage override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 keystoneEndpoint: default secret: os**********et storage: storageClass: "" storageRequest: 10G heat: apiOverride: route: {} cnfAPIOverride: route: {} enabled: false template: databaseInstance: openstack heatAPI: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi 
metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 heatEngine: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 secret: os**********et horizon: apiOverride: route: {} template: replicas: 1 secret: os**********et ironic: enabled: false template: databaseInstance: openstack ironicAPI: replicas: 1 ironicConductors: - replicas: 1 storageRequest: 10G ironicInspector: replicas: 1 ironicNeutronAgent: replicas: 1 secret: os**********et keystone: apiOverride: route: {} template: databaseInstance: openstack override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer secret: os**********et manila: apiOverride: route: {} template: databaseInstance: openstack manilaAPI: networkAttachments: - internalapi override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 manilaScheduler: replicas: 1 manilaShares: share1: networkAttachments: - storage replicas: 1 memcached: templates: memcached: replicas: 1 neutron: apiOverride: route: {} template: databaseInstance: openstack networkAttachments: - internalapi override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer secret: os**********et nova: apiOverride: route: {} template: apiServiceTemplate: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer cellTemplates: cell0: cellDatabaseAccount: nova-cell0 cellDatabaseInstance: openstack cellMessageBusInstance: rabbitmq conductorServiceTemplate: replicas: 1 hasAPIAccess: true cell1: cellDatabaseAccount: nova-cell1 cellDatabaseInstance: openstack-cell1 cellMessageBusInstance: rabbitmq-cell1 conductorServiceTemplate: replicas: 1 hasAPIAccess: true metadataServiceTemplate: override: service: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer secret: os**********et octavia: enabled: false template: databaseInstance: openstack octaviaAPI: replicas: 1 secret: os**********et ovn: template: ovnController: networkAttachment: tenant nicMappings: datacentre: ospbr ovnDBCluster: ovndbcluster-nb: dbType: NB networkAttachment: internalapi storageRequest: 10G ovndbcluster-sb: dbType: SB networkAttachment: internalapi storageRequest: 10G placement: apiOverride: route: {} template: databaseInstance: openstack override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer secret: os**********et rabbitmq: templates: rabbitmq: override: service: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.85 spec: type: 
LoadBalancer rabbitmq-cell1: override: service: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.86 spec: type: LoadBalancer redis: enabled: false secret: os**********et storageClass: local-storage swift: enabled: true proxyOverride: route: {} template: swiftProxy: networkAttachments: - storage override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 swiftRing: ringReplicas: 1 swiftStorage: networkAttachments: - storage replicas: 1 telemetry: enabled: true template: autoscaling: aodh: databaseAccount: aodh databaseInstance: openstack passwordSelectors: null secret: os**********et enabled: false heatInstance: heat ceilometer: enabled: true secret: os**********et cloudkitty: apiTimeout: 0 cloudKittyAPI: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 resources: {} tls: api: internal: {} public: {} caBundleSecretName: combined-ca-bundle cloudKittyProc: replicas: 1 resources: {} tls: caBundleSecretName: combined-ca-bundle databaseAccount: cloudkitty databaseInstance: openstack enabled: false memcachedInstance: memcached passwordSelector: aodhService: AodhPassword ceilometerService: CeilometerPassword cloudKittyService: CloudKittyPassword preserveJobs: false rabbitMqClusterName: rabbitmq s3StorageConfig: schemas: - effectiveDate: "2024-11-18" version: v13 secret: name: cloudkitty-loki-s3 type: s3 secret: os**********et serviceUser: cloudkitty storageClass: local-storage logging: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 cloNamespace: openshift-logging enabled: false ipaddr: 172.17.0.80 port: 10514 metricStorage: enabled: false monitoringStack: alertingEnabled: true scrapeInterval: 30s storage: persistent: pvcStorageRequest: 10G retention: 24h strategy: persistent ././@LongLink0000644000000000000000000000023100000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/openstack/cr/core_v1beta1_openstackcontrolplane_galera_network_isolation.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/openstack/cr/core_v1beta10000644000175000017500000003215515134437263033132 0ustar zuulzuulapiVersion: core.openstack.org/v1beta1 kind: OpenStackControlPlane metadata: name: openstack-galera-network-isolation spec: secret: os**********et storageClass: local-storage dns: template: override: service: metadata: annotations: metallb.universe.tf/address-pool: ctlplane metallb.universe.tf/allow-shared-ip: ctlplane metallb.universe.tf/loadBalancerIPs: 192.168.122.80 spec: type: LoadBalancer options: - key: server values: - 192.168.122.1 replicas: 1 cinder: apiOverride: route: {} template: databaseInstance: openstack secret: os**********et cinderAPI: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer cinderScheduler: replicas: 1 cinderBackup: networkAttachments: - storage replicas: 0 # backend needs to be configured cinderVolumes: volume1: 
networkAttachments: - storage replicas: 0 # backend needs to be configured glance: apiOverrides: default: route: {} template: customServiceConfig: | [DEFAULT] enabled_backends = default_backend:swift [glance_store] default_backend = default_backend [default_backend] swift_store_create_container_on_put = True swift_store_auth_version = 3 swift_store_auth_address = {{ .KeystoneInternalURL }} swift_store_endpoint_type = internalURL swift_store_user = service:glance swift_store_key = {{ .ServicePassword }} databaseInstance: openstack storage: storageClass: "" storageRequest: 10G secret: os**********et keystoneEndpoint: default glanceAPIs: default: replicas: 1 override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer networkAttachments: - storage keystone: apiOverride: route: {} template: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer databaseInstance: openstack secret: os**********et galera: templates: openstack: storageRequest: 500M secret: os**********et replicas: 1 openstack-cell1: storageRequest: 500M secret: os**********et replicas: 1 memcached: templates: memcached: replicas: 1 neutron: apiOverride: route: {} template: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer databaseInstance: openstack secret: os**********et networkAttachments: - internalapi barbican: apiOverride: route: {} template: databaseInstance: openstack secret: os**********et barbicanAPI: replicas: 1 override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer barbicanWorker: replicas: 1 barbicanKeystoneListener: replicas: 1 horizon: apiOverride: route: {} template: replicas: 1 secret: os**********et nova: apiOverride: route: {} template: cellTemplates: cell0: cellDatabaseAccount: nova-cell0 cellDatabaseInstance: openstack cellMessageBusInstance: rabbitmq conductorServiceTemplate: replicas: 1 hasAPIAccess: true cell1: cellDatabaseAccount: nova-cell1 cellDatabaseInstance: openstack-cell1 cellMessageBusInstance: rabbitmq-cell1 conductorServiceTemplate: replicas: 1 hasAPIAccess: true apiServiceTemplate: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer metadataServiceTemplate: override: service: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer secret: os**********et manila: apiOverride: route: {} template: databaseInstance: openstack manilaAPI: replicas: 1 networkAttachments: - internalapi override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer manilaScheduler: replicas: 1 manilaShares: share1: 
replicas: 1 networkAttachments: - storage ovn: template: ovnDBCluster: ovndbcluster-nb: dbType: NB storageRequest: 10G networkAttachment: internalapi ovndbcluster-sb: dbType: SB storageRequest: 10G networkAttachment: internalapi ovnController: networkAttachment: tenant placement: apiOverride: route: {} template: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer databaseInstance: openstack secret: os**********et rabbitmq: templates: rabbitmq: override: service: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.85 spec: type: LoadBalancer rabbitmq-cell1: override: service: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.86 spec: type: LoadBalancer heat: apiOverride: route: {} cnfAPIOverride: route: {} enabled: false template: databaseInstance: openstack heatAPI: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 heatEngine: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 secret: os**********et ironic: enabled: false template: databaseInstance: openstack ironicAPI: replicas: 1 ironicConductors: - replicas: 1 storageRequest: 10G ironicInspector: replicas: 1 ironicNeutronAgent: replicas: 1 secret: os**********et telemetry: enabled: true template: metricStorage: enabled: false monitoringStack: alertingEnabled: true scrapeInterval: 30s storage: strategy: persistent retention: 24h persistent: pvcStorageRequest: 10G autoscaling: enabled: false aodh: passwordSelectors: databaseAccount: aodh databaseInstance: openstack secret: os**********et heatInstance: heat ceilometer: enabled: true secret: os**********et logging: enabled: false ipaddr: 172.17.0.80 annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 port: 10514 cloNamespace: openshift-logging cloudkitty: apiTimeout: 0 cloudKittyAPI: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer replicas: 1 resources: {} tls: api: internal: {} public: {} caBundleSecretName: combined-ca-bundle cloudKittyProc: replicas: 1 resources: {} tls: caBundleSecretName: combined-ca-bundle databaseAccount: cloudkitty databaseInstance: openstack enabled: false memcachedInstance: memcached passwordSelector: aodhService: AodhPassword ceilometerService: CeilometerPassword cloudKittyService: CloudKittyPassword preserveJobs: false rabbitMqClusterName: rabbitmq s3StorageConfig: schemas: - effectiveDate: "2024-11-18" version: v13 secret: name: cloudkitty-loki-s3 type: s3 secret: os**********et serviceUser: cloudkitty storageClass: local-storage swift: enabled: true proxyOverride: route: {} template: swiftRing: ringReplicas: 1 swiftStorage: replicas: 1 networkAttachments: - storage swiftProxy: replicas: 1 override: service: internal: metadata: 
annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer networkAttachments: - storage octavia: enabled: false template: databaseInstance: openstack octaviaAPI: replicas: 1 secret: os**********et redis: enabled: false designate: enabled: false apiOverride: route: {} template: databaseInstance: openstack secret: os**********et designateAPI: override: service: internal: metadata: annotations: metallb.universe.tf/address-pool: internalapi metallb.universe.tf/allow-shared-ip: internalapi metallb.universe.tf/loadBalancerIPs: 172.17.0.80 spec: type: LoadBalancer designateCentral: replicas: 1 designateWorker: replicas: 1 networkAttachments: - designate designateProducer: replicas: 1 designateMdns: replicas: 1 networkAttachments: - designate designateBackendbind9: replicas: 1 storageClass: local-storage storageRequest: 10G networkAttachments: - designate ././@LongLink0000644000000000000000000000015300000000000011602 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/openstack/cr/kustomization.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/openstack/cr/kustomizatio0000644000175000017500000000225015134412153033401 0ustar zuulzuulapiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization resources: - ./core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml namespace: openstack labels: - pairs: created-by: install_yamls patches: - patch: |- - op: replace path: /spec/secret value: osp-secret - op: replace path: /spec/storageClass value: "local-storage" - op: replace path: /spec/galera/templates/openstack/storageRequest value: 10G - op: replace path: /spec/galera/templates/openstack-cell1/storageRequest value: 10G target: kind: OpenStackControlPlane - patch: |- apiVersion: core.openstack.org/v1beta1 kind: OpenStackControlPlane metadata: name: unused spec: ovn: template: ovnController: nicMappings: datacentre: ospbr target: kind: OpenStackControlPlane - patch: |- apiVersion: core.openstack.org/v1beta1 kind: OpenStackControlPlane metadata: name: unused spec: dns: template: options: - key: server values: - 192.168.122.10 target: kind: OpenStackControlPlane home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/namespace.yaml0000644000175000017500000000027415134412153031145 0ustar zuulzuulapiVersion: v1 kind: Namespace metadata: name: openstack labels: pod-security.kubernetes.io/enforce: privileged security.openshift.io/scc.podSecurityLabelSync: "false" home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/infra/0000755000175000017500000000000015134412154027422 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/infra/cr/0000755000175000017500000000000015134437263030036 5ustar zuulzuul././@LongLink0000644000000000000000000000016300000000000011603 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/infra/cr/network_v1beta1_netconfig.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/infra/cr/network_v1beta1_0000644000175000017500000000263615134412157033136 0ustar zuulzuulapiVersion: network.openstack.org/v1beta1 kind: NetConfig metadata: name: netconfig spec: networks: - name: ctlplane dnsDomain: ctlplane.example.com subnets: - name: subnet1 allocationRanges: - end: 192.168.122.120 start: 192.168.122.100 - end: 192.168.122.200 start: 192.168.122.150 cidr: 
192.168.122.0/24 gateway: 192.168.122.1 - name: internalapi dnsDomain: internalapi.example.com subnets: - name: subnet1 allocationRanges: - end: 172.17.0.250 start: 172.17.0.100 cidr: 172.17.0.0/24 vlan: 20 - name: external dnsDomain: external.example.com subnets: - name: subnet1 allocationRanges: - end: 10.0.0.250 start: 10.0.0.100 cidr: 10.0.0.0/24 gateway: 10.0.0.1 - name: storage dnsDomain: storage.example.com subnets: - name: subnet1 allocationRanges: - end: 172.18.0.250 start: 172.18.0.100 cidr: 172.18.0.0/24 vlan: 21 - name: storagemgmt dnsDomain: storagemgmt.example.com subnets: - name: subnet1 allocationRanges: - end: 172.20.0.250 start: 172.20.0.100 cidr: 172.20.0.0/24 vlan: 23 - name: tenant dnsDomain: tenant.example.com subnets: - name: subnet1 allocationRanges: - end: 172.19.0.250 start: 172.19.0.100 cidr: 172.19.0.0/24 vlan: 22 ././@LongLink0000644000000000000000000000014700000000000011605 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/infra/cr/kustomization.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/infra/cr/kustomization.ya0000644000175000017500000000310215134412157033300 0ustar zuulzuulapiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization resources: - ./network_v1beta1_netconfig.yaml - ./vlan-config.yaml namespace: openstack labels: - pairs: created-by: install_yamls patches: - patch: |- - op: replace path: /spec/secret value: osp-secret - op: replace path: /spec/storageClass value: "local-storage" - op: replace path: /spec/networks/0/mtu value: 1500 - op: replace path: /spec/networks/1/mtu value: 1500 - op: replace path: /spec/networks/2/mtu value: 1500 - op: replace path: /spec/networks/3/mtu value: 1500 - op: replace path: /spec/networks/4/mtu value: 1500 - op: replace path: /spec/networks/5/mtu value: 1500 target: kind: NetConfig replacements: - source: fieldPath: data.internalapi kind: ConfigMap name: vlan-config targets: - fieldPaths: - spec.networks.[name=internalapi].subnets.0.vlan select: kind: NetConfig - source: fieldPath: data.storage kind: ConfigMap name: vlan-config targets: - fieldPaths: - spec.networks.[name=storage].subnets.0.vlan select: kind: NetConfig - source: fieldPath: data.storagemgmt kind: ConfigMap name: vlan-config targets: - fieldPaths: - spec.networks.[name=storagemgmt].subnets.0.vlan select: kind: NetConfig - source: fieldPath: data.tenant kind: ConfigMap name: vlan-config targets: - fieldPaths: - spec.networks.[name=tenant].subnets.0.vlan select: kind: NetConfig home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/infra/cr/vlan-config.yaml0000644000175000017500000000027715134412157033126 0ustar zuulzuulapiVersion: v1 kind: ConfigMap metadata: annotations: config.kubernetes.io/local-config: "true" name: vlan-config data: internalapi: 20 storage: 21 storagemgmt: 23 tenant: 22 home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/cr/0000755000175000017500000000000015134411247026731 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/nncp/0000755000175000017500000000000015134411464027264 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/nncp/cr/0000755000175000017500000000000015134437263027675 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/openstack/nncp/cr/crc_nncp.yaml0000644000175000017500000000603615134411464032346 0ustar zuulzuulapiVersion: nmstate.io/v1 kind: NodeNetworkConfigurationPolicy metadata: 
labels: osp/interface: ens7 name: ens7-crc spec: desiredState: interfaces: - description: internalapi vlan interface name: ens7.20 state: up type: vlan vlan: base-iface: ens7 id: 20 reorder-headers: true ipv4: address: - ip: 172.17.0.5 prefix-length: 24 enabled: true dhcp: false ipv6: enabled: false - description: storage vlan interface name: ens7.21 state: up type: vlan vlan: base-iface: ens7 id: 21 reorder-headers: true ipv4: address: - ip: 172.18.0.5 prefix-length: 24 enabled: true dhcp: false ipv6: enabled: false - description: tenant vlan interface name: ens7.22 state: up type: vlan vlan: base-iface: ens7 id: 22 reorder-headers: true ipv4: address: - ip: 172.19.0.5 prefix-length: 24 enabled: true dhcp: false ipv6: enabled: false - description: storagemgmt vlan interface name: ens7.23 state: up type: vlan vlan: base-iface: ens7 id: 23 reorder-headers: true ipv4: address: - ip: 172.20.0.5 prefix-length: 24 enabled: true dhcp: false ipv6: enabled: false - description: Octavia vlan host interface name: ens7.24 state: up type: vlan vlan: base-iface: ens7 id: 24 - bridge: options: stp: enabled: false port: - name: ens7.24 description: Configuring bridge octbr mtu: 1500 name: octbr state: up type: linux-bridge - description: designate vlan interface name: ens7.25 state: up type: vlan vlan: base-iface: ens7 id: 25 reorder-headers: true ipv4: address: - ip: 172.28.0.5 prefix-length: 24 enabled: true dhcp: false ipv6: enabled: false - description: designate external vlan interface name: ens7.26 state: up type: vlan vlan: base-iface: ens7 id: 26 reorder-headers: true ipv4: address: - ip: 172.50.0.5 prefix-length: 24 enabled: true dhcp: false ipv6: enabled: false - description: Configuring Bridge ospbr with interface ens7 name: ospbr mtu: 1500 type: linux-bridge state: up bridge: options: stp: enabled: false port: - name: ens7 vlan: {} ipv4: address: - ip: 192.168.122.10 prefix-length: 24 enabled: true dhcp: false ipv6: enabled: false nodeSelector: kubernetes.io/hostname: crc node-role.kubernetes.io/worker: "" home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/storage/0000755000175000017500000000000015134437263026010 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/storage/storage.yaml0000644000175000017500000001634015134411360030332 0ustar zuulzuul--- kind: PersistentVolume apiVersion: v1 metadata: name: local-storage01-crc annotations: pv.kubernetes.io/provisioned-by: cifmw labels: provisioned-by: cifmw spec: storageClassName: local-storage capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: /mnt/openstack/pv01 type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: local-storage02-crc annotations: pv.kubernetes.io/provisioned-by: cifmw labels: provisioned-by: cifmw spec: storageClassName: local-storage capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: /mnt/openstack/pv02 type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: local-storage03-crc annotations: pv.kubernetes.io/provisioned-by: cifmw labels: provisioned-by: cifmw spec: storageClassName: 
local-storage capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: /mnt/openstack/pv03 type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: local-storage04-crc annotations: pv.kubernetes.io/provisioned-by: cifmw labels: provisioned-by: cifmw spec: storageClassName: local-storage capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: /mnt/openstack/pv04 type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: local-storage05-crc annotations: pv.kubernetes.io/provisioned-by: cifmw labels: provisioned-by: cifmw spec: storageClassName: local-storage capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: /mnt/openstack/pv05 type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: local-storage06-crc annotations: pv.kubernetes.io/provisioned-by: cifmw labels: provisioned-by: cifmw spec: storageClassName: local-storage capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: /mnt/openstack/pv06 type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: local-storage07-crc annotations: pv.kubernetes.io/provisioned-by: cifmw labels: provisioned-by: cifmw spec: storageClassName: local-storage capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: /mnt/openstack/pv07 type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: local-storage08-crc annotations: pv.kubernetes.io/provisioned-by: cifmw labels: provisioned-by: cifmw spec: storageClassName: local-storage capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: /mnt/openstack/pv08 type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: local-storage09-crc annotations: pv.kubernetes.io/provisioned-by: cifmw labels: provisioned-by: cifmw spec: storageClassName: local-storage capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: /mnt/openstack/pv09 type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: local-storage10-crc annotations: 
pv.kubernetes.io/provisioned-by: cifmw labels: provisioned-by: cifmw spec: storageClassName: local-storage capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: /mnt/openstack/pv10 type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: local-storage11-crc annotations: pv.kubernetes.io/provisioned-by: cifmw labels: provisioned-by: cifmw spec: storageClassName: local-storage capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: /mnt/openstack/pv11 type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] --- kind: PersistentVolume apiVersion: v1 metadata: name: local-storage12-crc annotations: pv.kubernetes.io/provisioned-by: cifmw labels: provisioned-by: cifmw spec: storageClassName: local-storage capacity: storage: 10Gi accessModes: - ReadWriteOnce - ReadWriteMany - ReadOnlyMany persistentVolumeReclaimPolicy: Delete local: path: /mnt/openstack/pv12 type: DirectoryOrCreate volumeMode: Filesystem nodeAffinity: required: nodeSelectorTerms: - matchExpressions: - key: kubernetes.io/hostname operator: In values: [crc] home/zuul/zuul-output/logs/ci-framework-data/artifacts/manifests/storage/storage-class.yaml0000644000175000017500000000030015134411346031426 0ustar zuulzuulallowVolumeExpansion: true apiVersion: storage.k8s.io/v1 kind: StorageClass metadata: name: local-storage provisioner: kubernetes.io/no-provisioner volumeBindingMode: WaitForFirstConsumer home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_k8s_snippets/0000755000175000017500000000000015134412734025454 5ustar zuulzuulhome/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_k8s_snippets/olm-values/0000755000175000017500000000000015134437263027544 5ustar zuulzuul././@LongLink0000644000000000000000000000015000000000000011577 Lustar rootroothome/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_k8s_snippets/olm-values/03_user_data_b64.yamlhome/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_k8s_snippets/olm-values/03_user_data_b64.y0000644000175000017500000000000315134412735032650 0ustar zuulzuul{} home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_k8s_snippets/olm-values/04_user_data.yaml0000644000175000017500000000000315134412735032670 0ustar zuulzuul{} home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_k8s_snippets/olm-values/01_original.yaml0000644000175000017500000000041315134412736032530 0ustar zuulzuulapiVersion: v1 data: openstack-operator-channel: alpha openstack-operator-image: quay.io/openstack-k8s-operators/openstack-operator-index:latest kind: ConfigMap metadata: annotations: config.kubernetes.io/local-config: 'true' name: olm-values home/zuul/zuul-output/logs/ci-framework-data/artifacts/ci_k8s_snippets/olm-values/02_ci_data.yaml0000644000175000017500000000021715134412734032311 0ustar zuulzuul# source: common/olm-values/values.yaml.j2 data: openstack-operator-image: quay.io/openstack-k8s-operators/openstack-operator-index:latest home/zuul/zuul-output/logs/selinux-listing.log0000644000175000017500000015241515134437372020750 0ustar zuulzuul/home/zuul/ci-framework-data: total 236 drwxr-xr-x. 
12 zuul zuul unconfined_u:object_r:user_home_t:s0 4096 Jan 22 15:14 artifacts drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 4096 Jan 22 15:14 logs -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 230693 Jan 22 12:19 tls-ca-bundle.pem drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 24 Jan 22 12:04 tmp drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Jan 22 12:06 volumes /home/zuul/ci-framework-data/artifacts: total 664 drwxrwxrwx. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 33 Jan 22 15:14 ansible_facts.2026-01-22_15-14 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 21941 Jan 22 14:28 ansible-facts.yml -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 430818 Jan 22 15:13 ansible-vars.yml drwxr-xr-x. 2 root root unconfined_u:object_r:user_home_t:s0 33 Jan 22 15:13 ci-env drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 24 Jan 22 12:19 ci_gen_kustomize_values drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 24 Jan 22 12:19 ci_k8s_snippets -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 135 Jan 22 14:28 ci_script_000_check_for_oc.sh -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 905 Jan 22 12:06 ci_script_000_run_hook_without_retry.sh -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 745 Jan 22 14:28 ci_script_000_run_openstack_must_gather.sh -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 213 Jan 22 12:06 ci_script_001_fetch_openshift.sh -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 293 Jan 22 12:06 ci_script_002_login_into_openshift_internal.sh -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1046 Jan 22 12:06 ci_script_003_run_hook_without_retry_fetch.sh -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 747 Jan 22 12:07 ci_script_004_run_crc.sh -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 737 Jan 22 12:07 ci_script_005_run.sh -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 741 Jan 22 12:07 ci_script_006_run.sh -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 756 Jan 22 12:11 ci_script_007_run_openstack.sh -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 800 Jan 22 12:13 ci_script_008_run_openstack_deploy.sh -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 758 Jan 22 12:13 ci_script_009_run_netconfig.sh -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 234 Jan 22 12:13 ci_script_010_apply_the.sh -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 933 Jan 22 12:22 ci_script_011_run_standalone.sh -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 159 Jan 22 14:28 hosts -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 77914 Jan 22 14:28 installed-packages.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 218 Jan 22 12:06 install_yamls.sh -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 3612 Jan 22 14:28 ip-network.txt drwxr-xr-x. 11 zuul zuul unconfined_u:object_r:user_home_t:s0 4096 Jan 22 14:28 manifests drwxr-xr-x. 2 root root unconfined_u:object_r:user_home_t:s0 4096 Jan 22 15:13 NetworkManager -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 7696 Jan 22 15:14 operator_images.yaml drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 120 Jan 22 15:13 parameters -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 147 Jan 22 12:07 pre_deploy_fetch_compute_facts.yml drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 4096 Jan 22 15:13 repositories -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 106 Jan 22 14:28 resolv.conf drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 33 Jan 22 12:06 roles drwxr-xr-x. 2 root root unconfined_u:object_r:user_home_t:s0 4096 Jan 22 15:13 yum_repos -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 36445 Jan 22 15:13 zuul_inventory.yml /home/zuul/ci-framework-data/artifacts/ansible_facts.2026-01-22_15-14: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 23 Jan 22 15:14 ansible_facts_cache /home/zuul/ci-framework-data/artifacts/ansible_facts.2026-01-22_15-14/ansible_facts_cache: total 72 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 72811 Jan 22 15:14 localhost /home/zuul/ci-framework-data/artifacts/ci-env: total 4 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 3380 Jan 22 14:28 networking-info.yml /home/zuul/ci-framework-data/artifacts/ci_gen_kustomize_values: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 25 Jan 22 15:13 olm-values /home/zuul/ci-framework-data/artifacts/ci_gen_kustomize_values/olm-values: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 267 Jan 22 12:19 values.yaml /home/zuul/ci-framework-data/artifacts/ci_k8s_snippets: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 107 Jan 22 15:13 olm-values /home/zuul/ci-framework-data/artifacts/ci_k8s_snippets/olm-values: total 16 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 267 Jan 22 12:19 01_original.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 143 Jan 22 12:19 02_ci_data.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 3 Jan 22 12:19 03_user_data_b64.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 3 Jan 22 12:19 04_user_data.yaml /home/zuul/ci-framework-data/artifacts/manifests: total 0 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 48 Jan 22 15:13 cert-manager-operator drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 26 Jan 22 15:13 crc drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 28 Jan 22 15:13 crc-storage drwxr-xr-x. 4 zuul zuul unconfined_u:object_r:user_home_t:s0 43 Jan 22 12:06 kustomizations drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 43 Jan 22 15:13 metallb-system drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 43 Jan 22 15:13 openshift-nmstate drwxr-xr-x. 8 zuul zuul unconfined_u:object_r:user_home_t:s0 110 Jan 22 15:13 openstack drwxr-xr-x. 6 zuul zuul unconfined_u:object_r:user_home_t:s0 88 Jan 22 15:13 openstack-operators drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 52 Jan 22 15:13 storage /home/zuul/ci-framework-data/artifacts/manifests/cert-manager-operator: total 4 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 16 Jan 22 12:09 cert-manager -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 200 Jan 22 12:09 namespace.yaml /home/zuul/ci-framework-data/artifacts/manifests/cert-manager-operator/cert-manager: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 57 Jan 22 15:13 op /home/zuul/ci-framework-data/artifacts/manifests/cert-manager-operator/cert-manager/op: total 8 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 527 Jan 22 12:09 operatorgroup.yaml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 407 Jan 22 12:09 subscription.yaml /home/zuul/ci-framework-data/artifacts/manifests/crc: total 8 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 8158 Jan 22 12:07 storage.yaml /home/zuul/ci-framework-data/artifacts/manifests/crc-storage: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 190 Jan 22 12:07 namespace.yaml /home/zuul/ci-framework-data/artifacts/manifests/kustomizations: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 35 Jan 22 15:13 controlplane drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Jan 22 12:06 dataplane /home/zuul/ci-framework-data/artifacts/manifests/kustomizations/controlplane: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 402 Jan 22 12:06 99-kustomization.yaml /home/zuul/ci-framework-data/artifacts/manifests/kustomizations/dataplane: total 0 /home/zuul/ci-framework-data/artifacts/manifests/metallb-system: total 4 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 16 Jan 22 12:08 metallb -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 193 Jan 22 12:08 namespace.yaml /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 161 Jan 22 15:13 cr /home/zuul/ci-framework-data/artifacts/manifests/metallb-system/metallb/cr: total 24 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 251 Jan 22 12:10 bgpadvertisement.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 803 Jan 22 12:10 bgpextras.yaml -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 438 Jan 22 15:13 bgppeers.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 178 Jan 22 12:08 deploy_operator.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 815 Jan 22 12:10 ipaddresspools.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 881 Jan 22 12:10 l2advertisement.yaml /home/zuul/ci-framework-data/artifacts/manifests/openshift-nmstate: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 196 Jan 22 12:07 namespace.yaml drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 16 Jan 22 12:07 nmstate /home/zuul/ci-framework-data/artifacts/manifests/openshift-nmstate/nmstate: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 34 Jan 22 15:13 cr /home/zuul/ci-framework-data/artifacts/manifests/openshift-nmstate/nmstate/cr: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 66 Jan 22 12:07 deploy_operator.yaml /home/zuul/ci-framework-data/artifacts/manifests/openstack: total 4 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Jan 22 12:05 cr drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 16 Jan 22 12:13 infra drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 32 Jan 22 15:13 input -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 188 Jan 22 12:13 namespace.yaml drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 16 Jan 22 12:10 netattach drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 16 Jan 22 12:08 nncp drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 16 Jan 22 12:11 openstack /home/zuul/ci-framework-data/artifacts/manifests/openstack/cr: total 0 /home/zuul/ci-framework-data/artifacts/manifests/openstack/infra: total 0 drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 94 Jan 22 15:13 cr /home/zuul/ci-framework-data/artifacts/manifests/openstack/infra/cr: total 12 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1602 Jan 22 12:13 kustomization.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1438 Jan 22 12:13 network_v1beta1_netconfig.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 191 Jan 22 12:13 vlan-config.yaml /home/zuul/ci-framework-data/artifacts/manifests/openstack/input: total 4 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 1393 Jan 22 15:13 kustomization.yaml /home/zuul/ci-framework-data/artifacts/manifests/openstack/netattach: total 4 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 4096 Jan 22 15:13 cr /home/zuul/ci-framework-data/artifacts/manifests/openstack/netattach/cr: total 36 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 421 Jan 22 12:10 ctlplane.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 270 Jan 22 12:10 datacentre.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 421 Jan 22 12:10 designateext.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 416 Jan 22 12:10 designate.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 420 Jan 22 12:10 internalapi.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 539 Jan 22 12:10 octavia.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 420 Jan 22 12:10 storagemgmt.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 412 Jan 22 12:10 storage.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 410 Jan 22 12:10 tenant.yaml /home/zuul/ci-framework-data/artifacts/manifests/openstack/nncp: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 27 Jan 22 15:13 cr /home/zuul/ci-framework-data/artifacts/manifests/openstack/nncp/cr: total 4 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 3102 Jan 22 12:08 crc_nncp.yaml /home/zuul/ci-framework-data/artifacts/manifests/openstack/openstack: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 143 Jan 22 15:13 cr /home/zuul/ci-framework-data/artifacts/manifests/openstack/openstack/cr: total 36 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 13487 Jan 22 15:13 cifmw-kustomization-result.yaml -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 13421 Jan 22 15:13 core_v1beta1_openstackcontrolplane_galera_network_isolation.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1192 Jan 22 12:13 kustomization.yaml /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators: total 4 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 16 Jan 22 12:13 infra drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 16 Jan 22 12:08 metallb -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 198 Jan 22 12:10 namespace.yaml drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 16 Jan 22 12:07 nmstate drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 16 Jan 22 12:10 openstack /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/infra: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 6 Jan 22 12:13 op /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/infra/op: total 0 /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/metallb: total 0 drwxr-xr-x. 
2 zuul zuul unconfined_u:object_r:user_home_t:s0 57 Jan 22 15:13 op /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/metallb/op: total 8 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 119 Jan 22 12:08 operatorgroup.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 245 Jan 22 12:08 subscription.yaml /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/nmstate: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 57 Jan 22 15:13 op /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/nmstate/op: total 8 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 271 Jan 22 12:07 operatorgroup.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 384 Jan 22 12:07 subscription.yaml /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/openstack: total 0 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 83 Jan 22 15:13 op /home/zuul/ci-framework-data/artifacts/manifests/openstack-operators/openstack/op: total 12 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 236 Jan 22 12:10 catalogsource.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 117 Jan 22 12:10 operatorgroup.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 255 Jan 22 12:10 subscription.yaml /home/zuul/ci-framework-data/artifacts/manifests/storage: total 12 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 192 Jan 22 12:07 storage-class.yaml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 7392 Jan 22 12:07 storage.yaml /home/zuul/ci-framework-data/artifacts/NetworkManager: total 24 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 329 Jan 22 14:28 ci-private-network-20.nmconnection -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 329 Jan 22 14:28 ci-private-network-21.nmconnection -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 329 Jan 22 14:28 ci-private-network-22.nmconnection -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 329 Jan 22 14:28 ci-private-network-23.nmconnection -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 331 Jan 22 14:28 ci-private-network.nmconnection -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 178 Jan 22 14:28 ens3.nmconnection /home/zuul/ci-framework-data/artifacts/parameters: total 52 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 1257 Jan 22 15:13 custom-params.yml -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 28070 Jan 22 15:13 install-yamls-params.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 280 Jan 22 12:06 openshift-login-params.yml -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 15083 Jan 22 15:13 zuul-params.yml /home/zuul/ci-framework-data/artifacts/repositories: total 32 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1658 Jan 22 12:05 delorean-antelope-testing.repo -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 5901 Jan 22 12:05 delorean.repo -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 33 Jan 22 12:05 delorean.repo.md5 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 206 Jan 22 12:05 repo-setup-centos-appstream.repo -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 196 Jan 22 12:05 repo-setup-centos-baseos.repo -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 226 Jan 22 12:05 repo-setup-centos-highavailability.repo -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 201 Jan 22 12:05 repo-setup-centos-powertools.repo /home/zuul/ci-framework-data/artifacts/roles: total 0 drwxr-xr-x. 3 zuul zuul unconfined_u:object_r:user_home_t:s0 19 Jan 22 12:06 install_yamls_makes /home/zuul/ci-framework-data/artifacts/roles/install_yamls_makes: total 20 drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 16384 Jan 22 15:13 tasks /home/zuul/ci-framework-data/artifacts/roles/install_yamls_makes/tasks: total 1256 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 741 Jan 22 12:06 make_all.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 951 Jan 22 12:06 make_ansibleee_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1041 Jan 22 12:06 make_ansibleee_kuttl_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 996 Jan 22 12:06 make_ansibleee_kuttl_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 981 Jan 22 12:06 make_ansibleee_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 921 Jan 22 12:06 make_ansibleee_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_ansibleee_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 831 Jan 22 12:06 make_ansibleee.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1185 Jan 22 12:06 make_attach_default_interface_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1065 Jan 22 12:06 make_attach_default_interface.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_barbican_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1041 Jan 22 12:06 make_barbican_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 996 Jan 22 12:06 make_barbican_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1056 Jan 22 12:06 make_barbican_deploy_validate.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 921 Jan 22 12:06 make_barbican_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 966 Jan 22 12:06 make_barbican_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_barbican_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 891 Jan 22 12:06 make_barbican_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 816 Jan 22 12:06 make_barbican.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 951 Jan 22 12:06 make_baremetal_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_baremetal_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 831 Jan 22 12:06 make_baremetal.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1170 Jan 22 12:06 make_bmaas_baremetal_net_nad_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1050 Jan 22 12:06 make_bmaas_baremetal_net_nad.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 900 Jan 22 12:06 make_bmaas_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1185 Jan 22 12:06 make_bmaas_crc_attach_network_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1065 Jan 22 12:06 make_bmaas_crc_attach_network.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1215 Jan 22 12:06 make_bmaas_crc_baremetal_bridge_cleanup.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 1095 Jan 22 12:06 make_bmaas_crc_baremetal_bridge.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1080 Jan 22 12:06 make_bmaas_generate_nodes_yaml.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1020 Jan 22 12:06 make_bmaas_metallb_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 900 Jan 22 12:06 make_bmaas_metallb.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1020 Jan 22 12:06 make_bmaas_network_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 900 Jan 22 12:06 make_bmaas_network.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1395 Jan 22 12:06 make_bmaas_route_crc_and_crc_bmaas_networks_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1275 Jan 22 12:06 make_bmaas_route_crc_and_crc_bmaas_networks.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1125 Jan 22 12:06 make_bmaas_sushy_emulator_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1080 Jan 22 12:06 make_bmaas_sushy_emulator_wait.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1005 Jan 22 12:06 make_bmaas_sushy_emulator.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1080 Jan 22 12:06 make_bmaas_virtual_bms_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 960 Jan 22 12:06 make_bmaas_virtual_bms.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 780 Jan 22 12:06 make_bmaas.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 876 Jan 22 12:06 make_ceph_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 831 Jan 22 12:06 make_ceph_help.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 756 Jan 22 12:06 make_ceph.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 981 Jan 22 12:06 make_certmanager_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 861 Jan 22 12:06 make_certmanager.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 900 Jan 22 12:06 make_cifmw_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 900 Jan 22 12:06 make_cifmw_prepare.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_cinder_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1011 Jan 22 12:06 make_cinder_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 966 Jan 22 12:06 make_cinder_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 891 Jan 22 12:06 make_cinder_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_cinder_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 876 Jan 22 12:06 make_cinder_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 861 Jan 22 12:06 make_cinder_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 786 Jan 22 12:06 make_cinder.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 801 Jan 22 12:06 make_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1245 Jan 22 12:06 make_crc_attach_default_interface_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1125 Jan 22 12:06 make_crc_attach_default_interface.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 921 Jan 22 12:06 make_crc_bmo_cleanup.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 891 Jan 22 12:06 make_crc_bmo_setup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 870 Jan 22 12:06 make_crc_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 840 Jan 22 12:06 make_crc_scrub.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1176 Jan 22 12:06 make_crc_storage_cleanup_with_retries.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 981 Jan 22 12:06 make_crc_storage_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 981 Jan 22 12:06 make_crc_storage_release.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1056 Jan 22 12:06 make_crc_storage_with_retries.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 861 Jan 22 12:06 make_crc_storage.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 750 Jan 22 12:06 make_crc.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 951 Jan 22 12:06 make_designate_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1056 Jan 22 12:06 make_designate_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1011 Jan 22 12:06 make_designate_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_designate_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 981 Jan 22 12:06 make_designate_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 921 Jan 22 12:06 make_designate_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_designate_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 831 Jan 22 12:06 make_designate.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 966 Jan 22 12:06 make_dns_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 921 Jan 22 12:06 make_dns_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 846 Jan 22 12:06 make_dns_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 915 Jan 22 12:06 make_download_tools.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 990 Jan 22 12:06 make_edpm_ansible_runner.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1035 Jan 22 12:06 make_edpm_baremetal_compute.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 975 Jan 22 12:06 make_edpm_compute_bootc.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1005 Jan 22 12:06 make_edpm_compute_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 975 Jan 22 12:06 make_edpm_compute_repos.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 960 Jan 22 12:06 make_edpm_computes_bgp.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 885 Jan 22 12:06 make_edpm_compute.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1086 Jan 22 12:06 make_edpm_deploy_baremetal_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1011 Jan 22 12:06 make_edpm_deploy_baremetal.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 981 Jan 22 12:06 make_edpm_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1071 Jan 22 12:06 make_edpm_deploy_generate_keys.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 1005 Jan 22 12:06 make_edpm_deploy_instance.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1131 Jan 22 12:06 make_edpm_deploy_networker_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1086 Jan 22 12:06 make_edpm_deploy_networker_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1011 Jan 22 12:06 make_edpm_deploy_networker.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_edpm_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 861 Jan 22 12:06 make_edpm_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1035 Jan 22 12:06 make_edpm_networker_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 915 Jan 22 12:06 make_edpm_networker.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1056 Jan 22 12:06 make_edpm_nova_discover_hosts.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1161 Jan 22 12:06 make_edpm_patch_ansible_runner_image.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 951 Jan 22 12:06 make_edpm_register_dns.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1086 Jan 22 12:06 make_edpm_wait_deploy_baremetal.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_edpm_wait_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_glance_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1011 Jan 22 12:06 make_glance_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 966 Jan 22 12:06 make_glance_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 891 Jan 22 12:06 make_glance_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_glance_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 876 Jan 22 12:06 make_glance_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 861 Jan 22 12:06 make_glance_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 786 Jan 22 12:06 make_glance.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 876 Jan 22 12:06 make_heat_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 981 Jan 22 12:06 make_heat_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_heat_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 861 Jan 22 12:06 make_heat_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_heat_kuttl_crc.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_heat_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 846 Jan 22 12:06 make_heat_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 831 Jan 22 12:06 make_heat_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 756 Jan 22 12:06 make_heat.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 765 Jan 22 12:06 make_help.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 921 Jan 22 12:06 make_horizon_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1026 Jan 22 12:06 make_horizon_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 981 Jan 22 12:06 make_horizon_deploy_prep.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_horizon_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 951 Jan 22 12:06 make_horizon_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 891 Jan 22 12:06 make_horizon_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 876 Jan 22 12:06 make_horizon_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 801 Jan 22 12:06 make_horizon.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 891 Jan 22 12:06 make_infra_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 921 Jan 22 12:06 make_infra_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 861 Jan 22 12:06 make_infra_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 846 Jan 22 12:06 make_infra_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 771 Jan 22 12:06 make_infra.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 891 Jan 22 12:06 make_input_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 771 Jan 22 12:06 make_input.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 945 Jan 22 12:06 make_ipv6_lab_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1140 Jan 22 12:06 make_ipv6_lab_nat64_router_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1020 Jan 22 12:06 make_ipv6_lab_nat64_router.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1065 Jan 22 12:06 make_ipv6_lab_network_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 945 Jan 22 12:06 make_ipv6_lab_network.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1005 Jan 22 12:06 make_ipv6_lab_sno_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 885 Jan 22 12:06 make_ipv6_lab_sno.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 825 Jan 22 12:06 make_ipv6_lab.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_ironic_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1011 Jan 22 12:06 make_ironic_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 966 Jan 22 12:06 make_ironic_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 891 Jan 22 12:06 make_ironic_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_ironic_kuttl_crc.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_ironic_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 876 Jan 22 12:06 make_ironic_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 861 Jan 22 12:06 make_ironic_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 786 Jan 22 12:06 make_ironic.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_keystone_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1041 Jan 22 12:06 make_keystone_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 996 Jan 22 12:06 make_keystone_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 921 Jan 22 12:06 make_keystone_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 966 Jan 22 12:06 make_keystone_kuttl_run.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_keystone_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 891 Jan 22 12:06 make_keystone_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 816 Jan 22 12:06 make_keystone.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 996 Jan 22 12:06 make_kuttl_common_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 951 Jan 22 12:06 make_kuttl_common_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_kuttl_db_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 891 Jan 22 12:06 make_kuttl_db_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 876 Jan 22 12:06 make_loki_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 981 Jan 22 12:06 make_loki_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 861 Jan 22 12:06 make_loki_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 756 Jan 22 12:06 make_loki.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 756 Jan 22 12:06 make_lvms.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_manila_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1011 Jan 22 12:06 make_manila_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 966 Jan 22 12:06 make_manila_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 891 Jan 22 12:06 make_manila_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_manila_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 876 Jan 22 12:06 make_manila_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 861 Jan 22 12:06 make_manila_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 786 Jan 22 12:06 make_manila.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 996 Jan 22 12:06 make_mariadb_chainsaw_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_mariadb_chainsaw.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 921 Jan 22 12:06 make_mariadb_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1026 Jan 22 12:06 make_mariadb_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 981 Jan 22 12:06 make_mariadb_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_mariadb_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 951 Jan 22 12:06 make_mariadb_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 891 Jan 22 12:06 make_mariadb_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 801 Jan 22 12:06 make_mariadb.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1056 Jan 22 12:06 make_memcached_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1011 Jan 22 12:06 make_memcached_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_memcached_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 921 Jan 22 12:06 make_metallb_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1026 Jan 22 12:06 make_metallb_config_cleanup.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_metallb_config.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 801 Jan 22 12:06 make_metallb.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 951 Jan 22 12:06 make_namespace_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 831 Jan 22 12:06 make_namespace.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 951 Jan 22 12:06 make_netattach_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 831 Jan 22 12:06 make_netattach.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1056 Jan 22 12:06 make_netconfig_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1011 Jan 22 12:06 make_netconfig_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_netconfig_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 951 Jan 22 12:06 make_netobserv_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1056 Jan 22 12:06 make_netobserv_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_netobserv_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 831 Jan 22 12:06 make_netobserv.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1185 Jan 22 12:06 make_network_isolation_bridge_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1065 Jan 22 12:06 make_network_isolation_bridge.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 921 Jan 22 12:06 make_neutron_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1026 Jan 22 12:06 make_neutron_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 981 Jan 22 12:06 make_neutron_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_neutron_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 951 Jan 22 12:06 make_neutron_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 891 Jan 22 12:06 make_neutron_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 876 Jan 22 12:06 make_neutron_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 801 Jan 22 12:06 make_neutron.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 870 Jan 22 12:06 make_nfs_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 750 Jan 22 12:06 make_nfs.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 801 Jan 22 12:06 make_nmstate.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 876 Jan 22 12:06 make_nncp_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 756 Jan 22 12:06 make_nncp.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 876 Jan 22 12:06 make_nova_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 981 Jan 22 12:06 make_nova_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_nova_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 861 Jan 22 12:06 make_nova_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 831 Jan 22 12:06 make_nova_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 756 Jan 22 12:06 make_nova.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 921 Jan 22 12:06 make_octavia_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1026 Jan 22 12:06 make_octavia_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 981 Jan 22 12:06 make_octavia_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_octavia_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 951 Jan 22 12:06 make_octavia_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 891 Jan 22 12:06 make_octavia_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 876 Jan 22 12:06 make_octavia_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 801 Jan 22 12:06 make_octavia.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 951 Jan 22 12:06 make_openstack_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1026 Jan 22 12:06 make_openstack_crds_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_openstack_crds.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1056 Jan 22 12:06 make_openstack_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1011 Jan 22 12:06 make_openstack_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_openstack_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_openstack_init.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 981 Jan 22 12:06 make_openstack_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 921 Jan 22 12:06 make_openstack_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1041 Jan 22 12:06 make_openstack_patch_version.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_openstack_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_openstack_repo.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 996 Jan 22 12:06 make_openstack_update_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1011 Jan 22 12:06 make_openstack_wait_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_openstack_wait.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 831 Jan 22 12:06 make_openstack.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 966 Jan 22 12:06 make_operator_namespace.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 861 Jan 22 12:06 make_ovn_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 966 Jan 22 12:06 make_ovn_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 921 Jan 22 12:06 make_ovn_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 846 Jan 22 12:06 make_ovn_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 891 Jan 22 12:06 make_ovn_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 831 Jan 22 12:06 make_ovn_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 816 Jan 22 12:06 make_ovn_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 741 Jan 22 12:06 make_ovn.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 951 Jan 22 12:06 make_placement_cleanup.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 1056 Jan 22 12:06 make_placement_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1011 Jan 22 12:06 make_placement_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_placement_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 981 Jan 22 12:06 make_placement_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 921 Jan 22 12:06 make_placement_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_placement_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 831 Jan 22 12:06 make_placement.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_rabbitmq_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1041 Jan 22 12:06 make_rabbitmq_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 996 Jan 22 12:06 make_rabbitmq_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 921 Jan 22 12:06 make_rabbitmq_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 891 Jan 22 12:06 make_rabbitmq_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 816 Jan 22 12:06 make_rabbitmq.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 996 Jan 22 12:06 make_redis_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 951 Jan 22 12:06 make_redis_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 876 Jan 22 12:06 make_redis_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 876 Jan 22 12:06 make_rook_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 891 Jan 22 12:06 make_rook_crc_disk.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_rook_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 861 Jan 22 12:06 make_rook_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 831 Jan 22 12:06 make_rook_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 756 Jan 22 12:06 make_rook.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1041 Jan 22 12:06 make_set_slower_etcd_profile.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 975 Jan 22 12:06 make_standalone_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 960 Jan 22 12:06 make_standalone_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 960 Jan 22 12:06 make_standalone_revert.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 990 Jan 22 12:06 make_standalone_snapshot.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 930 Jan 22 12:06 make_standalone_sync.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 855 Jan 22 12:06 make_standalone.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 891 Jan 22 12:06 make_swift_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 996 Jan 22 12:06 make_swift_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 951 Jan 22 12:06 make_swift_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 876 Jan 22 12:06 make_swift_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 921 Jan 22 12:06 make_swift_kuttl_run.yml -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 861 Jan 22 12:06 make_swift_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 846 Jan 22 12:06 make_swift_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 771 Jan 22 12:06 make_swift.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 951 Jan 22 12:06 make_telemetry_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1056 Jan 22 12:06 make_telemetry_deploy_cleanup.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1011 Jan 22 12:06 make_telemetry_deploy_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 936 Jan 22 12:06 make_telemetry_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 981 Jan 22 12:06 make_telemetry_kuttl_run.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 921 Jan 22 12:06 make_telemetry_kuttl.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 906 Jan 22 12:06 make_telemetry_prep.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 831 Jan 22 12:06 make_telemetry.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 915 Jan 22 12:06 make_tripleo_deploy.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 921 Jan 22 12:06 make_update_services.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 891 Jan 22 12:06 make_update_system.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 996 Jan 22 12:06 make_validate_marketplace.yml -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 756 Jan 22 12:06 make_wait.yml /home/zuul/ci-framework-data/artifacts/yum_repos: total 32 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 1658 Jan 22 14:28 delorean-antelope-testing.repo -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 5901 Jan 22 14:28 delorean.repo -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 33 Jan 22 14:28 delorean.repo.md5 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 206 Jan 22 14:28 repo-setup-centos-appstream.repo -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 196 Jan 22 14:28 repo-setup-centos-baseos.repo -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 226 Jan 22 14:28 repo-setup-centos-highavailability.repo -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 201 Jan 22 14:28 repo-setup-centos-powertools.repo /home/zuul/ci-framework-data/logs: total 1812 drwxrwxr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 25 Jan 22 15:14 2026-01-22_14-28 -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 340421 Jan 22 15:13 ansible.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 18 Jan 22 14:28 ci_script_000_check_for_oc.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 9953 Jan 22 12:06 ci_script_000_run_hook_without_retry.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 125407 Jan 22 15:13 ci_script_000_run_openstack_must_gather.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 234 Jan 22 12:06 ci_script_001_fetch_openshift.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 17 Jan 22 12:06 ci_script_002_login_into_openshift_internal.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 12528 Jan 22 12:07 ci_script_003_run_hook_without_retry_fetch.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 9076 Jan 22 12:07 ci_script_004_run_crc.log -rw-r--r--. 
1 zuul zuul unconfined_u:object_r:user_home_t:s0 2215 Jan 22 12:07 ci_script_005_run.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 24754 Jan 22 12:10 ci_script_006_run.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 2946 Jan 22 12:13 ci_script_007_run_openstack.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 4459 Jan 22 12:13 ci_script_008_run_openstack_deploy.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 8568 Jan 22 12:13 ci_script_009_run_netconfig.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 62 Jan 22 12:13 ci_script_010_apply_the.log -rw-r--r--. 1 root root unconfined_u:object_r:user_home_t:s0 1231024 Jan 22 15:13 ci_script_011_run_standalone.log drwxr-xr-x. 2 zuul zuul unconfined_u:object_r:user_home_t:s0 47 Jan 22 15:13 openstack-must-gather -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 16646 Jan 22 12:07 pre_deploy_fetch_compute_facts.log -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 15848 Jan 22 12:06 pre_infra_download_needed_tools.log /home/zuul/ci-framework-data/logs/2026-01-22_14-28: total 336 -rw-rw-rw-. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 340391 Jan 22 13:14 ansible.log /home/zuul/ci-framework-data/logs/openstack-must-gather: total 8 -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 1240 Jan 22 14:28 must-gather.logs -rw-r--r--. 1 zuul zuul unconfined_u:object_r:user_home_t:s0 55 Jan 22 14:28 timestamp /home/zuul/ci-framework-data/tmp: total 0 /home/zuul/ci-framework-data/volumes: total 0 home/zuul/zuul-output/logs/README.html0000644000175000017500000000306615134437377016734 0ustar zuulzuul README for CIFMW Logs

Logs of interest

Generated content of interest

home/zuul/zuul-output/logs/installed-pkgs.log0000644000175000017500000004753215134437400020526 0ustar zuulzuulaardvark-dns-1.17.0-1.el9.x86_64 abattis-cantarell-fonts-0.301-4.el9.noarch acl-2.3.1-4.el9.x86_64 adobe-source-code-pro-fonts-2.030.1.050-12.el9.1.noarch alternatives-1.24-2.el9.x86_64 annobin-12.98-1.el9.x86_64 ansible-core-2.14.18-2.el9.x86_64 apr-1.7.0-12.el9.x86_64 apr-util-1.6.1-23.el9.x86_64 apr-util-bdb-1.6.1-23.el9.x86_64 apr-util-openssl-1.6.1-23.el9.x86_64 attr-2.5.1-3.el9.x86_64 audit-3.1.5-8.el9.x86_64 audit-libs-3.1.5-8.el9.x86_64 authselect-1.2.6-3.el9.x86_64 authselect-compat-1.2.6-3.el9.x86_64 authselect-libs-1.2.6-3.el9.x86_64 basesystem-11-13.el9.noarch bash-5.1.8-9.el9.x86_64 bash-completion-2.11-5.el9.noarch binutils-2.35.2-69.el9.x86_64 binutils-gold-2.35.2-69.el9.x86_64 buildah-1.41.3-1.el9.x86_64 bzip2-1.0.8-10.el9.x86_64 bzip2-libs-1.0.8-10.el9.x86_64 ca-certificates-2025.2.80_v9.0.305-91.el9.noarch c-ares-1.19.1-2.el9.x86_64 centos-gpg-keys-9.0-34.el9.noarch centos-logos-90.9-1.el9.x86_64 centos-stream-release-9.0-34.el9.noarch centos-stream-repos-9.0-34.el9.noarch checkpolicy-3.6-1.el9.x86_64 chrony-4.8-1.el9.x86_64 cloud-init-24.4-8.el9.noarch cloud-utils-growpart-0.33-1.el9.x86_64 cmake-filesystem-3.31.8-3.el9.x86_64 cockpit-bridge-348-1.el9.noarch cockpit-system-348-1.el9.noarch cockpit-ws-348-1.el9.x86_64 cockpit-ws-selinux-348-1.el9.x86_64 conmon-2.1.13-1.el9.x86_64 containers-common-1-134.el9.x86_64 containers-common-extra-1-134.el9.x86_64 container-selinux-2.244.0-1.el9.noarch coreutils-8.32-39.el9.x86_64 coreutils-common-8.32-39.el9.x86_64 cpio-2.13-16.el9.x86_64 cpp-11.5.0-14.el9.x86_64 cracklib-2.9.6-28.el9.x86_64 cracklib-dicts-2.9.6-28.el9.x86_64 createrepo_c-0.20.1-4.el9.x86_64 createrepo_c-libs-0.20.1-4.el9.x86_64 criu-3.19-3.el9.x86_64 criu-libs-3.19-3.el9.x86_64 cronie-1.5.7-14.el9.x86_64 cronie-anacron-1.5.7-14.el9.x86_64 crontabs-1.11-26.20190603git.el9.noarch crun-1.24-1.el9.x86_64 crypto-policies-20251126-1.gite9c4db2.el9.noarch crypto-policies-scripts-20251126-1.gite9c4db2.el9.noarch cryptsetup-libs-2.8.1-2.el9.x86_64 curl-7.76.1-38.el9.x86_64 cyrus-sasl-2.1.27-21.el9.x86_64 cyrus-sasl-devel-2.1.27-21.el9.x86_64 cyrus-sasl-gssapi-2.1.27-21.el9.x86_64 cyrus-sasl-lib-2.1.27-21.el9.x86_64 dbus-1.12.20-8.el9.x86_64 dbus-broker-28-7.el9.x86_64 dbus-common-1.12.20-8.el9.noarch dbus-libs-1.12.20-8.el9.x86_64 dbus-tools-1.12.20-8.el9.x86_64 debugedit-5.0-11.el9.x86_64 dejavu-sans-fonts-2.37-18.el9.noarch desktop-file-utils-0.26-6.el9.x86_64 device-mapper-1.02.206-2.el9.x86_64 device-mapper-libs-1.02.206-2.el9.x86_64 dhcp-client-4.4.2-19.b1.el9.x86_64 dhcp-common-4.4.2-19.b1.el9.noarch diffutils-3.7-12.el9.x86_64 dnf-4.14.0-31.el9.noarch dnf-data-4.14.0-31.el9.noarch dnf-plugins-core-4.3.0-25.el9.noarch dracut-057-102.git20250818.el9.x86_64 dracut-config-generic-057-102.git20250818.el9.x86_64 dracut-network-057-102.git20250818.el9.x86_64 dracut-squash-057-102.git20250818.el9.x86_64 dwz-0.16-1.el9.x86_64 e2fsprogs-1.46.5-8.el9.x86_64 e2fsprogs-libs-1.46.5-8.el9.x86_64 ed-1.14.2-12.el9.x86_64 efi-srpm-macros-6-4.el9.noarch elfutils-0.194-1.el9.x86_64 elfutils-debuginfod-client-0.194-1.el9.x86_64 elfutils-default-yama-scope-0.194-1.el9.noarch elfutils-libelf-0.194-1.el9.x86_64 elfutils-libs-0.194-1.el9.x86_64 emacs-filesystem-27.2-18.el9.noarch enchant-1.6.0-30.el9.x86_64 ethtool-6.15-2.el9.x86_64 expat-2.5.0-6.el9.x86_64 expect-5.45.4-16.el9.x86_64 file-5.39-16.el9.x86_64 file-libs-5.39-16.el9.x86_64 filesystem-3.16-5.el9.x86_64 
findutils-4.8.0-7.el9.x86_64 fonts-filesystem-2.0.5-7.el9.1.noarch fonts-srpm-macros-2.0.5-7.el9.1.noarch fuse3-3.10.2-9.el9.x86_64 fuse3-libs-3.10.2-9.el9.x86_64 fuse-common-3.10.2-9.el9.x86_64 fuse-libs-2.9.9-17.el9.x86_64 fuse-overlayfs-1.16-1.el9.x86_64 gawk-5.1.0-6.el9.x86_64 gawk-all-langpacks-5.1.0-6.el9.x86_64 gcc-11.5.0-14.el9.x86_64 gcc-c++-11.5.0-14.el9.x86_64 gcc-plugin-annobin-11.5.0-14.el9.x86_64 gdb-minimal-16.3-2.el9.x86_64 gdbm-libs-1.23-1.el9.x86_64 gdisk-1.0.7-5.el9.x86_64 gdk-pixbuf2-2.42.6-6.el9.x86_64 geolite2-city-20191217-6.el9.noarch geolite2-country-20191217-6.el9.noarch gettext-0.21-8.el9.x86_64 gettext-libs-0.21-8.el9.x86_64 ghc-srpm-macros-1.5.0-6.el9.noarch git-2.47.3-1.el9.x86_64 git-core-2.47.3-1.el9.x86_64 git-core-doc-2.47.3-1.el9.noarch glib2-2.68.4-18.el9.x86_64 glibc-2.34-245.el9.x86_64 glibc-common-2.34-245.el9.x86_64 glibc-devel-2.34-245.el9.x86_64 glibc-gconv-extra-2.34-245.el9.x86_64 glibc-headers-2.34-245.el9.x86_64 glibc-langpack-en-2.34-245.el9.x86_64 glib-networking-2.68.3-3.el9.x86_64 gmp-6.2.0-13.el9.x86_64 gnupg2-2.3.3-5.el9.x86_64 gnutls-3.8.10-2.el9.x86_64 gobject-introspection-1.68.0-11.el9.x86_64 go-srpm-macros-3.8.1-1.el9.noarch gpgme-1.15.1-6.el9.x86_64 gpg-pubkey-8483c65d-5ccc5b19 grep-3.6-5.el9.x86_64 groff-base-1.22.4-10.el9.x86_64 grub2-common-2.06-120.el9.noarch grub2-pc-2.06-120.el9.x86_64 grub2-pc-modules-2.06-120.el9.noarch grub2-tools-2.06-120.el9.x86_64 grub2-tools-minimal-2.06-120.el9.x86_64 grubby-8.40-69.el9.x86_64 gsettings-desktop-schemas-40.0-8.el9.x86_64 gssproxy-0.8.4-7.el9.x86_64 gzip-1.12-1.el9.x86_64 hostname-3.23-6.el9.x86_64 httpd-tools-2.4.62-10.el9.x86_64 hunspell-1.7.0-11.el9.x86_64 hunspell-en-GB-0.20140811.1-20.el9.noarch hunspell-en-US-0.20140811.1-20.el9.noarch hunspell-filesystem-1.7.0-11.el9.x86_64 hwdata-0.348-9.20.el9.noarch ima-evm-utils-1.6.2-2.el9.x86_64 info-6.7-15.el9.x86_64 inih-49-6.el9.x86_64 initscripts-rename-device-10.11.8-4.el9.x86_64 initscripts-service-10.11.8-4.el9.noarch ipcalc-1.0.0-5.el9.x86_64 iproute-6.17.0-1.el9.x86_64 iproute-tc-6.17.0-1.el9.x86_64 iptables-libs-1.8.10-11.el9.x86_64 iptables-nft-1.8.10-11.el9.x86_64 iptables-nft-services-1.8.10-11.el9.noarch iputils-20210202-15.el9.x86_64 irqbalance-1.9.4-5.el9.x86_64 jansson-2.14-1.el9.x86_64 jq-1.6-19.el9.x86_64 json-c-0.14-11.el9.x86_64 json-glib-1.6.6-1.el9.x86_64 kbd-2.4.0-11.el9.x86_64 kbd-legacy-2.4.0-11.el9.noarch kbd-misc-2.4.0-11.el9.noarch kernel-5.14.0-661.el9.x86_64 kernel-core-5.14.0-661.el9.x86_64 kernel-headers-5.14.0-661.el9.x86_64 kernel-modules-5.14.0-661.el9.x86_64 kernel-modules-core-5.14.0-661.el9.x86_64 kernel-srpm-macros-1.0-14.el9.noarch kernel-tools-5.14.0-661.el9.x86_64 kernel-tools-libs-5.14.0-661.el9.x86_64 kexec-tools-2.0.29-14.el9.x86_64 keyutils-1.6.3-1.el9.x86_64 keyutils-libs-1.6.3-1.el9.x86_64 kmod-28-11.el9.x86_64 kmod-libs-28-11.el9.x86_64 kpartx-0.8.7-42.el9.x86_64 krb5-libs-1.21.1-8.el9.x86_64 langpacks-core-en_GB-3.0-16.el9.noarch langpacks-core-font-en-3.0-16.el9.noarch langpacks-en_GB-3.0-16.el9.noarch less-590-6.el9.x86_64 libacl-2.3.1-4.el9.x86_64 libappstream-glib-0.7.18-5.el9.x86_64 libarchive-3.5.3-6.el9.x86_64 libassuan-2.5.5-3.el9.x86_64 libattr-2.5.1-3.el9.x86_64 libbasicobjects-0.1.1-53.el9.x86_64 libblkid-2.37.4-21.el9.x86_64 libbpf-1.5.0-3.el9.x86_64 libbrotli-1.0.9-7.el9.x86_64 libburn-1.5.4-5.el9.x86_64 libcap-2.48-10.el9.x86_64 libcap-ng-0.8.2-7.el9.x86_64 libcbor-0.7.0-5.el9.x86_64 libcollection-0.7.0-53.el9.x86_64 libcom_err-1.46.5-8.el9.x86_64 libcomps-0.1.18-1.el9.x86_64 
libcurl-7.76.1-38.el9.x86_64 libdaemon-0.14-23.el9.x86_64 libdb-5.3.28-57.el9.x86_64 libdhash-0.5.0-53.el9.x86_64 libdnf-0.69.0-16.el9.x86_64 libeconf-0.4.1-5.el9.x86_64 libedit-3.1-38.20210216cvs.el9.x86_64 libestr-0.1.11-4.el9.x86_64 libev-4.33-6.el9.x86_64 libevent-2.1.12-8.el9.x86_64 libfastjson-0.99.9-5.el9.x86_64 libfdisk-2.37.4-21.el9.x86_64 libffi-3.4.2-8.el9.x86_64 libffi-devel-3.4.2-8.el9.x86_64 libfido2-1.13.0-2.el9.x86_64 libgcc-11.5.0-14.el9.x86_64 libgcrypt-1.10.0-11.el9.x86_64 libgomp-11.5.0-14.el9.x86_64 libgpg-error-1.42-5.el9.x86_64 libgpg-error-devel-1.42-5.el9.x86_64 libibverbs-57.0-2.el9.x86_64 libicu-67.1-10.el9.x86_64 libidn2-2.3.0-7.el9.x86_64 libini_config-1.3.1-53.el9.x86_64 libisoburn-1.5.4-5.el9.x86_64 libisofs-1.5.4-4.el9.x86_64 libjpeg-turbo-2.0.90-7.el9.x86_64 libkcapi-1.4.0-2.el9.x86_64 libkcapi-hmaccalc-1.4.0-2.el9.x86_64 libksba-1.5.1-7.el9.x86_64 libldb-4.23.4-2.el9.x86_64 libmaxminddb-1.5.2-4.el9.x86_64 libmnl-1.0.4-16.el9.x86_64 libmodulemd-2.13.0-2.el9.x86_64 libmount-2.37.4-21.el9.x86_64 libmpc-1.2.1-4.el9.x86_64 libndp-1.9-1.el9.x86_64 libnet-1.2-7.el9.x86_64 libnetfilter_conntrack-1.0.9-1.el9.x86_64 libnfnetlink-1.0.1-23.el9.x86_64 libnfsidmap-2.5.4-41.el9.x86_64 libnftnl-1.2.6-4.el9.x86_64 libnghttp2-1.43.0-6.el9.x86_64 libnl3-3.11.0-1.el9.x86_64 libnl3-cli-3.11.0-1.el9.x86_64 libosinfo-1.10.0-1.el9.x86_64 libpath_utils-0.2.1-53.el9.x86_64 libpcap-1.10.0-4.el9.x86_64 libpipeline-1.5.3-4.el9.x86_64 libpkgconf-1.7.3-10.el9.x86_64 libpng-1.6.37-12.el9.x86_64 libproxy-0.4.15-35.el9.x86_64 libproxy-webkitgtk4-0.4.15-35.el9.x86_64 libpsl-0.21.1-5.el9.x86_64 libpwquality-1.4.4-8.el9.x86_64 libref_array-0.1.5-53.el9.x86_64 librepo-1.19.0-1.el9.x86_64 libreport-filesystem-2.15.2-6.el9.noarch libseccomp-2.5.2-2.el9.x86_64 libselinux-3.6-3.el9.x86_64 libselinux-utils-3.6-3.el9.x86_64 libsemanage-3.6-5.el9.x86_64 libsepol-3.6-3.el9.x86_64 libsigsegv-2.13-4.el9.x86_64 libslirp-4.4.0-8.el9.x86_64 libsmartcols-2.37.4-21.el9.x86_64 libsolv-0.7.24-3.el9.x86_64 libsoup-2.72.0-10.el9.x86_64 libss-1.46.5-8.el9.x86_64 libssh-0.10.4-17.el9.x86_64 libssh-config-0.10.4-17.el9.noarch libsss_certmap-2.9.7-5.el9.x86_64 libsss_idmap-2.9.7-5.el9.x86_64 libsss_nss_idmap-2.9.7-5.el9.x86_64 libsss_sudo-2.9.7-5.el9.x86_64 libstdc++-11.5.0-14.el9.x86_64 libstdc++-devel-11.5.0-14.el9.x86_64 libstemmer-0-18.585svn.el9.x86_64 libsysfs-2.1.1-11.el9.x86_64 libtalloc-2.4.3-1.el9.x86_64 libtasn1-4.16.0-9.el9.x86_64 libtdb-1.4.14-1.el9.x86_64 libteam-1.31-16.el9.x86_64 libtevent-0.17.1-1.el9.x86_64 libtirpc-1.3.3-9.el9.x86_64 libtool-ltdl-2.4.6-46.el9.x86_64 libunistring-0.9.10-15.el9.x86_64 liburing-2.12-1.el9.x86_64 libuser-0.63-17.el9.x86_64 libutempter-1.2.1-6.el9.x86_64 libuuid-2.37.4-21.el9.x86_64 libverto-0.3.2-3.el9.x86_64 libverto-libev-0.3.2-3.el9.x86_64 libvirt-client-11.10.0-2.el9.x86_64 libvirt-libs-11.10.0-2.el9.x86_64 libxcrypt-4.4.18-3.el9.x86_64 libxcrypt-compat-4.4.18-3.el9.x86_64 libxcrypt-devel-4.4.18-3.el9.x86_64 libxml2-2.9.13-14.el9.x86_64 libxml2-devel-2.9.13-14.el9.x86_64 libxslt-1.1.34-12.el9.x86_64 libxslt-devel-1.1.34-12.el9.x86_64 libyaml-0.2.5-7.el9.x86_64 libzstd-1.5.5-1.el9.x86_64 llvm-filesystem-21.1.7-1.el9.x86_64 llvm-libs-21.1.7-1.el9.x86_64 lmdb-libs-0.9.29-3.el9.x86_64 logrotate-3.18.0-12.el9.x86_64 lshw-B.02.20-4.el9.x86_64 lsscsi-0.32-6.el9.x86_64 lua-libs-5.4.4-4.el9.x86_64 lua-srpm-macros-1-6.el9.noarch lz4-libs-1.9.3-5.el9.x86_64 lzo-2.10-7.el9.x86_64 make-4.3-8.el9.x86_64 man-db-2.9.3-9.el9.x86_64 microcode_ctl-20251111-1.el9.noarch 
mpfr-4.1.0-8.el9.x86_64 ncurses-6.2-12.20210508.el9.x86_64 ncurses-base-6.2-12.20210508.el9.noarch ncurses-c++-libs-6.2-12.20210508.el9.x86_64 ncurses-devel-6.2-12.20210508.el9.x86_64 ncurses-libs-6.2-12.20210508.el9.x86_64 netavark-1.16.0-1.el9.x86_64 nettle-3.10.1-1.el9.x86_64 NetworkManager-1.54.3-2.el9.x86_64 NetworkManager-libnm-1.54.3-2.el9.x86_64 NetworkManager-team-1.54.3-2.el9.x86_64 NetworkManager-tui-1.54.3-2.el9.x86_64 newt-0.52.21-11.el9.x86_64 nfs-utils-2.5.4-41.el9.x86_64 nftables-1.0.9-6.el9.x86_64 npth-1.6-8.el9.x86_64 numactl-libs-2.0.19-3.el9.x86_64 ocaml-srpm-macros-6-6.el9.noarch oddjob-0.34.7-7.el9.x86_64 oddjob-mkhomedir-0.34.7-7.el9.x86_64 oniguruma-6.9.6-1.el9.6.x86_64 openblas-srpm-macros-2-11.el9.noarch openldap-2.6.8-4.el9.x86_64 openldap-devel-2.6.8-4.el9.x86_64 openssh-9.9p1-3.el9.x86_64 openssh-clients-9.9p1-3.el9.x86_64 openssh-server-9.9p1-3.el9.x86_64 openssl-3.5.1-6.el9.x86_64 openssl-devel-3.5.1-6.el9.x86_64 openssl-fips-provider-3.5.1-6.el9.x86_64 openssl-libs-3.5.1-6.el9.x86_64 osinfo-db-20250606-1.el9.noarch osinfo-db-tools-1.10.0-1.el9.x86_64 os-prober-1.77-12.el9.x86_64 p11-kit-0.25.10-1.el9.x86_64 p11-kit-trust-0.25.10-1.el9.x86_64 pam-1.5.1-28.el9.x86_64 parted-3.5-3.el9.x86_64 passt-0^20251210.gd04c480-2.el9.x86_64 passt-selinux-0^20251210.gd04c480-2.el9.noarch passwd-0.80-12.el9.x86_64 patch-2.7.6-16.el9.x86_64 pciutils-libs-3.7.0-7.el9.x86_64 pcre2-10.40-6.el9.x86_64 pcre2-syntax-10.40-6.el9.noarch pcre-8.44-4.el9.x86_64 perl-AutoLoader-5.74-483.el9.noarch perl-B-1.80-483.el9.x86_64 perl-base-2.27-483.el9.noarch perl-Carp-1.50-460.el9.noarch perl-Class-Struct-0.66-483.el9.noarch perl-constant-1.33-461.el9.noarch perl-Data-Dumper-2.174-462.el9.x86_64 perl-Digest-1.19-4.el9.noarch perl-Digest-MD5-2.58-4.el9.x86_64 perl-DynaLoader-1.47-483.el9.x86_64 perl-Encode-3.08-462.el9.x86_64 perl-Errno-1.30-483.el9.x86_64 perl-Error-0.17029-7.el9.noarch perl-Exporter-5.74-461.el9.noarch perl-Fcntl-1.13-483.el9.x86_64 perl-File-Basename-2.85-483.el9.noarch perl-File-Find-1.37-483.el9.noarch perl-FileHandle-2.03-483.el9.noarch perl-File-Path-2.18-4.el9.noarch perl-File-stat-1.09-483.el9.noarch perl-File-Temp-0.231.100-4.el9.noarch perl-Getopt-Long-2.52-4.el9.noarch perl-Getopt-Std-1.12-483.el9.noarch perl-Git-2.47.3-1.el9.noarch perl-HTTP-Tiny-0.076-462.el9.noarch perl-if-0.60.800-483.el9.noarch perl-interpreter-5.32.1-483.el9.x86_64 perl-IO-1.43-483.el9.x86_64 perl-IO-Socket-IP-0.41-5.el9.noarch perl-IO-Socket-SSL-2.073-2.el9.noarch perl-IPC-Open3-1.21-483.el9.noarch perl-lib-0.65-483.el9.x86_64 perl-libnet-3.13-4.el9.noarch perl-libs-5.32.1-483.el9.x86_64 perl-MIME-Base64-3.16-4.el9.x86_64 perl-Mozilla-CA-20200520-6.el9.noarch perl-mro-1.23-483.el9.x86_64 perl-NDBM_File-1.15-483.el9.x86_64 perl-Net-SSLeay-1.94-3.el9.x86_64 perl-overload-1.31-483.el9.noarch perl-overloading-0.02-483.el9.noarch perl-parent-0.238-460.el9.noarch perl-PathTools-3.78-461.el9.x86_64 perl-Pod-Escapes-1.07-460.el9.noarch perl-podlators-4.14-460.el9.noarch perl-Pod-Perldoc-3.28.01-461.el9.noarch perl-Pod-Simple-3.42-4.el9.noarch perl-Pod-Usage-2.01-4.el9.noarch perl-POSIX-1.94-483.el9.x86_64 perl-Scalar-List-Utils-1.56-462.el9.x86_64 perl-SelectSaver-1.02-483.el9.noarch perl-Socket-2.031-4.el9.x86_64 perl-srpm-macros-1-41.el9.noarch perl-Storable-3.21-460.el9.x86_64 perl-subs-1.03-483.el9.noarch perl-Symbol-1.08-483.el9.noarch perl-Term-ANSIColor-5.01-461.el9.noarch perl-Term-Cap-1.17-460.el9.noarch perl-TermReadKey-2.38-11.el9.x86_64 perl-Text-ParseWords-3.30-460.el9.noarch 
perl-Text-Tabs+Wrap-2013.0523-460.el9.noarch perl-Time-Local-1.300-7.el9.noarch perl-URI-5.09-3.el9.noarch perl-vars-1.05-483.el9.noarch pigz-2.5-4.el9.x86_64 pkgconf-1.7.3-10.el9.x86_64 pkgconf-m4-1.7.3-10.el9.noarch pkgconf-pkg-config-1.7.3-10.el9.x86_64 podman-5.6.0-2.el9.x86_64 policycoreutils-3.6-4.el9.x86_64 policycoreutils-python-utils-3.6-4.el9.noarch polkit-0.117-14.el9.x86_64 polkit-libs-0.117-14.el9.x86_64 polkit-pkla-compat-0.1-21.el9.x86_64 popt-1.18-8.el9.x86_64 prefixdevname-0.1.0-8.el9.x86_64 procps-ng-3.3.17-14.el9.x86_64 protobuf-c-1.3.3-13.el9.x86_64 psmisc-23.4-3.el9.x86_64 publicsuffix-list-dafsa-20210518-3.el9.noarch pyproject-srpm-macros-1.18.5-1.el9.noarch python3-3.9.25-3.el9.x86_64 python3-argcomplete-1.12.0-5.el9.noarch python3-attrs-20.3.0-7.el9.noarch python3-audit-3.1.5-8.el9.x86_64 python3-babel-2.9.1-2.el9.noarch python3-cffi-1.14.5-5.el9.x86_64 python3-chardet-4.0.0-5.el9.noarch python3-configobj-5.0.6-25.el9.noarch python3-cryptography-36.0.1-5.el9.x86_64 python3-dasbus-1.7-1.el9.noarch python3-dateutil-2.9.0.post0-1.el9.noarch python3-dbus-1.2.18-2.el9.x86_64 python3-devel-3.9.25-3.el9.x86_64 python3-distro-1.5.0-7.el9.noarch python3-dnf-4.14.0-31.el9.noarch python3-dnf-plugins-core-4.3.0-25.el9.noarch python3-enchant-3.2.0-5.el9.noarch python3-file-magic-5.39-16.el9.noarch python3-gobject-base-3.40.1-6.el9.x86_64 python3-gobject-base-noarch-3.40.1-6.el9.noarch python3-gpg-1.15.1-6.el9.x86_64 python3-hawkey-0.69.0-16.el9.x86_64 python3-idna-2.10-7.el9.1.noarch python3-jinja2-2.11.3-8.el9.noarch python3-jmespath-1.0.1-1.el9.noarch python3-jsonpatch-1.21-16.el9.noarch python3-jsonpointer-2.0-4.el9.noarch python3-jsonschema-3.2.0-13.el9.noarch python3-libcomps-0.1.18-1.el9.x86_64 python3-libdnf-0.69.0-16.el9.x86_64 python3-libs-3.9.25-3.el9.x86_64 python3-libselinux-3.6-3.el9.x86_64 python3-libsemanage-3.6-5.el9.x86_64 python3-libvirt-11.10.0-1.el9.x86_64 python3-libxml2-2.9.13-14.el9.x86_64 python3-lxml-4.6.5-3.el9.x86_64 python3-markupsafe-1.1.1-12.el9.x86_64 python3-netaddr-0.10.1-3.el9.noarch python3-netifaces-0.10.6-15.el9.x86_64 python3-oauthlib-3.1.1-5.el9.noarch python3-packaging-20.9-5.el9.noarch python3-pexpect-4.8.0-7.el9.noarch python3-pip-21.3.1-1.el9.noarch python3-pip-wheel-21.3.1-1.el9.noarch python3-ply-3.11-14.el9.noarch python3-policycoreutils-3.6-4.el9.noarch python3-prettytable-0.7.2-27.el9.noarch python3-ptyprocess-0.6.0-12.el9.noarch python3-pycparser-2.20-6.el9.noarch python3-pyparsing-2.4.7-9.el9.noarch python3-pyrsistent-0.17.3-8.el9.x86_64 python3-pyserial-3.4-12.el9.noarch python3-pysocks-1.7.1-12.el9.noarch python3-pytz-2021.1-5.el9.noarch python3-pyyaml-5.4.1-6.el9.x86_64 python3-requests-2.25.1-10.el9.noarch python3-resolvelib-0.5.4-5.el9.noarch python3-rpm-4.16.1.3-40.el9.x86_64 python3-rpm-generators-12-9.el9.noarch python3-rpm-macros-3.9-54.el9.noarch python3-setools-4.4.4-1.el9.x86_64 python3-setuptools-53.0.0-15.el9.noarch python3-setuptools-wheel-53.0.0-15.el9.noarch python3-six-1.15.0-9.el9.noarch python3-systemd-234-19.el9.x86_64 python3-urllib3-1.26.5-6.el9.noarch python-rpm-macros-3.9-54.el9.noarch python-srpm-macros-3.9-54.el9.noarch python-unversioned-command-3.9.25-3.el9.noarch qemu-guest-agent-10.1.0-10.el9.x86_64 qt5-srpm-macros-5.15.9-1.el9.noarch quota-4.09-4.el9.x86_64 quota-nls-4.09-4.el9.noarch readline-8.1-4.el9.x86_64 readline-devel-8.1-4.el9.x86_64 redhat-rpm-config-210-1.el9.noarch rootfiles-8.1-35.el9.noarch rpcbind-1.2.6-7.el9.x86_64 rpm-4.16.1.3-40.el9.x86_64 rpm-build-4.16.1.3-40.el9.x86_64 
rpm-build-libs-4.16.1.3-40.el9.x86_64 rpm-libs-4.16.1.3-40.el9.x86_64 rpmlint-1.11-19.el9.noarch rpm-plugin-audit-4.16.1.3-40.el9.x86_64 rpm-plugin-selinux-4.16.1.3-40.el9.x86_64 rpm-plugin-systemd-inhibit-4.16.1.3-40.el9.x86_64 rpm-sign-4.16.1.3-40.el9.x86_64 rpm-sign-libs-4.16.1.3-40.el9.x86_64 rsync-3.2.5-4.el9.x86_64 rsyslog-8.2510.0-2.el9.x86_64 rsyslog-logrotate-8.2510.0-2.el9.x86_64 ruby-3.0.7-165.el9.x86_64 ruby-default-gems-3.0.7-165.el9.noarch ruby-devel-3.0.7-165.el9.x86_64 rubygem-bigdecimal-3.0.0-165.el9.x86_64 rubygem-bundler-2.2.33-165.el9.noarch rubygem-io-console-0.5.7-165.el9.x86_64 rubygem-json-2.5.1-165.el9.x86_64 rubygem-psych-3.3.2-165.el9.x86_64 rubygem-rdoc-6.3.4.1-165.el9.noarch rubygems-3.2.33-165.el9.noarch ruby-libs-3.0.7-165.el9.x86_64 rust-srpm-macros-17-4.el9.noarch sed-4.8-9.el9.x86_64 selinux-policy-38.1.71-1.el9.noarch selinux-policy-targeted-38.1.71-1.el9.noarch setroubleshoot-plugins-3.3.14-4.el9.noarch setroubleshoot-server-3.3.35-2.el9.x86_64 setup-2.13.7-10.el9.noarch sg3_utils-1.47-10.el9.x86_64 sg3_utils-libs-1.47-10.el9.x86_64 shadow-utils-4.9-16.el9.x86_64 shadow-utils-subid-4.9-16.el9.x86_64 shared-mime-info-2.1-5.el9.x86_64 skopeo-1.20.0-2.el9.x86_64 slang-2.3.2-11.el9.x86_64 slirp4netns-1.3.3-1.el9.x86_64 snappy-1.1.8-8.el9.x86_64 sos-4.10.1-2.el9.noarch sqlite-3.34.1-9.el9.x86_64 sqlite-libs-3.34.1-9.el9.x86_64 squashfs-tools-4.4-10.git1.el9.x86_64 sscg-4.0.3-2.el9.x86_64 sshpass-1.09-4.el9.x86_64 sssd-client-2.9.7-5.el9.x86_64 sssd-common-2.9.7-5.el9.x86_64 sssd-kcm-2.9.7-5.el9.x86_64 sssd-nfs-idmap-2.9.7-5.el9.x86_64 sudo-1.9.5p2-13.el9.x86_64 systemd-252-64.el9.x86_64 systemd-devel-252-64.el9.x86_64 systemd-libs-252-64.el9.x86_64 systemd-pam-252-64.el9.x86_64 systemd-rpm-macros-252-64.el9.noarch systemd-udev-252-64.el9.x86_64 tar-1.34-9.el9.x86_64 tcl-8.6.10-7.el9.x86_64 tcpdump-4.99.0-9.el9.x86_64 teamd-1.31-16.el9.x86_64 time-1.9-18.el9.x86_64 tmux-3.2a-5.el9.x86_64 tpm2-tss-3.2.3-1.el9.x86_64 traceroute-2.1.1-1.el9.x86_64 tzdata-2025c-1.el9.noarch unzip-6.0-59.el9.x86_64 userspace-rcu-0.12.1-6.el9.x86_64 util-linux-2.37.4-21.el9.x86_64 util-linux-core-2.37.4-21.el9.x86_64 vim-minimal-8.2.2637-23.el9.x86_64 virt-install-5.0.0-1.el9.noarch virt-manager-common-5.0.0-1.el9.noarch webkit2gtk3-jsc-2.50.4-1.el9.x86_64 wget-1.21.1-8.el9.x86_64 which-2.21-30.el9.x86_64 xfsprogs-6.4.0-7.el9.x86_64 xmlstarlet-1.6.1-20.el9.x86_64 xorriso-1.5.4-5.el9.x86_64 xz-5.2.5-8.el9.x86_64 xz-devel-5.2.5-8.el9.x86_64 xz-libs-5.2.5-8.el9.x86_64 yajl-2.1.0-25.el9.x86_64 yum-4.14.0-31.el9.noarch yum-utils-4.3.0-25.el9.noarch zip-3.0-35.el9.x86_64 zlib-1.2.11-41.el9.x86_64 zlib-devel-1.2.11-41.el9.x86_64 zstd-1.5.5-1.el9.x86_64 home/zuul/zuul-output/logs/python.log0000644000175000017500000000520115134437401017112 0ustar zuulzuulPython 3.9.25 pip 21.3.1 from /usr/lib/python3.9/site-packages/pip (python 3.9) ansible [core 2.15.13] config file = /etc/ansible/ansible.cfg configured module search path = ['/home/zuul/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules'] ansible python module location = /home/zuul/.local/lib/python3.9/site-packages/ansible ansible collection location = /home/zuul/.ansible/collections:/usr/share/ansible/collections executable location = /home/zuul/.local/bin/ansible python version = 3.9.25 (main, Jan 14 2026, 00:00:00) [GCC 11.5.0 20240719 (Red Hat 11.5.0-14)] (/usr/bin/python3) jinja version = 3.1.6 libyaml = True ansible-core==2.15.13 argcomplete==1.12.0 attrs==25.4.0 autopage==0.5.2 Babel==2.9.1 certifi==2026.1.4 cffi==2.0.0 
chardet==4.0.0 charset-normalizer==3.4.4 cliff==4.9.1 cloud-init==24.4 cmd2==2.7.0 cockpit @ file:///builddir/build/BUILD/cockpit-348/tmp/wheel/cockpit-348-py3-none-any.whl configobj==5.0.6 cryptography==43.0.3 dasbus==1.7 dbus-python==1.2.18 debtcollector==3.0.0 decorator==5.2.1 distro==1.5.0 dogpile.cache==1.4.1 durationpy==0.10 file-magic==0.4.0 google-auth==2.47.0 gpg==1.15.1 idna==2.10 importlib-resources==5.0.7 importlib_metadata==8.7.1 iso8601==2.1.0 Jinja2==3.1.6 jmespath==1.0.1 jsonpatch==1.21 jsonpointer==2.0 jsonschema==4.23.0 jsonschema-specifications==2025.9.1 keystoneauth1==5.11.1 kubernetes==31.0.0 kubernetes-validate==1.31.0 libcomps==0.1.18 libvirt-python==11.10.0 lxml==4.6.5 markdown-it-py==3.0.0 MarkupSafe==3.0.3 mdurl==0.1.2 msgpack==1.1.2 netaddr==1.3.0 netifaces==0.10.6 oauthlib==3.2.2 openstacksdk==4.1.0 os-service-types==1.7.0 osc-lib==4.0.2 oslo.config==10.0.0 oslo.i18n==6.6.0 oslo.serialization==5.8.0 oslo.utils==9.1.0 packaging==20.9 pbr==7.0.3 pexpect==4.8.0 platformdirs==4.4.0 ply==3.11 prettytable==0.7.2 psutil==7.2.1 ptyprocess==0.6.0 pyasn1==0.6.2 pyasn1_modules==0.4.2 pycparser==2.23 pyenchant==3.2.0 Pygments==2.19.2 PyGObject==3.40.1 pyOpenSSL==24.2.1 pyparsing==2.4.7 pyperclip==1.11.0 pyrsistent==0.17.3 pyserial==3.4 PySocks==1.7.1 python-cinderclient==9.7.0 python-dateutil==2.9.0.post0 python-keystoneclient==5.6.0 python-openstackclient==8.0.0 pytz==2021.1 PyYAML==5.4.1 referencing==0.36.2 requests==2.32.5 requests-oauthlib==2.0.0 requestsexceptions==1.4.0 resolvelib==0.5.4 rfc3986==2.0.0 rich==14.2.0 rich-argparse==1.7.2 rpds-py==0.27.1 rpm==4.16.1.3 rsa==4.9.1 selinux==3.6 sepolicy==3.6 setools==4.4.4 setroubleshoot @ file:///builddir/build/BUILD/setroubleshoot-3.3.35/src six==1.15.0 sos==4.10.1 stevedore==5.5.0 systemd-python==234 typing_extensions==4.15.0 tzdata==2025.3 urllib3==1.26.5 wcwidth==0.3.0 websocket-client==1.9.0 wrapt==2.0.1 zipp==3.23.0 home/zuul/zuul-output/logs/dmesg.log0000644000175000017500000014753015134437401016704 0ustar zuulzuul[Thu Jan 22 11:49:48 2026] Linux version 5.14.0-661.el9.x86_64 (mockbuild@x86-05.stream.rdu2.redhat.com) (gcc (GCC) 11.5.0 20240719 (Red Hat 11.5.0-14), GNU ld version 2.35.2-69.el9) #1 SMP PREEMPT_DYNAMIC Fri Jan 16 09:19:22 UTC 2026 [Thu Jan 22 11:49:48 2026] The list of certified hardware and cloud instances for Red Hat Enterprise Linux 9 can be viewed at the Red Hat Ecosystem Catalog, https://catalog.redhat.com. 
[Thu Jan 22 11:49:48 2026] Command line: BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-661.el9.x86_64 root=UUID=22ac9141-3960-4912-b20e-19fc8a328d40 ro console=ttyS0,115200n8 no_timer_check net.ifnames=0 crashkernel=1G-2G:192M,2G-64G:256M,64G-:512M [Thu Jan 22 11:49:48 2026] BIOS-provided physical RAM map: [Thu Jan 22 11:49:48 2026] BIOS-e820: [mem 0x0000000000000000-0x000000000009fbff] usable [Thu Jan 22 11:49:48 2026] BIOS-e820: [mem 0x000000000009fc00-0x000000000009ffff] reserved [Thu Jan 22 11:49:48 2026] BIOS-e820: [mem 0x00000000000f0000-0x00000000000fffff] reserved [Thu Jan 22 11:49:48 2026] BIOS-e820: [mem 0x0000000000100000-0x00000000bffdafff] usable [Thu Jan 22 11:49:48 2026] BIOS-e820: [mem 0x00000000bffdb000-0x00000000bfffffff] reserved [Thu Jan 22 11:49:48 2026] BIOS-e820: [mem 0x00000000feffc000-0x00000000feffffff] reserved [Thu Jan 22 11:49:48 2026] BIOS-e820: [mem 0x00000000fffc0000-0x00000000ffffffff] reserved [Thu Jan 22 11:49:48 2026] BIOS-e820: [mem 0x0000000100000000-0x000000023fffffff] usable [Thu Jan 22 11:49:48 2026] NX (Execute Disable) protection: active [Thu Jan 22 11:49:48 2026] APIC: Static calls initialized [Thu Jan 22 11:49:48 2026] SMBIOS 2.8 present. [Thu Jan 22 11:49:48 2026] DMI: OpenStack Foundation OpenStack Nova, BIOS 1.15.0-1 04/01/2014 [Thu Jan 22 11:49:48 2026] Hypervisor detected: KVM [Thu Jan 22 11:49:48 2026] kvm-clock: Using msrs 4b564d01 and 4b564d00 [Thu Jan 22 11:49:48 2026] kvm-clock: using sched offset of 3126263002 cycles [Thu Jan 22 11:49:48 2026] clocksource: kvm-clock: mask: 0xffffffffffffffff max_cycles: 0x1cd42e4dffb, max_idle_ns: 881590591483 ns [Thu Jan 22 11:49:48 2026] tsc: Detected 2800.000 MHz processor [Thu Jan 22 11:49:48 2026] e820: update [mem 0x00000000-0x00000fff] usable ==> reserved [Thu Jan 22 11:49:48 2026] e820: remove [mem 0x000a0000-0x000fffff] usable [Thu Jan 22 11:49:48 2026] last_pfn = 0x240000 max_arch_pfn = 0x400000000 [Thu Jan 22 11:49:48 2026] MTRR map: 4 entries (3 fixed + 1 variable; max 19), built from 8 variable MTRRs [Thu Jan 22 11:49:48 2026] x86/PAT: Configuration [0-7]: WB WC UC- UC WB WP UC- WT [Thu Jan 22 11:49:48 2026] last_pfn = 0xbffdb max_arch_pfn = 0x400000000 [Thu Jan 22 11:49:48 2026] found SMP MP-table at [mem 0x000f5ae0-0x000f5aef] [Thu Jan 22 11:49:48 2026] Using GB pages for direct mapping [Thu Jan 22 11:49:48 2026] RAMDISK: [mem 0x2d426000-0x32a0afff] [Thu Jan 22 11:49:48 2026] ACPI: Early table checksum verification disabled [Thu Jan 22 11:49:48 2026] ACPI: RSDP 0x00000000000F5AA0 000014 (v00 BOCHS ) [Thu Jan 22 11:49:48 2026] ACPI: RSDT 0x00000000BFFE16BD 000030 (v01 BOCHS BXPC 00000001 BXPC 00000001) [Thu Jan 22 11:49:48 2026] ACPI: FACP 0x00000000BFFE1571 000074 (v01 BOCHS BXPC 00000001 BXPC 00000001) [Thu Jan 22 11:49:48 2026] ACPI: DSDT 0x00000000BFFDFC80 0018F1 (v01 BOCHS BXPC 00000001 BXPC 00000001) [Thu Jan 22 11:49:48 2026] ACPI: FACS 0x00000000BFFDFC40 000040 [Thu Jan 22 11:49:48 2026] ACPI: APIC 0x00000000BFFE15E5 0000B0 (v01 BOCHS BXPC 00000001 BXPC 00000001) [Thu Jan 22 11:49:48 2026] ACPI: WAET 0x00000000BFFE1695 000028 (v01 BOCHS BXPC 00000001 BXPC 00000001) [Thu Jan 22 11:49:48 2026] ACPI: Reserving FACP table memory at [mem 0xbffe1571-0xbffe15e4] [Thu Jan 22 11:49:48 2026] ACPI: Reserving DSDT table memory at [mem 0xbffdfc80-0xbffe1570] [Thu Jan 22 11:49:48 2026] ACPI: Reserving FACS table memory at [mem 0xbffdfc40-0xbffdfc7f] [Thu Jan 22 11:49:48 2026] ACPI: Reserving APIC table memory at [mem 0xbffe15e5-0xbffe1694] [Thu Jan 22 11:49:48 2026] ACPI: Reserving WAET table 
memory at [mem 0xbffe1695-0xbffe16bc] [Thu Jan 22 11:49:48 2026] No NUMA configuration found [Thu Jan 22 11:49:48 2026] Faking a node at [mem 0x0000000000000000-0x000000023fffffff] [Thu Jan 22 11:49:48 2026] NODE_DATA(0) allocated [mem 0x23ffd5000-0x23fffffff] [Thu Jan 22 11:49:48 2026] crashkernel reserved: 0x00000000af000000 - 0x00000000bf000000 (256 MB) [Thu Jan 22 11:49:48 2026] Zone ranges: [Thu Jan 22 11:49:48 2026] DMA [mem 0x0000000000001000-0x0000000000ffffff] [Thu Jan 22 11:49:48 2026] DMA32 [mem 0x0000000001000000-0x00000000ffffffff] [Thu Jan 22 11:49:48 2026] Normal [mem 0x0000000100000000-0x000000023fffffff] [Thu Jan 22 11:49:48 2026] Device empty [Thu Jan 22 11:49:48 2026] Movable zone start for each node [Thu Jan 22 11:49:48 2026] Early memory node ranges [Thu Jan 22 11:49:48 2026] node 0: [mem 0x0000000000001000-0x000000000009efff] [Thu Jan 22 11:49:48 2026] node 0: [mem 0x0000000000100000-0x00000000bffdafff] [Thu Jan 22 11:49:48 2026] node 0: [mem 0x0000000100000000-0x000000023fffffff] [Thu Jan 22 11:49:48 2026] Initmem setup node 0 [mem 0x0000000000001000-0x000000023fffffff] [Thu Jan 22 11:49:48 2026] On node 0, zone DMA: 1 pages in unavailable ranges [Thu Jan 22 11:49:48 2026] On node 0, zone DMA: 97 pages in unavailable ranges [Thu Jan 22 11:49:48 2026] On node 0, zone Normal: 37 pages in unavailable ranges [Thu Jan 22 11:49:48 2026] ACPI: PM-Timer IO Port: 0x608 [Thu Jan 22 11:49:48 2026] ACPI: LAPIC_NMI (acpi_id[0xff] dfl dfl lint[0x1]) [Thu Jan 22 11:49:48 2026] IOAPIC[0]: apic_id 0, version 17, address 0xfec00000, GSI 0-23 [Thu Jan 22 11:49:48 2026] ACPI: INT_SRC_OVR (bus 0 bus_irq 0 global_irq 2 dfl dfl) [Thu Jan 22 11:49:48 2026] ACPI: INT_SRC_OVR (bus 0 bus_irq 5 global_irq 5 high level) [Thu Jan 22 11:49:48 2026] ACPI: INT_SRC_OVR (bus 0 bus_irq 9 global_irq 9 high level) [Thu Jan 22 11:49:48 2026] ACPI: INT_SRC_OVR (bus 0 bus_irq 10 global_irq 10 high level) [Thu Jan 22 11:49:48 2026] ACPI: INT_SRC_OVR (bus 0 bus_irq 11 global_irq 11 high level) [Thu Jan 22 11:49:48 2026] ACPI: Using ACPI (MADT) for SMP configuration information [Thu Jan 22 11:49:48 2026] TSC deadline timer available [Thu Jan 22 11:49:48 2026] CPU topo: Max. logical packages: 8 [Thu Jan 22 11:49:48 2026] CPU topo: Max. logical dies: 8 [Thu Jan 22 11:49:48 2026] CPU topo: Max. dies per package: 1 [Thu Jan 22 11:49:48 2026] CPU topo: Max. threads per core: 1 [Thu Jan 22 11:49:48 2026] CPU topo: Num. cores per package: 1 [Thu Jan 22 11:49:48 2026] CPU topo: Num. 
threads per package: 1 [Thu Jan 22 11:49:48 2026] CPU topo: Allowing 8 present CPUs plus 0 hotplug CPUs [Thu Jan 22 11:49:48 2026] kvm-guest: APIC: eoi() replaced with kvm_guest_apic_eoi_write() [Thu Jan 22 11:49:48 2026] PM: hibernation: Registered nosave memory: [mem 0x00000000-0x00000fff] [Thu Jan 22 11:49:48 2026] PM: hibernation: Registered nosave memory: [mem 0x0009f000-0x0009ffff] [Thu Jan 22 11:49:48 2026] PM: hibernation: Registered nosave memory: [mem 0x000a0000-0x000effff] [Thu Jan 22 11:49:48 2026] PM: hibernation: Registered nosave memory: [mem 0x000f0000-0x000fffff] [Thu Jan 22 11:49:48 2026] PM: hibernation: Registered nosave memory: [mem 0xbffdb000-0xbfffffff] [Thu Jan 22 11:49:48 2026] PM: hibernation: Registered nosave memory: [mem 0xc0000000-0xfeffbfff] [Thu Jan 22 11:49:48 2026] PM: hibernation: Registered nosave memory: [mem 0xfeffc000-0xfeffffff] [Thu Jan 22 11:49:48 2026] PM: hibernation: Registered nosave memory: [mem 0xff000000-0xfffbffff] [Thu Jan 22 11:49:48 2026] PM: hibernation: Registered nosave memory: [mem 0xfffc0000-0xffffffff] [Thu Jan 22 11:49:48 2026] [mem 0xc0000000-0xfeffbfff] available for PCI devices [Thu Jan 22 11:49:48 2026] Booting paravirtualized kernel on KVM [Thu Jan 22 11:49:48 2026] clocksource: refined-jiffies: mask: 0xffffffff max_cycles: 0xffffffff, max_idle_ns: 1910969940391419 ns [Thu Jan 22 11:49:48 2026] setup_percpu: NR_CPUS:8192 nr_cpumask_bits:8 nr_cpu_ids:8 nr_node_ids:1 [Thu Jan 22 11:49:48 2026] percpu: Embedded 64 pages/cpu s225280 r8192 d28672 u262144 [Thu Jan 22 11:49:48 2026] pcpu-alloc: s225280 r8192 d28672 u262144 alloc=1*2097152 [Thu Jan 22 11:49:48 2026] pcpu-alloc: [0] 0 1 2 3 4 5 6 7 [Thu Jan 22 11:49:48 2026] kvm-guest: PV spinlocks disabled, no host support [Thu Jan 22 11:49:48 2026] Kernel command line: BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-661.el9.x86_64 root=UUID=22ac9141-3960-4912-b20e-19fc8a328d40 ro console=ttyS0,115200n8 no_timer_check net.ifnames=0 crashkernel=1G-2G:192M,2G-64G:256M,64G-:512M [Thu Jan 22 11:49:48 2026] Unknown kernel command line parameters "BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-661.el9.x86_64", will be passed to user space. [Thu Jan 22 11:49:48 2026] random: crng init done [Thu Jan 22 11:49:48 2026] Dentry cache hash table entries: 1048576 (order: 11, 8388608 bytes, linear) [Thu Jan 22 11:49:48 2026] Inode-cache hash table entries: 524288 (order: 10, 4194304 bytes, linear) [Thu Jan 22 11:49:48 2026] Fallback order for Node 0: 0 [Thu Jan 22 11:49:48 2026] Built 1 zonelists, mobility grouping on. Total pages: 2064091 [Thu Jan 22 11:49:48 2026] Policy zone: Normal [Thu Jan 22 11:49:48 2026] mem auto-init: stack:off, heap alloc:off, heap free:off [Thu Jan 22 11:49:48 2026] software IO TLB: area num 8. [Thu Jan 22 11:49:48 2026] SLUB: HWalign=64, Order=0-3, MinObjects=0, CPUs=8, Nodes=1 [Thu Jan 22 11:49:48 2026] ftrace: allocating 49417 entries in 194 pages [Thu Jan 22 11:49:48 2026] ftrace: allocated 194 pages with 3 groups [Thu Jan 22 11:49:48 2026] Dynamic Preempt: voluntary [Thu Jan 22 11:49:48 2026] rcu: Preemptible hierarchical RCU implementation. [Thu Jan 22 11:49:48 2026] rcu: RCU event tracing is enabled. [Thu Jan 22 11:49:48 2026] rcu: RCU restricting CPUs from NR_CPUS=8192 to nr_cpu_ids=8. [Thu Jan 22 11:49:48 2026] Trampoline variant of Tasks RCU enabled. [Thu Jan 22 11:49:48 2026] Rude variant of Tasks RCU enabled. [Thu Jan 22 11:49:48 2026] Tracing variant of Tasks RCU enabled. 
[Thu Jan 22 11:49:48 2026] rcu: RCU calculated value of scheduler-enlistment delay is 100 jiffies. [Thu Jan 22 11:49:48 2026] rcu: Adjusting geometry for rcu_fanout_leaf=16, nr_cpu_ids=8 [Thu Jan 22 11:49:48 2026] RCU Tasks: Setting shift to 3 and lim to 1 rcu_task_cb_adjust=1 rcu_task_cpu_ids=8. [Thu Jan 22 11:49:48 2026] RCU Tasks Rude: Setting shift to 3 and lim to 1 rcu_task_cb_adjust=1 rcu_task_cpu_ids=8. [Thu Jan 22 11:49:48 2026] RCU Tasks Trace: Setting shift to 3 and lim to 1 rcu_task_cb_adjust=1 rcu_task_cpu_ids=8. [Thu Jan 22 11:49:48 2026] NR_IRQS: 524544, nr_irqs: 488, preallocated irqs: 16 [Thu Jan 22 11:49:48 2026] rcu: srcu_init: Setting srcu_struct sizes based on contention. [Thu Jan 22 11:49:48 2026] kfence: initialized - using 2097152 bytes for 255 objects at 0x(____ptrval____)-0x(____ptrval____) [Thu Jan 22 11:49:48 2026] Console: colour VGA+ 80x25 [Thu Jan 22 11:49:48 2026] printk: console [ttyS0] enabled [Thu Jan 22 11:49:48 2026] ACPI: Core revision 20230331 [Thu Jan 22 11:49:48 2026] APIC: Switch to symmetric I/O mode setup [Thu Jan 22 11:49:48 2026] x2apic enabled [Thu Jan 22 11:49:48 2026] APIC: Switched APIC routing to: physical x2apic [Thu Jan 22 11:49:48 2026] tsc: Marking TSC unstable due to TSCs unsynchronized [Thu Jan 22 11:49:48 2026] Calibrating delay loop (skipped) preset value.. 5600.00 BogoMIPS (lpj=2800000) [Thu Jan 22 11:49:48 2026] x86/cpu: User Mode Instruction Prevention (UMIP) activated [Thu Jan 22 11:49:48 2026] Last level iTLB entries: 4KB 512, 2MB 255, 4MB 127 [Thu Jan 22 11:49:48 2026] Last level dTLB entries: 4KB 512, 2MB 255, 4MB 127, 1GB 0 [Thu Jan 22 11:49:48 2026] Spectre V1 : Mitigation: usercopy/swapgs barriers and __user pointer sanitization [Thu Jan 22 11:49:48 2026] Spectre V2 : Mitigation: Retpolines [Thu Jan 22 11:49:48 2026] Spectre V2 : Spectre v2 / SpectreRSB: Filling RSB on context switch and VMEXIT [Thu Jan 22 11:49:48 2026] Spectre V2 : Enabling Speculation Barrier for firmware calls [Thu Jan 22 11:49:48 2026] RETBleed: Mitigation: untrained return thunk [Thu Jan 22 11:49:48 2026] Spectre V2 : mitigation: Enabling conditional Indirect Branch Prediction Barrier [Thu Jan 22 11:49:48 2026] Speculative Store Bypass: Mitigation: Speculative Store Bypass disabled via prctl [Thu Jan 22 11:49:48 2026] Speculative Return Stack Overflow: IBPB-extending microcode not applied! [Thu Jan 22 11:49:48 2026] Speculative Return Stack Overflow: WARNING: See https://kernel.org/doc/html/latest/admin-guide/hw-vuln/srso.html for mitigation options. [Thu Jan 22 11:49:48 2026] x86/bugs: return thunk changed [Thu Jan 22 11:49:48 2026] Speculative Return Stack Overflow: Vulnerable: Safe RET, no microcode [Thu Jan 22 11:49:48 2026] x86/fpu: Supporting XSAVE feature 0x001: 'x87 floating point registers' [Thu Jan 22 11:49:48 2026] x86/fpu: Supporting XSAVE feature 0x002: 'SSE registers' [Thu Jan 22 11:49:48 2026] x86/fpu: Supporting XSAVE feature 0x004: 'AVX registers' [Thu Jan 22 11:49:48 2026] x86/fpu: xstate_offset[2]: 576, xstate_sizes[2]: 256 [Thu Jan 22 11:49:48 2026] x86/fpu: Enabled xstate features 0x7, context size is 832 bytes, using 'compacted' format. [Thu Jan 22 11:49:48 2026] Freeing SMP alternatives memory: 40K [Thu Jan 22 11:49:48 2026] pid_max: default: 32768 minimum: 301 [Thu Jan 22 11:49:48 2026] LSM: initializing lsm=lockdown,capability,landlock,yama,integrity,selinux,bpf [Thu Jan 22 11:49:48 2026] landlock: Up and running. [Thu Jan 22 11:49:48 2026] Yama: becoming mindful. [Thu Jan 22 11:49:48 2026] SELinux: Initializing. 
[Thu Jan 22 11:49:48 2026] LSM support for eBPF active [Thu Jan 22 11:49:48 2026] Mount-cache hash table entries: 16384 (order: 5, 131072 bytes, linear) [Thu Jan 22 11:49:48 2026] Mountpoint-cache hash table entries: 16384 (order: 5, 131072 bytes, linear) [Thu Jan 22 11:49:48 2026] smpboot: CPU0: AMD EPYC-Rome Processor (family: 0x17, model: 0x31, stepping: 0x0) [Thu Jan 22 11:49:48 2026] Performance Events: Fam17h+ core perfctr, AMD PMU driver. [Thu Jan 22 11:49:48 2026] ... version: 0 [Thu Jan 22 11:49:48 2026] ... bit width: 48 [Thu Jan 22 11:49:48 2026] ... generic registers: 6 [Thu Jan 22 11:49:48 2026] ... value mask: 0000ffffffffffff [Thu Jan 22 11:49:48 2026] ... max period: 00007fffffffffff [Thu Jan 22 11:49:48 2026] ... fixed-purpose events: 0 [Thu Jan 22 11:49:48 2026] ... event mask: 000000000000003f [Thu Jan 22 11:49:48 2026] signal: max sigframe size: 1776 [Thu Jan 22 11:49:48 2026] rcu: Hierarchical SRCU implementation. [Thu Jan 22 11:49:48 2026] rcu: Max phase no-delay instances is 400. [Thu Jan 22 11:49:48 2026] smp: Bringing up secondary CPUs ... [Thu Jan 22 11:49:48 2026] smpboot: x86: Booting SMP configuration: [Thu Jan 22 11:49:48 2026] .... node #0, CPUs: #1 #2 #3 #4 #5 #6 #7 [Thu Jan 22 11:49:48 2026] smp: Brought up 1 node, 8 CPUs [Thu Jan 22 11:49:48 2026] smpboot: Total of 8 processors activated (44800.00 BogoMIPS) [Thu Jan 22 11:49:48 2026] node 0 deferred pages initialised in 10ms [Thu Jan 22 11:49:48 2026] Memory: 7763764K/8388068K available (16384K kernel code, 5797K rwdata, 13916K rodata, 4200K init, 7192K bss, 618360K reserved, 0K cma-reserved) [Thu Jan 22 11:49:48 2026] devtmpfs: initialized [Thu Jan 22 11:49:48 2026] x86/mm: Memory block size: 128MB [Thu Jan 22 11:49:48 2026] clocksource: jiffies: mask: 0xffffffff max_cycles: 0xffffffff, max_idle_ns: 1911260446275000 ns [Thu Jan 22 11:49:48 2026] futex hash table entries: 2048 (131072 bytes on 1 NUMA nodes, total 128 KiB, linear). [Thu Jan 22 11:49:48 2026] pinctrl core: initialized pinctrl subsystem [Thu Jan 22 11:49:48 2026] NET: Registered PF_NETLINK/PF_ROUTE protocol family [Thu Jan 22 11:49:48 2026] DMA: preallocated 1024 KiB GFP_KERNEL pool for atomic allocations [Thu Jan 22 11:49:48 2026] DMA: preallocated 1024 KiB GFP_KERNEL|GFP_DMA pool for atomic allocations [Thu Jan 22 11:49:48 2026] DMA: preallocated 1024 KiB GFP_KERNEL|GFP_DMA32 pool for atomic allocations [Thu Jan 22 11:49:48 2026] audit: initializing netlink subsys (disabled) [Thu Jan 22 11:49:48 2026] audit: type=2000 audit(1769082589.153:1): state=initialized audit_enabled=0 res=1 [Thu Jan 22 11:49:48 2026] thermal_sys: Registered thermal governor 'fair_share' [Thu Jan 22 11:49:48 2026] thermal_sys: Registered thermal governor 'step_wise' [Thu Jan 22 11:49:48 2026] thermal_sys: Registered thermal governor 'user_space' [Thu Jan 22 11:49:48 2026] cpuidle: using governor menu [Thu Jan 22 11:49:48 2026] acpiphp: ACPI Hot Plug PCI Controller Driver version: 0.5 [Thu Jan 22 11:49:48 2026] PCI: Using configuration type 1 for base access [Thu Jan 22 11:49:48 2026] PCI: Using configuration type 1 for extended access [Thu Jan 22 11:49:48 2026] kprobes: kprobe jump-optimization is enabled. All kprobes are optimized if possible. 
[Thu Jan 22 11:49:48 2026] HugeTLB: registered 1.00 GiB page size, pre-allocated 0 pages [Thu Jan 22 11:49:48 2026] HugeTLB: 16380 KiB vmemmap can be freed for a 1.00 GiB page [Thu Jan 22 11:49:48 2026] HugeTLB: registered 2.00 MiB page size, pre-allocated 0 pages [Thu Jan 22 11:49:48 2026] HugeTLB: 28 KiB vmemmap can be freed for a 2.00 MiB page [Thu Jan 22 11:49:48 2026] Demotion targets for Node 0: null [Thu Jan 22 11:49:48 2026] cryptd: max_cpu_qlen set to 1000 [Thu Jan 22 11:49:48 2026] ACPI: Added _OSI(Module Device) [Thu Jan 22 11:49:48 2026] ACPI: Added _OSI(Processor Device) [Thu Jan 22 11:49:48 2026] ACPI: Added _OSI(Processor Aggregator Device) [Thu Jan 22 11:49:48 2026] ACPI: 1 ACPI AML tables successfully acquired and loaded [Thu Jan 22 11:49:48 2026] ACPI: Interpreter enabled [Thu Jan 22 11:49:48 2026] ACPI: PM: (supports S0 S3 S4 S5) [Thu Jan 22 11:49:48 2026] ACPI: Using IOAPIC for interrupt routing [Thu Jan 22 11:49:48 2026] PCI: Using host bridge windows from ACPI; if necessary, use "pci=nocrs" and report a bug [Thu Jan 22 11:49:48 2026] PCI: Using E820 reservations for host bridge windows [Thu Jan 22 11:49:48 2026] ACPI: Enabled 2 GPEs in block 00 to 0F [Thu Jan 22 11:49:48 2026] ACPI: PCI Root Bridge [PCI0] (domain 0000 [bus 00-ff]) [Thu Jan 22 11:49:48 2026] acpi PNP0A03:00: _OSC: OS supports [ExtendedConfig ASPM ClockPM Segments MSI EDR HPX-Type3] [Thu Jan 22 11:49:48 2026] acpiphp: Slot [3] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [4] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [5] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [6] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [7] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [8] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [9] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [10] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [11] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [12] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [13] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [14] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [15] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [16] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [17] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [18] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [19] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [20] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [21] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [22] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [23] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [24] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [25] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [26] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [27] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [28] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [29] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [30] registered [Thu Jan 22 11:49:48 2026] acpiphp: Slot [31] registered [Thu Jan 22 11:49:48 2026] PCI host bridge to bus 0000:00 [Thu Jan 22 11:49:48 2026] pci_bus 0000:00: root bus resource [io 0x0000-0x0cf7 window] [Thu Jan 22 11:49:48 2026] pci_bus 0000:00: root bus resource [io 0x0d00-0xffff window] [Thu Jan 22 11:49:48 2026] pci_bus 0000:00: root bus resource [mem 0x000a0000-0x000bffff window] [Thu Jan 22 11:49:48 2026] pci_bus 0000:00: root bus resource [mem 0xc0000000-0xfebfffff window] [Thu Jan 22 11:49:48 2026] pci_bus 0000:00: root bus resource [mem 
0x240000000-0x2bfffffff window] [Thu Jan 22 11:49:48 2026] pci_bus 0000:00: root bus resource [bus 00-ff] [Thu Jan 22 11:49:48 2026] pci 0000:00:00.0: [8086:1237] type 00 class 0x060000 conventional PCI endpoint [Thu Jan 22 11:49:48 2026] pci 0000:00:01.0: [8086:7000] type 00 class 0x060100 conventional PCI endpoint [Thu Jan 22 11:49:48 2026] pci 0000:00:01.1: [8086:7010] type 00 class 0x010180 conventional PCI endpoint [Thu Jan 22 11:49:48 2026] pci 0000:00:01.1: BAR 4 [io 0xc140-0xc14f] [Thu Jan 22 11:49:48 2026] pci 0000:00:01.1: BAR 0 [io 0x01f0-0x01f7]: legacy IDE quirk [Thu Jan 22 11:49:48 2026] pci 0000:00:01.1: BAR 1 [io 0x03f6]: legacy IDE quirk [Thu Jan 22 11:49:48 2026] pci 0000:00:01.1: BAR 2 [io 0x0170-0x0177]: legacy IDE quirk [Thu Jan 22 11:49:48 2026] pci 0000:00:01.1: BAR 3 [io 0x0376]: legacy IDE quirk [Thu Jan 22 11:49:48 2026] pci 0000:00:01.2: [8086:7020] type 00 class 0x0c0300 conventional PCI endpoint [Thu Jan 22 11:49:48 2026] pci 0000:00:01.2: BAR 4 [io 0xc100-0xc11f] [Thu Jan 22 11:49:48 2026] pci 0000:00:01.3: [8086:7113] type 00 class 0x068000 conventional PCI endpoint [Thu Jan 22 11:49:48 2026] pci 0000:00:01.3: quirk: [io 0x0600-0x063f] claimed by PIIX4 ACPI [Thu Jan 22 11:49:48 2026] pci 0000:00:01.3: quirk: [io 0x0700-0x070f] claimed by PIIX4 SMB [Thu Jan 22 11:49:48 2026] pci 0000:00:02.0: [1af4:1050] type 00 class 0x030000 conventional PCI endpoint [Thu Jan 22 11:49:48 2026] pci 0000:00:02.0: BAR 0 [mem 0xfe000000-0xfe7fffff pref] [Thu Jan 22 11:49:48 2026] pci 0000:00:02.0: BAR 2 [mem 0xfe800000-0xfe803fff 64bit pref] [Thu Jan 22 11:49:48 2026] pci 0000:00:02.0: BAR 4 [mem 0xfeb90000-0xfeb90fff] [Thu Jan 22 11:49:48 2026] pci 0000:00:02.0: ROM [mem 0xfeb80000-0xfeb8ffff pref] [Thu Jan 22 11:49:48 2026] pci 0000:00:02.0: Video device with shadowed ROM at [mem 0x000c0000-0x000dffff] [Thu Jan 22 11:49:48 2026] pci 0000:00:03.0: [1af4:1000] type 00 class 0x020000 conventional PCI endpoint [Thu Jan 22 11:49:48 2026] pci 0000:00:03.0: BAR 0 [io 0xc080-0xc0bf] [Thu Jan 22 11:49:48 2026] pci 0000:00:03.0: BAR 1 [mem 0xfeb91000-0xfeb91fff] [Thu Jan 22 11:49:48 2026] pci 0000:00:03.0: BAR 4 [mem 0xfe804000-0xfe807fff 64bit pref] [Thu Jan 22 11:49:48 2026] pci 0000:00:03.0: ROM [mem 0xfeb00000-0xfeb7ffff pref] [Thu Jan 22 11:49:48 2026] pci 0000:00:04.0: [1af4:1001] type 00 class 0x010000 conventional PCI endpoint [Thu Jan 22 11:49:48 2026] pci 0000:00:04.0: BAR 0 [io 0xc000-0xc07f] [Thu Jan 22 11:49:48 2026] pci 0000:00:04.0: BAR 1 [mem 0xfeb92000-0xfeb92fff] [Thu Jan 22 11:49:48 2026] pci 0000:00:04.0: BAR 4 [mem 0xfe808000-0xfe80bfff 64bit pref] [Thu Jan 22 11:49:48 2026] pci 0000:00:05.0: [1af4:1002] type 00 class 0x00ff00 conventional PCI endpoint [Thu Jan 22 11:49:48 2026] pci 0000:00:05.0: BAR 0 [io 0xc0c0-0xc0ff] [Thu Jan 22 11:49:48 2026] pci 0000:00:05.0: BAR 4 [mem 0xfe80c000-0xfe80ffff 64bit pref] [Thu Jan 22 11:49:48 2026] pci 0000:00:06.0: [1af4:1005] type 00 class 0x00ff00 conventional PCI endpoint [Thu Jan 22 11:49:48 2026] pci 0000:00:06.0: BAR 0 [io 0xc120-0xc13f] [Thu Jan 22 11:49:48 2026] pci 0000:00:06.0: BAR 4 [mem 0xfe810000-0xfe813fff 64bit pref] [Thu Jan 22 11:49:48 2026] ACPI: PCI: Interrupt link LNKA configured for IRQ 10 [Thu Jan 22 11:49:48 2026] ACPI: PCI: Interrupt link LNKB configured for IRQ 10 [Thu Jan 22 11:49:48 2026] ACPI: PCI: Interrupt link LNKC configured for IRQ 11 [Thu Jan 22 11:49:48 2026] ACPI: PCI: Interrupt link LNKD configured for IRQ 11 [Thu Jan 22 11:49:48 2026] ACPI: PCI: Interrupt link LNKS configured for IRQ 9 [Thu 
Jan 22 11:49:48 2026] iommu: Default domain type: Translated [Thu Jan 22 11:49:48 2026] iommu: DMA domain TLB invalidation policy: lazy mode [Thu Jan 22 11:49:48 2026] SCSI subsystem initialized [Thu Jan 22 11:49:48 2026] ACPI: bus type USB registered [Thu Jan 22 11:49:48 2026] usbcore: registered new interface driver usbfs [Thu Jan 22 11:49:48 2026] usbcore: registered new interface driver hub [Thu Jan 22 11:49:48 2026] usbcore: registered new device driver usb [Thu Jan 22 11:49:48 2026] pps_core: LinuxPPS API ver. 1 registered [Thu Jan 22 11:49:48 2026] pps_core: Software ver. 5.3.6 - Copyright 2005-2007 Rodolfo Giometti [Thu Jan 22 11:49:48 2026] PTP clock support registered [Thu Jan 22 11:49:48 2026] EDAC MC: Ver: 3.0.0 [Thu Jan 22 11:49:48 2026] NetLabel: Initializing [Thu Jan 22 11:49:48 2026] NetLabel: domain hash size = 128 [Thu Jan 22 11:49:48 2026] NetLabel: protocols = UNLABELED CIPSOv4 CALIPSO [Thu Jan 22 11:49:48 2026] NetLabel: unlabeled traffic allowed by default [Thu Jan 22 11:49:48 2026] PCI: Using ACPI for IRQ routing [Thu Jan 22 11:49:48 2026] PCI: pci_cache_line_size set to 64 bytes [Thu Jan 22 11:49:48 2026] e820: reserve RAM buffer [mem 0x0009fc00-0x0009ffff] [Thu Jan 22 11:49:48 2026] e820: reserve RAM buffer [mem 0xbffdb000-0xbfffffff] [Thu Jan 22 11:49:48 2026] pci 0000:00:02.0: vgaarb: setting as boot VGA device [Thu Jan 22 11:49:48 2026] pci 0000:00:02.0: vgaarb: bridge control possible [Thu Jan 22 11:49:48 2026] pci 0000:00:02.0: vgaarb: VGA device added: decodes=io+mem,owns=io+mem,locks=none [Thu Jan 22 11:49:48 2026] vgaarb: loaded [Thu Jan 22 11:49:48 2026] clocksource: Switched to clocksource kvm-clock [Thu Jan 22 11:49:48 2026] VFS: Disk quotas dquot_6.6.0 [Thu Jan 22 11:49:48 2026] VFS: Dquot-cache hash table entries: 512 (order 0, 4096 bytes) [Thu Jan 22 11:49:48 2026] pnp: PnP ACPI init [Thu Jan 22 11:49:48 2026] pnp 00:03: [dma 2] [Thu Jan 22 11:49:48 2026] pnp: PnP ACPI: found 5 devices [Thu Jan 22 11:49:48 2026] clocksource: acpi_pm: mask: 0xffffff max_cycles: 0xffffff, max_idle_ns: 2085701024 ns [Thu Jan 22 11:49:48 2026] NET: Registered PF_INET protocol family [Thu Jan 22 11:49:48 2026] IP idents hash table entries: 131072 (order: 8, 1048576 bytes, linear) [Thu Jan 22 11:49:48 2026] tcp_listen_portaddr_hash hash table entries: 4096 (order: 4, 65536 bytes, linear) [Thu Jan 22 11:49:48 2026] Table-perturb hash table entries: 65536 (order: 6, 262144 bytes, linear) [Thu Jan 22 11:49:48 2026] TCP established hash table entries: 65536 (order: 7, 524288 bytes, linear) [Thu Jan 22 11:49:48 2026] TCP bind hash table entries: 65536 (order: 8, 1048576 bytes, linear) [Thu Jan 22 11:49:48 2026] TCP: Hash tables configured (established 65536 bind 65536) [Thu Jan 22 11:49:48 2026] MPTCP token hash table entries: 8192 (order: 5, 196608 bytes, linear) [Thu Jan 22 11:49:48 2026] UDP hash table entries: 4096 (order: 5, 131072 bytes, linear) [Thu Jan 22 11:49:48 2026] UDP-Lite hash table entries: 4096 (order: 5, 131072 bytes, linear) [Thu Jan 22 11:49:48 2026] NET: Registered PF_UNIX/PF_LOCAL protocol family [Thu Jan 22 11:49:48 2026] NET: Registered PF_XDP protocol family [Thu Jan 22 11:49:48 2026] pci_bus 0000:00: resource 4 [io 0x0000-0x0cf7 window] [Thu Jan 22 11:49:48 2026] pci_bus 0000:00: resource 5 [io 0x0d00-0xffff window] [Thu Jan 22 11:49:48 2026] pci_bus 0000:00: resource 6 [mem 0x000a0000-0x000bffff window] [Thu Jan 22 11:49:48 2026] pci_bus 0000:00: resource 7 [mem 0xc0000000-0xfebfffff window] [Thu Jan 22 11:49:48 2026] pci_bus 0000:00: resource 8 [mem 
0x240000000-0x2bfffffff window] [Thu Jan 22 11:49:48 2026] pci 0000:00:01.0: PIIX3: Enabling Passive Release [Thu Jan 22 11:49:48 2026] pci 0000:00:00.0: Limiting direct PCI/PCI transfers [Thu Jan 22 11:49:48 2026] ACPI: \_SB_.LNKD: Enabled at IRQ 11 [Thu Jan 22 11:49:48 2026] pci 0000:00:01.2: quirk_usb_early_handoff+0x0/0x160 took 80950 usecs [Thu Jan 22 11:49:48 2026] PCI: CLS 0 bytes, default 64 [Thu Jan 22 11:49:48 2026] PCI-DMA: Using software bounce buffering for IO (SWIOTLB) [Thu Jan 22 11:49:48 2026] software IO TLB: mapped [mem 0x00000000ab000000-0x00000000af000000] (64MB) [Thu Jan 22 11:49:48 2026] ACPI: bus type thunderbolt registered [Thu Jan 22 11:49:48 2026] Trying to unpack rootfs image as initramfs... [Thu Jan 22 11:49:48 2026] Initialise system trusted keyrings [Thu Jan 22 11:49:48 2026] Key type blacklist registered [Thu Jan 22 11:49:48 2026] workingset: timestamp_bits=36 max_order=21 bucket_order=0 [Thu Jan 22 11:49:48 2026] zbud: loaded [Thu Jan 22 11:49:48 2026] integrity: Platform Keyring initialized [Thu Jan 22 11:49:48 2026] integrity: Machine keyring initialized [Thu Jan 22 11:49:48 2026] Freeing initrd memory: 87956K [Thu Jan 22 11:49:49 2026] NET: Registered PF_ALG protocol family [Thu Jan 22 11:49:49 2026] xor: automatically using best checksumming function avx [Thu Jan 22 11:49:49 2026] Key type asymmetric registered [Thu Jan 22 11:49:49 2026] Asymmetric key parser 'x509' registered [Thu Jan 22 11:49:49 2026] Block layer SCSI generic (bsg) driver version 0.4 loaded (major 246) [Thu Jan 22 11:49:49 2026] io scheduler mq-deadline registered [Thu Jan 22 11:49:49 2026] io scheduler kyber registered [Thu Jan 22 11:49:49 2026] io scheduler bfq registered [Thu Jan 22 11:49:49 2026] atomic64_test: passed for x86-64 platform with CX8 and with SSE [Thu Jan 22 11:49:49 2026] shpchp: Standard Hot Plug PCI Controller Driver version: 0.4 [Thu Jan 22 11:49:49 2026] input: Power Button as /devices/LNXSYSTM:00/LNXPWRBN:00/input/input0 [Thu Jan 22 11:49:49 2026] ACPI: button: Power Button [PWRF] [Thu Jan 22 11:49:49 2026] ACPI: \_SB_.LNKB: Enabled at IRQ 10 [Thu Jan 22 11:49:49 2026] ACPI: \_SB_.LNKC: Enabled at IRQ 11 [Thu Jan 22 11:49:49 2026] ACPI: \_SB_.LNKA: Enabled at IRQ 10 [Thu Jan 22 11:49:49 2026] Serial: 8250/16550 driver, 4 ports, IRQ sharing enabled [Thu Jan 22 11:49:49 2026] 00:00: ttyS0 at I/O 0x3f8 (irq = 4, base_baud = 115200) is a 16550A [Thu Jan 22 11:49:49 2026] Non-volatile memory driver v1.3 [Thu Jan 22 11:49:49 2026] rdac: device handler registered [Thu Jan 22 11:49:49 2026] hp_sw: device handler registered [Thu Jan 22 11:49:49 2026] emc: device handler registered [Thu Jan 22 11:49:49 2026] alua: device handler registered [Thu Jan 22 11:49:49 2026] uhci_hcd 0000:00:01.2: UHCI Host Controller [Thu Jan 22 11:49:49 2026] uhci_hcd 0000:00:01.2: new USB bus registered, assigned bus number 1 [Thu Jan 22 11:49:49 2026] uhci_hcd 0000:00:01.2: detected 2 ports [Thu Jan 22 11:49:49 2026] uhci_hcd 0000:00:01.2: irq 11, io port 0x0000c100 [Thu Jan 22 11:49:49 2026] usb usb1: New USB device found, idVendor=1d6b, idProduct=0001, bcdDevice= 5.14 [Thu Jan 22 11:49:49 2026] usb usb1: New USB device strings: Mfr=3, Product=2, SerialNumber=1 [Thu Jan 22 11:49:49 2026] usb usb1: Product: UHCI Host Controller [Thu Jan 22 11:49:49 2026] usb usb1: Manufacturer: Linux 5.14.0-661.el9.x86_64 uhci_hcd [Thu Jan 22 11:49:49 2026] usb usb1: SerialNumber: 0000:00:01.2 [Thu Jan 22 11:49:49 2026] hub 1-0:1.0: USB hub found [Thu Jan 22 11:49:49 2026] hub 1-0:1.0: 2 ports detected [Thu Jan 
22 11:49:49 2026] usbcore: registered new interface driver usbserial_generic [Thu Jan 22 11:49:49 2026] usbserial: USB Serial support registered for generic [Thu Jan 22 11:49:49 2026] i8042: PNP: PS/2 Controller [PNP0303:KBD,PNP0f13:MOU] at 0x60,0x64 irq 1,12 [Thu Jan 22 11:49:49 2026] serio: i8042 KBD port at 0x60,0x64 irq 1 [Thu Jan 22 11:49:49 2026] serio: i8042 AUX port at 0x60,0x64 irq 12 [Thu Jan 22 11:49:49 2026] mousedev: PS/2 mouse device common for all mice [Thu Jan 22 11:49:49 2026] rtc_cmos 00:04: RTC can wake from S4 [Thu Jan 22 11:49:49 2026] input: AT Translated Set 2 keyboard as /devices/platform/i8042/serio0/input/input1 [Thu Jan 22 11:49:49 2026] input: VirtualPS/2 VMware VMMouse as /devices/platform/i8042/serio1/input/input4 [Thu Jan 22 11:49:49 2026] input: VirtualPS/2 VMware VMMouse as /devices/platform/i8042/serio1/input/input3 [Thu Jan 22 11:49:49 2026] rtc_cmos 00:04: registered as rtc0 [Thu Jan 22 11:49:49 2026] rtc_cmos 00:04: setting system clock to 2026-01-22T11:49:49 UTC (1769082589) [Thu Jan 22 11:49:49 2026] rtc_cmos 00:04: alarms up to one day, y3k, 242 bytes nvram [Thu Jan 22 11:49:49 2026] amd_pstate: the _CPC object is not present in SBIOS or ACPI disabled [Thu Jan 22 11:49:49 2026] hid: raw HID events driver (C) Jiri Kosina [Thu Jan 22 11:49:49 2026] usbcore: registered new interface driver usbhid [Thu Jan 22 11:49:49 2026] usbhid: USB HID core driver [Thu Jan 22 11:49:49 2026] drop_monitor: Initializing network drop monitor service [Thu Jan 22 11:49:49 2026] Initializing XFRM netlink socket [Thu Jan 22 11:49:49 2026] NET: Registered PF_INET6 protocol family [Thu Jan 22 11:49:49 2026] Segment Routing with IPv6 [Thu Jan 22 11:49:49 2026] NET: Registered PF_PACKET protocol family [Thu Jan 22 11:49:49 2026] mpls_gso: MPLS GSO support [Thu Jan 22 11:49:49 2026] IPI shorthand broadcast: enabled [Thu Jan 22 11:49:49 2026] AVX2 version of gcm_enc/dec engaged. [Thu Jan 22 11:49:49 2026] AES CTR mode by8 optimization enabled [Thu Jan 22 11:49:49 2026] sched_clock: Marking stable (1273017639, 144511120)->(1549961389, -132432630) [Thu Jan 22 11:49:49 2026] registered taskstats version 1 [Thu Jan 22 11:49:49 2026] Loading compiled-in X.509 certificates [Thu Jan 22 11:49:49 2026] Loaded X.509 cert 'The CentOS Project: CentOS Stream kernel signing key: 04453f216699002fd63185eeab832de990bee6d7' [Thu Jan 22 11:49:49 2026] Loaded X.509 cert 'Red Hat Enterprise Linux Driver Update Program (key 3): bf57f3e87362bc7229d9f465321773dfd1f77a80' [Thu Jan 22 11:49:49 2026] Loaded X.509 cert 'Red Hat Enterprise Linux kpatch signing key: 4d38fd864ebe18c5f0b72e3852e2014c3a676fc8' [Thu Jan 22 11:49:49 2026] Loaded X.509 cert 'RH-IMA-CA: Red Hat IMA CA: fb31825dd0e073685b264e3038963673f753959a' [Thu Jan 22 11:49:49 2026] Loaded X.509 cert 'Nvidia GPU OOT signing 001: 55e1cef88193e60419f0b0ec379c49f77545acf0' [Thu Jan 22 11:49:49 2026] Demotion targets for Node 0: null [Thu Jan 22 11:49:49 2026] page_owner is disabled [Thu Jan 22 11:49:49 2026] Key type .fscrypt registered [Thu Jan 22 11:49:49 2026] Key type fscrypt-provisioning registered [Thu Jan 22 11:49:49 2026] Key type big_key registered [Thu Jan 22 11:49:49 2026] Key type encrypted registered [Thu Jan 22 11:49:49 2026] ima: No TPM chip found, activating TPM-bypass! 
[Thu Jan 22 11:49:49 2026] Loading compiled-in module X.509 certificates [Thu Jan 22 11:49:49 2026] Loaded X.509 cert 'The CentOS Project: CentOS Stream kernel signing key: 04453f216699002fd63185eeab832de990bee6d7' [Thu Jan 22 11:49:49 2026] ima: Allocated hash algorithm: sha256 [Thu Jan 22 11:49:49 2026] ima: No architecture policies found [Thu Jan 22 11:49:49 2026] evm: Initialising EVM extended attributes: [Thu Jan 22 11:49:49 2026] evm: security.selinux [Thu Jan 22 11:49:49 2026] evm: security.SMACK64 (disabled) [Thu Jan 22 11:49:49 2026] evm: security.SMACK64EXEC (disabled) [Thu Jan 22 11:49:49 2026] evm: security.SMACK64TRANSMUTE (disabled) [Thu Jan 22 11:49:49 2026] evm: security.SMACK64MMAP (disabled) [Thu Jan 22 11:49:49 2026] evm: security.apparmor (disabled) [Thu Jan 22 11:49:49 2026] evm: security.ima [Thu Jan 22 11:49:49 2026] evm: security.capability [Thu Jan 22 11:49:49 2026] evm: HMAC attrs: 0x1 [Thu Jan 22 11:49:49 2026] usb 1-1: new full-speed USB device number 2 using uhci_hcd [Thu Jan 22 11:49:49 2026] Running certificate verification RSA selftest [Thu Jan 22 11:49:49 2026] Loaded X.509 cert 'Certificate verification self-testing key: f58703bb33ce1b73ee02eccdee5b8817518fe3db' [Thu Jan 22 11:49:49 2026] Running certificate verification ECDSA selftest [Thu Jan 22 11:49:49 2026] Loaded X.509 cert 'Certificate verification ECDSA self-testing key: 2900bcea1deb7bc8479a84a23d758efdfdd2b2d3' [Thu Jan 22 11:49:49 2026] clk: Disabling unused clocks [Thu Jan 22 11:49:49 2026] Freeing unused decrypted memory: 2028K [Thu Jan 22 11:49:49 2026] Freeing unused kernel image (initmem) memory: 4200K [Thu Jan 22 11:49:49 2026] Write protecting the kernel read-only data: 30720k [Thu Jan 22 11:49:49 2026] usb 1-1: New USB device found, idVendor=0627, idProduct=0001, bcdDevice= 0.00 [Thu Jan 22 11:49:49 2026] usb 1-1: New USB device strings: Mfr=1, Product=3, SerialNumber=10 [Thu Jan 22 11:49:49 2026] usb 1-1: Product: QEMU USB Tablet [Thu Jan 22 11:49:49 2026] usb 1-1: Manufacturer: QEMU [Thu Jan 22 11:49:49 2026] usb 1-1: SerialNumber: 28754-0000:00:01.2-1 [Thu Jan 22 11:49:49 2026] Freeing unused kernel image (rodata/data gap) memory: 420K [Thu Jan 22 11:49:49 2026] input: QEMU QEMU USB Tablet as /devices/pci0000:00/0000:00:01.2/usb1/1-1/1-1:1.0/0003:0627:0001.0001/input/input5 [Thu Jan 22 11:49:49 2026] hid-generic 0003:0627:0001.0001: input,hidraw0: USB HID v0.01 Mouse [QEMU QEMU USB Tablet] on usb-0000:00:01.2-1/input0 [Thu Jan 22 11:49:49 2026] x86/mm: Checked W+X mappings: passed, no W+X pages found. [Thu Jan 22 11:49:49 2026] Run /init as init process [Thu Jan 22 11:49:49 2026] with arguments: [Thu Jan 22 11:49:49 2026] /init [Thu Jan 22 11:49:49 2026] with environment: [Thu Jan 22 11:49:49 2026] HOME=/ [Thu Jan 22 11:49:49 2026] TERM=linux [Thu Jan 22 11:49:49 2026] BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-661.el9.x86_64 [Thu Jan 22 11:49:49 2026] systemd[1]: systemd 252-64.el9 running in system mode (+PAM +AUDIT +SELINUX -APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN -IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -BPF_FRAMEWORK +XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified) [Thu Jan 22 11:49:49 2026] systemd[1]: Detected virtualization kvm. [Thu Jan 22 11:49:49 2026] systemd[1]: Detected architecture x86-64. [Thu Jan 22 11:49:49 2026] systemd[1]: Running in initrd. [Thu Jan 22 11:49:49 2026] systemd[1]: No hostname configured, using default hostname. 
[Thu Jan 22 11:49:49 2026] systemd[1]: Hostname set to . [Thu Jan 22 11:49:49 2026] systemd[1]: Initializing machine ID from VM UUID. [Thu Jan 22 11:49:49 2026] systemd[1]: Queued start job for default target Initrd Default Target. [Thu Jan 22 11:49:49 2026] systemd[1]: Started Dispatch Password Requests to Console Directory Watch. [Thu Jan 22 11:49:49 2026] systemd[1]: Reached target Local Encrypted Volumes. [Thu Jan 22 11:49:49 2026] systemd[1]: Reached target Initrd /usr File System. [Thu Jan 22 11:49:49 2026] systemd[1]: Reached target Local File Systems. [Thu Jan 22 11:49:49 2026] systemd[1]: Reached target Path Units. [Thu Jan 22 11:49:49 2026] systemd[1]: Reached target Slice Units. [Thu Jan 22 11:49:49 2026] systemd[1]: Reached target Swaps. [Thu Jan 22 11:49:49 2026] systemd[1]: Reached target Timer Units. [Thu Jan 22 11:49:49 2026] systemd[1]: Listening on D-Bus System Message Bus Socket. [Thu Jan 22 11:49:49 2026] systemd[1]: Listening on Journal Socket (/dev/log). [Thu Jan 22 11:49:49 2026] systemd[1]: Listening on Journal Socket. [Thu Jan 22 11:49:49 2026] systemd[1]: Listening on udev Control Socket. [Thu Jan 22 11:49:50 2026] systemd[1]: Listening on udev Kernel Socket. [Thu Jan 22 11:49:50 2026] systemd[1]: Reached target Socket Units. [Thu Jan 22 11:49:50 2026] systemd[1]: Starting Create List of Static Device Nodes... [Thu Jan 22 11:49:50 2026] systemd[1]: Starting Journal Service... [Thu Jan 22 11:49:50 2026] systemd[1]: Load Kernel Modules was skipped because no trigger condition checks were met. [Thu Jan 22 11:49:50 2026] systemd[1]: Starting Apply Kernel Variables... [Thu Jan 22 11:49:50 2026] systemd[1]: Starting Create System Users... [Thu Jan 22 11:49:50 2026] systemd[1]: Starting Setup Virtual Console... [Thu Jan 22 11:49:50 2026] systemd[1]: Finished Create List of Static Device Nodes. [Thu Jan 22 11:49:50 2026] systemd[1]: Finished Apply Kernel Variables. [Thu Jan 22 11:49:50 2026] systemd[1]: Finished Create System Users. [Thu Jan 22 11:49:50 2026] systemd[1]: Started Journal Service. [Thu Jan 22 11:49:50 2026] device-mapper: core: CONFIG_IMA_DISABLE_HTABLE is disabled. Duplicate IMA measurements will not be recorded in the IMA log. [Thu Jan 22 11:49:50 2026] device-mapper: uevent: version 1.0.3 [Thu Jan 22 11:49:50 2026] device-mapper: ioctl: 4.50.0-ioctl (2025-04-28) initialised: dm-devel@lists.linux.dev [Thu Jan 22 11:49:50 2026] RPC: Registered named UNIX socket transport module. [Thu Jan 22 11:49:50 2026] RPC: Registered udp transport module. [Thu Jan 22 11:49:50 2026] RPC: Registered tcp transport module. [Thu Jan 22 11:49:50 2026] RPC: Registered tcp-with-tls transport module. [Thu Jan 22 11:49:50 2026] RPC: Registered tcp NFSv4.1 backchannel transport module. [Thu Jan 22 11:49:50 2026] virtio_blk virtio2: 8/0/0 default/read/poll queues [Thu Jan 22 11:49:51 2026] libata version 3.00 loaded. 
[Thu Jan 22 11:49:51 2026] virtio_blk virtio2: [vda] 167772160 512-byte logical blocks (85.9 GB/80.0 GiB) [Thu Jan 22 11:49:51 2026] vda: vda1 [Thu Jan 22 11:49:51 2026] ata_piix 0000:00:01.1: version 2.13 [Thu Jan 22 11:49:51 2026] scsi host0: ata_piix [Thu Jan 22 11:49:51 2026] scsi host1: ata_piix [Thu Jan 22 11:49:51 2026] ata1: PATA max MWDMA2 cmd 0x1f0 ctl 0x3f6 bmdma 0xc140 irq 14 lpm-pol 0 [Thu Jan 22 11:49:51 2026] ata2: PATA max MWDMA2 cmd 0x170 ctl 0x376 bmdma 0xc148 irq 15 lpm-pol 0 [Thu Jan 22 11:49:51 2026] ata1: found unknown device (class 0) [Thu Jan 22 11:49:51 2026] ata1.00: ATAPI: QEMU DVD-ROM, 2.5+, max UDMA/100 [Thu Jan 22 11:49:51 2026] scsi 0:0:0:0: CD-ROM QEMU QEMU DVD-ROM 2.5+ PQ: 0 ANSI: 5 [Thu Jan 22 11:49:51 2026] scsi 0:0:0:0: Attached scsi generic sg0 type 5 [Thu Jan 22 11:49:51 2026] sr 0:0:0:0: [sr0] scsi3-mmc drive: 4x/4x cd/rw xa/form2 tray [Thu Jan 22 11:49:51 2026] cdrom: Uniform CD-ROM driver Revision: 3.20 [Thu Jan 22 11:49:51 2026] sr 0:0:0:0: Attached scsi CD-ROM sr0 [Thu Jan 22 11:49:51 2026] SGI XFS with ACLs, security attributes, scrub, quota, no debug enabled [Thu Jan 22 11:49:51 2026] XFS (vda1): Mounting V5 Filesystem 22ac9141-3960-4912-b20e-19fc8a328d40 [Thu Jan 22 11:49:51 2026] XFS (vda1): Ending clean mount [Thu Jan 22 11:49:52 2026] systemd-journald[309]: Received SIGTERM from PID 1 (systemd). [Thu Jan 22 11:49:52 2026] audit: type=1404 audit(1769082592.564:2): enforcing=1 old_enforcing=0 auid=4294967295 ses=4294967295 enabled=1 old-enabled=1 lsm=selinux res=1 [Thu Jan 22 11:49:52 2026] SELinux: policy capability network_peer_controls=1 [Thu Jan 22 11:49:52 2026] SELinux: policy capability open_perms=1 [Thu Jan 22 11:49:52 2026] SELinux: policy capability extended_socket_class=1 [Thu Jan 22 11:49:52 2026] SELinux: policy capability always_check_network=0 [Thu Jan 22 11:49:52 2026] SELinux: policy capability cgroup_seclabel=1 [Thu Jan 22 11:49:52 2026] SELinux: policy capability nnp_nosuid_transition=1 [Thu Jan 22 11:49:52 2026] SELinux: policy capability genfs_seclabel_symlinks=1 [Thu Jan 22 11:49:52 2026] audit: type=1403 audit(1769082592.730:3): auid=4294967295 ses=4294967295 lsm=selinux res=1 [Thu Jan 22 11:49:52 2026] systemd[1]: Successfully loaded SELinux policy in 169.604ms. [Thu Jan 22 11:49:52 2026] systemd[1]: Relabelled /dev, /dev/shm, /run, /sys/fs/cgroup in 27.039ms. [Thu Jan 22 11:49:52 2026] systemd[1]: systemd 252-64.el9 running in system mode (+PAM +AUDIT +SELINUX -APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN -IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -BPF_FRAMEWORK +XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified) [Thu Jan 22 11:49:52 2026] systemd[1]: Detected virtualization kvm. [Thu Jan 22 11:49:52 2026] systemd[1]: Detected architecture x86-64. [Thu Jan 22 11:49:52 2026] systemd-rc-local-generator[637]: /etc/rc.d/rc.local is not marked executable, skipping. [Thu Jan 22 11:49:52 2026] systemd[1]: initrd-switch-root.service: Deactivated successfully. [Thu Jan 22 11:49:52 2026] systemd[1]: Stopped Switch Root. [Thu Jan 22 11:49:52 2026] systemd[1]: systemd-journald.service: Scheduled restart job, restart counter is at 1. [Thu Jan 22 11:49:52 2026] systemd[1]: Created slice Slice /system/getty. [Thu Jan 22 11:49:52 2026] systemd[1]: Created slice Slice /system/serial-getty. [Thu Jan 22 11:49:52 2026] systemd[1]: Created slice Slice /system/sshd-keygen. 
[Thu Jan 22 11:49:52 2026] systemd[1]: Created slice User and Session Slice. [Thu Jan 22 11:49:52 2026] systemd[1]: Started Dispatch Password Requests to Console Directory Watch. [Thu Jan 22 11:49:52 2026] systemd[1]: Started Forward Password Requests to Wall Directory Watch. [Thu Jan 22 11:49:52 2026] systemd[1]: Set up automount Arbitrary Executable File Formats File System Automount Point. [Thu Jan 22 11:49:52 2026] systemd[1]: Reached target Local Encrypted Volumes. [Thu Jan 22 11:49:52 2026] systemd[1]: Stopped target Switch Root. [Thu Jan 22 11:49:52 2026] systemd[1]: Stopped target Initrd File Systems. [Thu Jan 22 11:49:52 2026] systemd[1]: Stopped target Initrd Root File System. [Thu Jan 22 11:49:52 2026] systemd[1]: Reached target Local Integrity Protected Volumes. [Thu Jan 22 11:49:52 2026] systemd[1]: Reached target Path Units. [Thu Jan 22 11:49:52 2026] systemd[1]: Reached target rpc_pipefs.target. [Thu Jan 22 11:49:52 2026] systemd[1]: Reached target Slice Units. [Thu Jan 22 11:49:52 2026] systemd[1]: Reached target Swaps. [Thu Jan 22 11:49:52 2026] systemd[1]: Reached target Local Verity Protected Volumes. [Thu Jan 22 11:49:52 2026] systemd[1]: Listening on RPCbind Server Activation Socket. [Thu Jan 22 11:49:52 2026] systemd[1]: Reached target RPC Port Mapper. [Thu Jan 22 11:49:52 2026] systemd[1]: Listening on Process Core Dump Socket. [Thu Jan 22 11:49:52 2026] systemd[1]: Listening on initctl Compatibility Named Pipe. [Thu Jan 22 11:49:52 2026] systemd[1]: Listening on udev Control Socket. [Thu Jan 22 11:49:52 2026] systemd[1]: Listening on udev Kernel Socket. [Thu Jan 22 11:49:53 2026] systemd[1]: Mounting Huge Pages File System... [Thu Jan 22 11:49:53 2026] systemd[1]: Mounting POSIX Message Queue File System... [Thu Jan 22 11:49:53 2026] systemd[1]: Mounting Kernel Debug File System... [Thu Jan 22 11:49:53 2026] systemd[1]: Mounting Kernel Trace File System... [Thu Jan 22 11:49:53 2026] systemd[1]: Kernel Module supporting RPCSEC_GSS was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab). [Thu Jan 22 11:49:53 2026] systemd[1]: Starting Create List of Static Device Nodes... [Thu Jan 22 11:49:53 2026] systemd[1]: Starting Load Kernel Module configfs... [Thu Jan 22 11:49:53 2026] systemd[1]: Starting Load Kernel Module drm... [Thu Jan 22 11:49:53 2026] systemd[1]: Starting Load Kernel Module efi_pstore... [Thu Jan 22 11:49:53 2026] systemd[1]: Starting Load Kernel Module fuse... [Thu Jan 22 11:49:53 2026] systemd[1]: Starting Read and set NIS domainname from /etc/sysconfig/network... [Thu Jan 22 11:49:53 2026] systemd[1]: systemd-fsck-root.service: Deactivated successfully. [Thu Jan 22 11:49:53 2026] systemd[1]: Stopped File System Check on Root Device. [Thu Jan 22 11:49:53 2026] systemd[1]: Stopped Journal Service. [Thu Jan 22 11:49:53 2026] fuse: init (API version 7.37) [Thu Jan 22 11:49:53 2026] systemd[1]: Starting Journal Service... [Thu Jan 22 11:49:53 2026] systemd[1]: Load Kernel Modules was skipped because no trigger condition checks were met. [Thu Jan 22 11:49:53 2026] systemd[1]: Starting Generate network units from Kernel command line... [Thu Jan 22 11:49:53 2026] systemd[1]: TPM2 PCR Machine ID Measurement was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f). [Thu Jan 22 11:49:53 2026] systemd[1]: Starting Remount Root and Kernel File Systems... 
[Thu Jan 22 11:49:53 2026] systemd[1]: Repartition Root Disk was skipped because no trigger condition checks were met. [Thu Jan 22 11:49:53 2026] systemd[1]: Starting Apply Kernel Variables... [Thu Jan 22 11:49:53 2026] systemd[1]: Starting Coldplug All udev Devices... [Thu Jan 22 11:49:53 2026] systemd[1]: Mounted Huge Pages File System. [Thu Jan 22 11:49:53 2026] systemd[1]: Started Journal Service. [Thu Jan 22 11:49:53 2026] xfs filesystem being remounted at / supports timestamps until 2038 (0x7fffffff) [Thu Jan 22 11:49:53 2026] ACPI: bus type drm_connector registered [Thu Jan 22 11:49:53 2026] systemd-journald[679]: Received client request to flush runtime journal. [Thu Jan 22 11:49:53 2026] input: PC Speaker as /devices/platform/pcspkr/input/input6 [Thu Jan 22 11:49:53 2026] piix4_smbus 0000:00:01.3: SMBus Host Controller at 0x700, revision 0 [Thu Jan 22 11:49:53 2026] i2c i2c-0: 1/1 memory slots populated (from DMI) [Thu Jan 22 11:49:53 2026] i2c i2c-0: Memory type 0x07 not supported yet, not instantiating SPD [Thu Jan 22 11:49:54 2026] [drm] pci: virtio-vga detected at 0000:00:02.0 [Thu Jan 22 11:49:54 2026] virtio-pci 0000:00:02.0: vgaarb: deactivate vga console [Thu Jan 22 11:49:54 2026] Console: switching to colour dummy device 80x25 [Thu Jan 22 11:49:54 2026] [drm] features: -virgl +edid -resource_blob -host_visible [Thu Jan 22 11:49:54 2026] [drm] features: -context_init [Thu Jan 22 11:49:54 2026] [drm] number of scanouts: 1 [Thu Jan 22 11:49:54 2026] [drm] number of cap sets: 0 [Thu Jan 22 11:49:54 2026] [drm] Initialized virtio_gpu 0.1.0 for 0000:00:02.0 on minor 0 [Thu Jan 22 11:49:54 2026] fbcon: virtio_gpudrmfb (fb0) is primary device [Thu Jan 22 11:49:54 2026] Console: switching to colour frame buffer device 128x48 [Thu Jan 22 11:49:54 2026] virtio-pci 0000:00:02.0: [drm] fb0: virtio_gpudrmfb frame buffer device [Thu Jan 22 11:49:54 2026] Warning: Deprecated Driver is detected: nft_compat will not be maintained in a future major release and may be disabled [Thu Jan 22 11:49:54 2026] Warning: Deprecated Driver is detected: nft_compat_module_init will not be maintained in a future major release and may be disabled [Thu Jan 22 11:49:54 2026] kvm_amd: TSC scaling supported [Thu Jan 22 11:49:54 2026] kvm_amd: Nested Virtualization enabled [Thu Jan 22 11:49:54 2026] kvm_amd: Nested Paging enabled [Thu Jan 22 11:49:54 2026] kvm_amd: LBR virtualization supported [Thu Jan 22 11:49:54 2026] ISO 9660 Extensions: Microsoft Joliet Level 3 [Thu Jan 22 11:49:54 2026] ISO 9660 Extensions: RRIP_1991A [Thu Jan 22 11:50:00 2026] block vda: the capability attribute has been deprecated. 
[Thu Jan 22 11:52:35 2026] pci 0000:00:07.0: [1af4:1000] type 00 class 0x020000 conventional PCI endpoint [Thu Jan 22 11:52:35 2026] pci 0000:00:07.0: BAR 0 [io 0x0000-0x003f] [Thu Jan 22 11:52:35 2026] pci 0000:00:07.0: BAR 1 [mem 0x00000000-0x00000fff] [Thu Jan 22 11:52:35 2026] pci 0000:00:07.0: BAR 4 [mem 0x00000000-0x00003fff 64bit pref] [Thu Jan 22 11:52:35 2026] pci 0000:00:07.0: ROM [mem 0x00000000-0x0007ffff pref] [Thu Jan 22 11:52:35 2026] pci 0000:00:07.0: ROM [mem 0xc0000000-0xc007ffff pref]: assigned [Thu Jan 22 11:52:35 2026] pci 0000:00:07.0: BAR 4 [mem 0x240000000-0x240003fff 64bit pref]: assigned [Thu Jan 22 11:52:35 2026] pci 0000:00:07.0: BAR 1 [mem 0xc0080000-0xc0080fff]: assigned [Thu Jan 22 11:52:35 2026] pci 0000:00:07.0: BAR 0 [io 0x1000-0x103f]: assigned [Thu Jan 22 11:52:35 2026] virtio-pci 0000:00:07.0: enabling device (0000 -> 0003) [Thu Jan 22 11:52:51 2026] 8021q: 802.1Q VLAN Support v1.8 [Thu Jan 22 11:52:51 2026] 8021q: adding VLAN 0 to HW filter on device eth0 [Thu Jan 22 11:52:51 2026] 8021q: adding VLAN 0 to HW filter on device eth1 [Thu Jan 22 11:59:38 2026] systemd-rc-local-generator[9266]: /etc/rc.d/rc.local is not marked executable, skipping. [Thu Jan 22 12:00:07 2026] SELinux: Converting 389 SID table entries... [Thu Jan 22 12:00:07 2026] SELinux: policy capability network_peer_controls=1 [Thu Jan 22 12:00:07 2026] SELinux: policy capability open_perms=1 [Thu Jan 22 12:00:07 2026] SELinux: policy capability extended_socket_class=1 [Thu Jan 22 12:00:07 2026] SELinux: policy capability always_check_network=0 [Thu Jan 22 12:00:07 2026] SELinux: policy capability cgroup_seclabel=1 [Thu Jan 22 12:00:07 2026] SELinux: policy capability nnp_nosuid_transition=1 [Thu Jan 22 12:00:07 2026] SELinux: policy capability genfs_seclabel_symlinks=1 [Thu Jan 22 12:00:20 2026] SELinux: Converting 390 SID table entries... [Thu Jan 22 12:00:20 2026] SELinux: policy capability network_peer_controls=1 [Thu Jan 22 12:00:20 2026] SELinux: policy capability open_perms=1 [Thu Jan 22 12:00:20 2026] SELinux: policy capability extended_socket_class=1 [Thu Jan 22 12:00:20 2026] SELinux: policy capability always_check_network=0 [Thu Jan 22 12:00:20 2026] SELinux: policy capability cgroup_seclabel=1 [Thu Jan 22 12:00:20 2026] SELinux: policy capability nnp_nosuid_transition=1 [Thu Jan 22 12:00:20 2026] SELinux: policy capability genfs_seclabel_symlinks=1 [Thu Jan 22 12:00:43 2026] systemd-rc-local-generator[10307]: /etc/rc.d/rc.local is not marked executable, skipping. [Thu Jan 22 12:00:46 2026] evm: overlay not supported home/zuul/zuul-output/logs/selinux-denials.log0000644000000000000000000000000015134437406020610 0ustar rootroothome/zuul/zuul-output/logs/system-config/0000755000175000017500000000000015134437413017662 5ustar zuulzuulhome/zuul/zuul-output/logs/system-config/libvirt/0000755000175000017500000000000015134437413021335 5ustar zuulzuulhome/zuul/zuul-output/logs/system-config/libvirt/libvirt-admin.conf0000644000175000000000000000070215134437413024713 0ustar zuulroot# # This can be used to setup URI aliases for frequently # used connection URIs. Aliases may contain only the # characters a-Z, 0-9, _, -. 
#
#
# Following the '=' may be any valid libvirt admin connection
# URI, including arbitrary parameters
#uri_aliases = [
# "admin=libvirtd:///system",
#]
# This specifies the default location the client tries to connect to if no other
# URI is provided by the application
#uri_default = "libvirtd:///system"
home/zuul/zuul-output/logs/system-config/libvirt/libvirt.conf0000644000175000000000000000104315134437413023624 0ustar zuulroot
#
# This can be used to setup URI aliases for frequently
# used connection URIs. Aliases may contain only the
# characters a-Z, 0-9, _, -.
#
# Following the '=' may be any valid libvirt connection
# URI, including arbitrary parameters
#uri_aliases = [
# "hail=qemu+ssh://root@hail.cloud.example.com/system",
# "sleet=qemu+ssh://root@sleet.cloud.example.com/system",
#]
#
# These can be used in cases when no URI is supplied by the application
# (@uri_default also prevents probing of the hypervisor driver).
#
#uri_default = "qemu:///system"
home/zuul/zuul-output/logs/registries.conf0000644000000000000000000000744715134437413020044 0ustar rootroot
# For more information on this configuration file, see containers-registries.conf(5).
#
# NOTE: RISK OF USING UNQUALIFIED IMAGE NAMES
# We recommend always using fully qualified image names including the registry
# server (full dns name), namespace, image name, and tag
# (e.g., registry.redhat.io/ubi8/ubi:latest). Pulling by digest (i.e.,
# quay.io/repository/name@digest) further eliminates the ambiguity of tags.
# When using short names, there is always an inherent risk that the image being
# pulled could be spoofed. For example, a user wants to pull an image named
# `foobar` from a registry and expects it to come from myregistry.com. If
# myregistry.com is not first in the search list, an attacker could place a
# different `foobar` image at a registry earlier in the search list. The user
# would accidentally pull and run the attacker's image and code rather than the
# intended content. We recommend only adding registries which are completely
# trusted (i.e., registries which don't allow unknown or anonymous users to
# create accounts with arbitrary names). This will prevent an image from being
# spoofed, squatted or otherwise made insecure. If it is necessary to use one
# of these registries, it should be added at the end of the list.
#
#
# An array of host[:port] registries to try when pulling an unqualified image, in order.
unqualified-search-registries = ["registry.access.redhat.com", "registry.redhat.io", "docker.io"]
# [[registry]]
# # The "prefix" field is used to choose the relevant [[registry]] TOML table;
# # (only) the TOML table with the longest match for the input image name
# # (taking into account namespace/repo/tag/digest separators) is used.
# #
# # The prefix can also be of the form: *.example.com for wildcard subdomain
# # matching.
# #
# # If the prefix field is missing, it defaults to be the same as the "location" field.
# prefix = "example.com/foo"
#
# # If true, unencrypted HTTP as well as TLS connections with untrusted
# # certificates are allowed.
# insecure = false
#
# # If true, pulling images with matching names is forbidden.
# blocked = false
#
# # The physical location of the "prefix"-rooted namespace.
# #
# # By default, this is equal to "prefix" (in which case "prefix" can be omitted
# # and the [[registry]] TOML table can only specify "location").
# #
# # Example: Given
# # prefix = "example.com/foo"
# # location = "internal-registry-for-example.net/bar"
# # requests for the image example.com/foo/myimage:latest will actually work with the
# # internal-registry-for-example.net/bar/myimage:latest image.
#
# # The location can be empty iff prefix is in a
# # wildcarded format: "*.example.com". In this case, the input reference will
# # be used as-is without any rewrite.
# location = "internal-registry-for-example.com/bar"
#
# # (Possibly-partial) mirrors for the "prefix"-rooted namespace.
# #
# # The mirrors are attempted in the specified order; the first one that can be
# # contacted and contains the image will be used (and if none of the mirrors contains the image,
# # the primary location specified by the "registry.location" field, or using the unmodified
# # user-specified reference, is tried last).
# #
# # Each TOML table in the "mirror" array can contain the following fields, with the same semantics
# # as if specified in the [[registry]] TOML table directly:
# # - location
# # - insecure
# [[registry.mirror]]
# location = "example-mirror-0.local/mirror-for-foo"
# [[registry.mirror]]
# location = "example-mirror-1.local/mirrors/foo"
# insecure = true
# # Given the above, a pull of example.com/foo/image:latest will try:
# # 1. example-mirror-0.local/mirror-for-foo/image:latest
# # 2. example-mirror-1.local/mirrors/foo/image:latest
# # 3. internal-registry-for-example.net/bar/image:latest
# # in order, and use the first one that exists.
short-name-mode = "enforcing"
home/zuul/zuul-output/logs/registries.conf.d/0000755000175000000000000000000015134437413020370 5ustar zuulroot
home/zuul/zuul-output/logs/registries.conf.d/000-shortnames.conf0000644000175000000000000001735515134437413023726 0ustar zuulroot
[aliases]
# almalinux
"almalinux" = "docker.io/library/almalinux"
"almalinux-minimal" = "docker.io/library/almalinux-minimal"
# Amazon Linux
"amazonlinux" = "public.ecr.aws/amazonlinux/amazonlinux"
# Arch Linux
"archlinux" = "docker.io/library/archlinux"
# centos
"centos" = "quay.io/centos/centos"
# containers
"skopeo" = "quay.io/skopeo/stable"
"buildah" = "quay.io/buildah/stable"
"podman" = "quay.io/podman/stable"
"hello" = "quay.io/podman/hello"
"hello-world" = "quay.io/podman/hello"
# docker
"alpine" = "docker.io/library/alpine"
"docker" = "docker.io/library/docker"
"registry" = "docker.io/library/registry"
"swarm" = "docker.io/library/swarm"
# Fedora
"fedora-bootc" = "registry.fedoraproject.org/fedora-bootc"
"fedora-minimal" = "registry.fedoraproject.org/fedora-minimal"
"fedora" = "registry.fedoraproject.org/fedora"
# Gentoo
"gentoo" = "docker.io/gentoo/stage3"
# openSUSE
"opensuse/tumbleweed" = "registry.opensuse.org/opensuse/tumbleweed"
"opensuse/tumbleweed-dnf" = "registry.opensuse.org/opensuse/tumbleweed-dnf"
"opensuse/tumbleweed-microdnf" = "registry.opensuse.org/opensuse/tumbleweed-microdnf"
"opensuse/leap" = "registry.opensuse.org/opensuse/leap"
"opensuse/busybox" = "registry.opensuse.org/opensuse/busybox"
"tumbleweed" = "registry.opensuse.org/opensuse/tumbleweed"
"tumbleweed-dnf" = "registry.opensuse.org/opensuse/tumbleweed-dnf"
"tumbleweed-microdnf" = "registry.opensuse.org/opensuse/tumbleweed-microdnf"
"leap" = "registry.opensuse.org/opensuse/leap"
"leap-dnf" = "registry.opensuse.org/opensuse/leap-dnf"
"leap-microdnf" = "registry.opensuse.org/opensuse/leap-microdnf"
"tw-busybox" = "registry.opensuse.org/opensuse/busybox"
# OTel (Open Telemetry) - opentelemetry.io
"otel/autoinstrumentation-go" = "docker.io/otel/autoinstrumentation-go"
"docker.io/otel/autoinstrumentation-go" "otel/autoinstrumentation-nodejs" = "docker.io/otel/autoinstrumentation-nodejs" "otel/autoinstrumentation-python" = "docker.io/otel/autoinstrumentation-python" "otel/autoinstrumentation-java" = "docker.io/otel/autoinstrumentation-java" "otel/autoinstrumentation-dotnet" = "docker.io/otel/autoinstrumentation-dotnet" "otel/opentelemetry-collector" = "docker.io/otel/opentelemetry-collector" "otel/opentelemetry-collector-contrib" = "docker.io/otel/opentelemetry-collector-contrib" "otel/opentelemetry-collector-contrib-dev" = "docker.io/otel/opentelemetry-collector-contrib-dev" "otel/opentelemetry-collector-k8s" = "docker.io/otel/opentelemetry-collector-k8s" "otel/opentelemetry-operator" = "docker.io/otel/opentelemetry-operator" "otel/opentelemetry-operator-bundle" = "docker.io/otel/opentelemetry-operator-bundle" "otel/operator-opamp-bridge" = "docker.io/otel/operator-opamp-bridge" "otel/semconvgen" = "docker.io/otel/semconvgen" "otel/weaver" = "docker.io/otel/weaver" # SUSE "suse/sle15" = "registry.suse.com/suse/sle15" "suse/sles12sp5" = "registry.suse.com/suse/sles12sp5" "suse/sles12sp4" = "registry.suse.com/suse/sles12sp4" "suse/sles12sp3" = "registry.suse.com/suse/sles12sp3" "sle15" = "registry.suse.com/suse/sle15" "sles12sp5" = "registry.suse.com/suse/sles12sp5" "sles12sp4" = "registry.suse.com/suse/sles12sp4" "sles12sp3" = "registry.suse.com/suse/sles12sp3" "bci-base" = "registry.suse.com/bci/bci-base" "bci/bci-base" = "registry.suse.com/bci/bci-base" "bci-micro" = "registry.suse.com/bci/bci-micro" "bci/bci-micro" = "registry.suse.com/bci/bci-micro" "bci-minimal" = "registry.suse.com/bci/bci-minimal" "bci/bci-minimal" = "registry.suse.com/bci/bci-minimal" "bci-busybox" = "registry.suse.com/bci/bci-busybox" "bci/bci-busybox" = "registry.suse.com/bci/bci-busybox" # Red Hat Enterprise Linux "rhel" = "registry.access.redhat.com/rhel" "rhel6" = "registry.access.redhat.com/rhel6" "rhel7" = "registry.access.redhat.com/rhel7" "rhel7.9" = "registry.access.redhat.com/rhel7.9" "rhel-atomic" = "registry.access.redhat.com/rhel-atomic" "rhel9-bootc" = "registry.redhat.io/rhel9/rhel-bootc" "rhel-minimal" = "registry.access.redhat.com/rhel-minimal" "rhel-init" = "registry.access.redhat.com/rhel-init" "rhel7-atomic" = "registry.access.redhat.com/rhel7-atomic" "rhel7-minimal" = "registry.access.redhat.com/rhel7-minimal" "rhel7-init" = "registry.access.redhat.com/rhel7-init" "rhel7/rhel" = "registry.access.redhat.com/rhel7/rhel" "rhel7/rhel-atomic" = "registry.access.redhat.com/rhel7/rhel7/rhel-atomic" "ubi7/ubi" = "registry.access.redhat.com/ubi7/ubi" "ubi7/ubi-minimal" = "registry.access.redhat.com/ubi7-minimal" "ubi7/ubi-init" = "registry.access.redhat.com/ubi7-init" "ubi7" = "registry.access.redhat.com/ubi7" "ubi7-init" = "registry.access.redhat.com/ubi7-init" "ubi7-minimal" = "registry.access.redhat.com/ubi7-minimal" "rhel8" = "registry.access.redhat.com/ubi8" "rhel8-init" = "registry.access.redhat.com/ubi8-init" "rhel8-minimal" = "registry.access.redhat.com/ubi8-minimal" "rhel8-micro" = "registry.access.redhat.com/ubi8-micro" "ubi8" = "registry.access.redhat.com/ubi8" "ubi8-minimal" = "registry.access.redhat.com/ubi8-minimal" "ubi8-init" = "registry.access.redhat.com/ubi8-init" "ubi8-micro" = "registry.access.redhat.com/ubi8-micro" "ubi8/ubi" = "registry.access.redhat.com/ubi8/ubi" "ubi8/ubi-minimal" = "registry.access.redhat.com/ubi8-minimal" "ubi8/ubi-init" = "registry.access.redhat.com/ubi8-init" "ubi8/ubi-micro" = "registry.access.redhat.com/ubi8-micro" 
"ubi8/podman" = "registry.access.redhat.com/ubi8/podman" "ubi8/buildah" = "registry.access.redhat.com/ubi8/buildah" "ubi8/skopeo" = "registry.access.redhat.com/ubi8/skopeo" "rhel9" = "registry.access.redhat.com/ubi9" "rhel9-init" = "registry.access.redhat.com/ubi9-init" "rhel9-minimal" = "registry.access.redhat.com/ubi9-minimal" "rhel9-micro" = "registry.access.redhat.com/ubi9-micro" "ubi9" = "registry.access.redhat.com/ubi9" "ubi9-minimal" = "registry.access.redhat.com/ubi9-minimal" "ubi9-init" = "registry.access.redhat.com/ubi9-init" "ubi9-micro" = "registry.access.redhat.com/ubi9-micro" "ubi9/ubi" = "registry.access.redhat.com/ubi9/ubi" "ubi9/ubi-minimal" = "registry.access.redhat.com/ubi9-minimal" "ubi9/ubi-init" = "registry.access.redhat.com/ubi9-init" "ubi9/ubi-micro" = "registry.access.redhat.com/ubi9-micro" "ubi9/podman" = "registry.access.redhat.com/ubi9/podman" "ubi9/buildah" = "registry.access.redhat.com/ubi9/buildah" "ubi9/skopeo" = "registry.access.redhat.com/ubi9/skopeo" # Rocky Linux "rockylinux" = "quay.io/rockylinux/rockylinux" # Debian "debian" = "docker.io/library/debian" # Kali Linux "kali-bleeding-edge" = "docker.io/kalilinux/kali-bleeding-edge" "kali-dev" = "docker.io/kalilinux/kali-dev" "kali-experimental" = "docker.io/kalilinux/kali-experimental" "kali-last-release" = "docker.io/kalilinux/kali-last-release" "kali-rolling" = "docker.io/kalilinux/kali-rolling" # Ubuntu "ubuntu" = "docker.io/library/ubuntu" # Oracle Linux "oraclelinux" = "container-registry.oracle.com/os/oraclelinux" # busybox "busybox" = "docker.io/library/busybox" # golang "golang" = "docker.io/library/golang" # php "php" = "docker.io/library/php" # python "python" = "docker.io/library/python" # rust "rust" = "docker.io/library/rust" # node "node" = "docker.io/library/node" # Grafana Labs "grafana/agent" = "docker.io/grafana/agent" "grafana/grafana" = "docker.io/grafana/grafana" "grafana/k6" = "docker.io/grafana/k6" "grafana/loki" = "docker.io/grafana/loki" "grafana/mimir" = "docker.io/grafana/mimir" "grafana/oncall" = "docker.io/grafana/oncall" "grafana/pyroscope" = "docker.io/grafana/pyroscope" "grafana/tempo" = "docker.io/grafana/tempo" # curl "curl" = "quay.io/curl/curl" # nginx "nginx" = "docker.io/library/nginx" # QUBIP "qubip/pq-container" = "quay.io/qubip/pq-container" home/zuul/zuul-output/artifacts/0000755000175000017500000000000015134407453016110 5ustar zuulzuulhome/zuul/zuul-output/docs/0000755000175000017500000000000015134407453015060 5ustar zuulzuul