# NOTE(yoctozepto): ensure we pick up fact changes from pre
- name: Refresh facts
setup:
# NOTE(yoctozepto): set these vars as facts so that all hosts have them available in every play
- name: set facts for commonly used variables
vars:
# NOTE(yoctozepto): needed here to use in other facts too
openstack_core_enabled: "{{ scenario not in ['bifrost', 'mariadb', 'prometheus-efk', 'monasca'] }}"
set_fact:
kolla_inventory_path: "/etc/kolla/inventory"
logs_dir: "/tmp/logs"
kolla_ansible_src_dir: "{{ ansible_env.PWD }}/src/{{ zuul.project.canonical_hostname }}/openstack/kolla-ansible"
kolla_ansible_local_src_dir: "{{ zuul.executor.work_root }}/src/{{ zuul.project.canonical_hostname }}/openstack/kolla-ansible"
infra_dockerhub_mirror: "http://{{ zuul_site_mirror_fqdn }}:8082/"
ceph_ansible_src_dir: "{{ ansible_env.PWD }}/src/github.com/ceph/ceph-ansible"
need_build_image: false
build_image_tag: "change_{{ zuul.change | default('none') }}"
openstack_core_enabled: "{{ openstack_core_enabled }}"
openstack_core_tested: "{{ scenario in ['core', 'ceph-ansible', 'zun', 'cells', 'swift', 'linuxbridge', 'ovn'] }}"
dashboard_enabled: "{{ openstack_core_enabled or scenario in ['monasca'] }}"
upper_constraints_file: "{{ ansible_env.HOME }}/src/opendev.org/openstack/requirements/upper-constraints.txt"
docker_image_tag_suffix: "{{ '-aarch64' if ansible_architecture == 'aarch64' else '' }}"
pip_user_path_env:
PATH: "{{ ansible_env.HOME + '/.local/bin:' + ansible_env.PATH }}"
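# NOTE: pip_user_path_env prepends ~/.local/bin to PATH so that tools installed
# later with "pip --user" (pip itself, kolla-ansible, ara) are found without
# root; it is applied through the "environment" keyword further down.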
- name: Install dig for Designate testing
become: true
package:
name: "{{ 'bind-utils' if ansible_os_family == 'RedHat' else 'dnsutils' }}"
when: scenario == 'magnum'
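# NOTE: dig comes from bind-utils (RedHat family) or dnsutils (Debian family);
# in this job layout the Designate DNS checks run as part of the magnum
# scenario, hence the magnum condition above.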
- name: Install xfsprogs package for Swift filesystems
become: true
package:
name: xfsprogs
when: scenario == 'swift'
- name: Prepare disks for a storage service
vars:
disk_type: "{{ ceph_storetype if scenario in ['ceph-ansible'] else scenario }}"
ceph_storetype: "{{ hostvars[inventory_hostname].get('ceph_osd_storetype') }}"
- hosts: primary
any_errors_fatal: true
vars:
kolla_build_config:
DEFAULT:
profile: gate
logs_dir: /tmp/logs/build
# NOTE(yoctozepto): we cannot build and push at the same time on Debian
# Buster; see https://github.com/docker/for-linux/issues/711.
push: "{{ base_distro != 'debian' }}"
base: "{{ base_distro }}"
install_type: "{{ install_type }}"
registry: "127.0.0.1:4000"
namespace: lokolla
tag: "{{ build_image_tag }}"
template_override: /etc/kolla/template_overrides.j2
# NOTE(yoctozepto): to avoid issues with IPv6 not enabled in the docker daemon
# and since we don't need isolated networks here, use host networking
network_mode: host
environment: "{{ pip_user_path_env }}"
tasks:
- name: detect whether we need to build images
set_fact:
need_build_image: true
when:
# NOTE(yoctozepto): if there is any tested change that does not belong to kolla-ansible,
# then kolla images should be built
- item.project.short_name != "kolla-ansible"
with_items: "{{ zuul['items'] }}"
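# For illustration (structure assumed from the field used above), an entry in
# zuul['items'] looks roughly like:
#   - project:
#       short_name: kolla
# i.e. any co-tested change outside kolla-ansible triggers an image build.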
# NOTE(yoctozepto): required to template template_overrides.j2 for Zuul
- name: Include kolla Zuul vars if building new images
include_vars:
file: "{{ zuul.executor.work_root }}/src/opendev.org/openstack/kolla/tests/vars/zuul.yml"
when: need_build_image
- block:
# NOTE(mgoddard): This only affects the remote copy of the repo, not the
# one on the executor.
- name: checkout the previous kolla-ansible branch
shell:
cmd: |
git checkout stable/{{ previous_release | lower }}
echo "kolla-ansible checked out to:"
git log --pretty=oneline -1
chdir: "{{ kolla_ansible_src_dir }}"
- name: checkout the previous requirements branch
shell:
cmd: |
git checkout stable/{{ previous_release | lower }}
echo "requirements checked out to:"
git log --pretty=oneline -1
chdir: "{{ ansible_env.HOME }}/src/opendev.org/openstack/requirements/"
state: "directory"
mode: 0777
become: true
- import_role:
name: kolla-build-config
when: need_build_image
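# NOTE: the kolla-build-config role presumably renders the kolla_build_config
# mapping above into the kolla image build configuration (INI style), roughly:
#   [DEFAULT]
#   profile = gate
#   registry = 127.0.0.1:4000
#   namespace = lokolla
#   tag = <build_image_tag>
# which is what makes locally built images land in the local registry on the
# primary node.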
# NOTE(yoctozepto): required to customize kolla to use local mirrors
- name: Template template_overrides.j2
template:
src: "{{ zuul.executor.work_root }}/src/opendev.org/openstack/kolla/tests/templates/template_overrides.j2"
dest: /etc/kolla/template_overrides.j2
when: need_build_image
- name: Ensure /etc/docker exists
file:
path: "/etc/docker"
state: directory
become: true
- name: Ensure configuration directories exist
file:
path: "/etc/kolla/config/{{ item }}"
state: directory
loop:
- nova
- bifrost
- name: Generate configuration files
template:
src: "{{ kolla_ansible_local_src_dir }}/{{ item.src }}"
dest: "{{ item.dest }}"
become: "{{ item.become | default(false) }}"
with_items:
# Ansible inventory
- src: "tests/templates/inventory.j2"
dest: "{{ kolla_inventory_path }}"
# globals.yml
- src: "tests/templates/globals-default.j2"
dest: /etc/kolla/globals.yml
# nova-compute.conf
- src: "tests/templates/nova-compute-overrides.j2"
dest: /etc/kolla/config/nova/nova-compute.conf
# neutron.conf
- src: "tests/templates/neutron-server-overrides.j2"
dest: /etc/kolla/config/neutron.conf
when: "{{ openstack_core_enabled }}"
# bifrost/dib.yml
- src: "tests/templates/bifrost-dib-overrides.j2"
dest: /etc/kolla/config/bifrost/dib.yml
- src: "tests/templates/ironic-overrides.j2"
dest: /etc/kolla/config/ironic.conf
when: "{{ scenario == 'ironic' }}"
# Ceph-Ansible inventory
- src: "tests/templates/ceph-inventory.j2"
dest: /etc/kolla/ceph-inventory
when: "{{ scenario == 'ceph-ansible' }}"
# ceph-ansible.yml
- src: "tests/templates/ceph-ansible.j2"
dest: /etc/kolla/ceph-ansible.yml
when: "{{ scenario == 'ceph-ansible' }}"
- block:
- name: ensure ironic config directory exists
file:
path: /etc/kolla/config/ironic
state: directory
mode: 0777
- name: download Ironic Python Agent (IPA) images
get_url:
url: "https://tarballs.openstack.org/ironic-python-agent/tinyipa/files/{{ item.src }}"
dest: "/etc/kolla/config/ironic/{{ item.dest }}"
with_items:
- src: "tinyipa-{{ zuul.branch | replace('/', '-') }}.gz"
dest: ironic-agent.initramfs
- src: "tinyipa-{{ zuul.branch | replace('/', '-') }}.vmlinuz"
dest: ironic-agent.kernel
when: scenario == "ironic"
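# NOTE: the tinyipa file names above resolve per branch, e.g. for an
# illustrative stable/victoria change the downloads would be:
#   tinyipa-stable-victoria.gz       -> ironic-agent.initramfs
#   tinyipa-stable-victoria.vmlinuz  -> ironic-agent.kernel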
- name: ensure /etc/ansible exists
file:
path: /etc/ansible
state: directory
become: true
# NOTE(mgoddard): We need a recent pip to install the latest cryptography
# library. See https://github.com/pyca/cryptography/issues/5753
- name: install pip 19.1.1+
pip:
name: "pip>=19.1.1"
executable: "pip3"
extra_args: "--user"
- name: install kolla-ansible and dependencies
vars:
# Test latest ansible version on Ubuntu, minimum supported on others.
ansible_version_constraint: "==2.9.*"
pip:
name:
- "{{ kolla_ansible_src_dir }}"
- "ansible{{ ansible_version_constraint }}"
- "ara<1.0.0"
extra_args: "-c {{ upper_constraints_file }} --user"
- name: get ARA callback plugin path
command: "python3 -m ara.setup.callback_plugins"
changed_when: false
register: ara_callback_plugins
- name: template ansible.cfg
template:
src: "{{ kolla_ansible_local_src_dir }}/tests/templates/ansible.cfg.j2"
dest: /etc/ansible/ansible.cfg
src: "{{ kolla_ansible_src_dir }}/etc/kolla/passwords.yml"
- name: slurp kolla passwords
slurp:
src: /etc/kolla/passwords.yml
register: passwords_yml
- name: write out kolla SSH private key
copy:
content: "{{ (passwords_yml.content | b64decode | from_yaml).kolla_ssh_key.private_key }}"
dest: ~/.ssh/id_rsa_kolla
mode: 0600
- name: authorise kolla public key for zuul user
authorized_key:
user: "{{ ansible_env.USER }}"
key: "{{ (passwords_yml.content | b64decode | from_yaml).kolla_ssh_key.public_key }}"
# Delegate to each host in turn. If more tasks require execution on all
# hosts in future, break out into a separate play.
with_inventory_hostnames:
- all
delegate_to: "{{ item }}"
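# NOTE: the key pair slurped from passwords.yml is distributed here so that the
# primary node can SSH to every host as the zuul user, which is presumably what
# the later bootstrap/deploy steps rely on.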
- name: Record the running state of the environment as seen by the setup module
shell:
cmd: ansible all -i {{ kolla_inventory_path }} -e ansible_user={{ ansible_user }} -m setup > /tmp/logs/ansible/initial-setup
- name: Set facts for actions
set_fact:
# NOTE(yoctozepto): no support for upgrades for now
docker_image_tag: "{{ build_image_tag if need_build_image else (zuul.branch | basename) ~ docker_image_tag_suffix }}"
docker_image_prefix: "{{ 'primary:4000/lokolla/' if need_build_image else 'kolla/' }}"
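# For example (illustrative values): a master-branch job that reuses published
# images ends up with docker_image_tag "master" (or "master-aarch64" on
# aarch64) and docker_image_prefix "kolla/", while a job that builds images
# uses "change_<change number>" and the local registry prefix
# "primary:4000/lokolla/".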
# NOTE(yoctozepto): k-a octavia-certificates should run before k-a bootstrap-servers
# because the latter hijacks /etc/kolla permissions (due to same directory on the
# same host being used by both)
- name: create TLS certificates for octavia
command: kolla-ansible octavia-certificates
when: scenario == 'magnum'
# NOTE(mgoddard): We are using the script module here and later to ensure
# we use the local copy of these scripts, rather than the one on the remote
# host, which could be checked out to a previous release (in an upgrade
# job).
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
environment:
BASE_DISTRO: "{{ base_distro }}"
BUILD_IMAGE: "{{ need_build_image }}"
KOLLA_SRC_DIR: "{{ ansible_env.HOME }}/src/opendev.org/openstack/kolla"
UPPER_CONSTRAINTS: "{{ upper_constraints_file }}"
- name: Run init-swift.sh script
script:
cmd: init-swift.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
environment:
KOLLA_SWIFT_BASE_IMAGE: "{{ docker_image_prefix }}{{ base_distro }}-{{ install_type }}-swift-base:{{ docker_image_tag }}"
# NOTE(yoctozepto): no IPv6 for now
STORAGE_NODES: "{{ groups['all'] | map('extract', hostvars,
['ansible_'+api_interface_name, 'ipv4', 'address'])
| join(' ') }}"
when: scenario == 'swift'
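# For example (illustrative addresses), with two storage nodes whose
# api_interface IPv4 addresses are 192.0.2.10 and 192.0.2.11, STORAGE_NODES
# expands to "192.0.2.10 192.0.2.11".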
# At this point we have generated all necessary configuration, and are
# ready to deploy the control plane services. Control flow now depends on
# the scenario being exercised.
# Deploy ceph-ansible on ceph-ansible scenarios
- block:
- name: Run deploy-ceph-ansible.sh script
script:
cmd: deploy-ceph-ansible.sh
executable: /bin/bash
chdir: "{{ ceph_ansible_src_dir }}"
environment:
BASE_DISTRO: "{{ base_distro }}"
- name: Ensure required kolla config directories exist
file:
state: directory
name: "/etc/kolla/config/{{ item.name }}"
mode: 0777
with_items: "{{ ceph_ansible_services }}"
- name: copy ceph.conf to enabled services
copy:
src: "/etc/ceph/ceph.conf"
dest: "/etc/kolla/config/{{ item.name }}/ceph.conf"
remote_src: True
with_items: "{{ ceph_ansible_services }}"
- name: copy keyrings to enabled services
copy:
remote_src: True
src: "/etc/ceph/{{ item.keyring }}"
dest: "/etc/kolla/config/{{ item.name }}/{{ item.keyring }}"
with_items: "{{ ceph_ansible_services }}"
become: True
vars:
ceph_ansible_services:
- { name: 'cinder/cinder-volume', keyring: "ceph.client.cinder.keyring" }
- { name: 'cinder/cinder-backup', keyring: "ceph.client.cinder.keyring" }
- { name: 'cinder/cinder-backup', keyring: "ceph.client.cinder-backup.keyring" }
- { name: 'glance', keyring: "ceph.client.glance.keyring" }
- { name: 'nova', keyring: "ceph.client.nova.keyring" }
- { name: 'nova', keyring: "ceph.client.cinder.keyring" }
when: scenario == "ceph-ansible"
# Deploy control plane. For upgrade jobs this is the previous release.
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
# NOTE(yoctozepto): this is nice as the first step after the deployment
# because it waits for the services to stabilize well enough so that
# the dashboard is able to show the login prompt
- name: Run test-dashboard.sh script
script:
cmd: test-dashboard.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when: dashboard_enabled
- name: Run init-core-openstack.sh script
script:
cmd: init-core-openstack.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
environment:
EXT_NET_CIDR: "{{ neutron_external_network_prefix }}0/{{ neutron_external_network_prefix_length }}"
EXT_NET_RANGE: "start={{ neutron_external_network_prefix }}150,end={{ neutron_external_network_prefix }}199"
EXT_NET_GATEWAY: "{{ neutron_external_network_prefix }}1"
EXT_NET_LOCAL_ADDR: "{{ neutron_external_network_prefix }}1/{{ neutron_external_network_prefix_length }}"
EXT_NET_SLAVE_DEVICE: "{{ neutron_external_interface_name }}"
SCENARIO: "{{ scenario }}"
when: openstack_core_tested or scenario in ['ironic', 'magnum', 'scenario_nfv', 'zun']
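# For example (illustrative prefix), with neutron_external_network_prefix
# "192.0.2." and a prefix length of 24, the variables above become:
#   EXT_NET_CIDR=192.0.2.0/24
#   EXT_NET_RANGE=start=192.0.2.150,end=192.0.2.199
#   EXT_NET_GATEWAY=192.0.2.1
#   EXT_NET_LOCAL_ADDR=192.0.2.1/24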
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
environment:
SCENARIO: "{{ scenario }}"
when: openstack_core_tested
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
- name: Run test-swift.sh script
script:
cmd: test-swift.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when: scenario == 'swift'
- name: Run test-scenario-nfv.sh script
script:
cmd: test-scenario-nfv.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when: scenario == "scenario_nfv"
- name: Run test-ironic.sh script
script:
cmd: test-ironic.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
environment:
TLS_ENABLED: "{{ tls_enabled }}"
when: scenario == "ironic"
- name: Run test-magnum.sh script
script:
cmd: test-magnum.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when: scenario == "magnum"
- name: Run test-monasca.sh script
script:
cmd: test-monasca.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when: scenario == "monasca"
- name: Run test-masakari.sh script
script:
cmd: test-masakari.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when: scenario == "masakari"
- name: Run test-ovn.sh script
script:
cmd: test-ovn.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when: scenario == "ovn"
- name: Run test-mariadb.sh script
script:
cmd: test-mariadb.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when: scenario == "mariadb"
- name: Run test-prometheus-efk.sh script
script:
cmd: test-prometheus-efk.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when: scenario == "prometheus-efk"
# NOTE(yoctozepto): each host checks itself
- hosts: all
tasks:
- name: Pre-upgrade sanity checks
block:
- name: Run pre-upgrade check-failure.sh script
shell:
cmd: tests/check-failure.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
- name: Run pre-upgrade check-config.sh script
shell:
cmd: tests/check-config.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
# Using script rather than shell here because check-logs.sh does not
# exist in Stein branch.
- name: Run check-logs.sh script
script:
cmd: check-logs.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
- hosts: primary
any_errors_fatal: true
environment: "{{ pip_user_path_env }}"
tasks:
# Upgrade: update config.
- block:
# NOTE(mgoddard): This only affects the remote copy of the repo, not the
# one on the executor.
- name: checkout the current kolla-ansible branch
shell:
cmd: |
git checkout {{ zuul.branch }}
echo "kolla-ansible checked out to:"
git log --pretty=oneline -1
chdir: "{{ kolla_ansible_src_dir }}"
- name: checkout the current requirements branch
shell:
cmd: |
git checkout {{ zuul.branch }}
echo "requirements checked out to:"
git log --pretty=oneline -1
chdir: "{{ ansible_env.HOME }}/src/opendev.org/openstack/requirements/"
- name: Generate configuration files
template:
src: "{{ kolla_ansible_local_src_dir }}/{{ item.src }}"
dest: "{{ item.dest }}"
vars:
is_previous_release: false
with_items:
# Ansible inventory
- src: "tests/templates/inventory.j2"
dest: "{{ kolla_inventory_path }}"
# globals.yml
- src: "tests/templates/globals-default.j2"
dest: /etc/kolla/globals.yml
# nova-compute.conf
- src: "tests/templates/nova-compute-overrides.j2"
dest: /etc/kolla/config/nova/nova-compute.conf
when: item.when | default(true)
name: "{{ kolla_ansible_src_dir }}"
extra_args: "-c {{ upper_constraints_file }} --user"
# Update passwords.yml to include any new passwords added in this
# release.
- name: move passwords.yml to passwords.yml.old
command: mv /etc/kolla/passwords.yml /etc/kolla/passwords.yml.old
- name: copy passwords.yml file
copy:
src: "{{ kolla_ansible_src_dir }}/etc/kolla/passwords.yml"
dest: /etc/kolla/passwords.yml
remote_src: true
- name: generate new passwords
command: >-
kolla-mergepwd
--old /etc/kolla/passwords.yml.old
--new /etc/kolla/passwords.yml
--final /etc/kolla/passwords.yml
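# NOTE: kolla-mergepwd merges the previously generated values from
# passwords.yml.old into the fresh passwords.yml template, so existing
# credentials are preserved while keys newly added in this release remain
# available to be populated.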
# Perform an upgrade to the in-development code.
- name: Run upgrade.sh script
shell:
cmd: tests/upgrade.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
# NOTE(yoctozepto): this is nice as the first step after the upgrade
# because it waits for the services to stabilize well enough so that
# the dashboard is able to show the login prompt
- name: Run test-dashboard.sh script
script:
cmd: test-dashboard.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
environment:
TLS_ENABLED: "{{ tls_enabled }}"
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
environment:
SCENARIO: "{{ scenario }}"
when: openstack_core_tested
- block:
- name: Run deploy-bifrost.sh script
shell:
cmd: tests/deploy-bifrost.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
- name: Run test-bifrost.sh script
shell:
cmd: tests/test-bifrost.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
- name: Run upgrade-bifrost.sh script
shell:
cmd: tests/upgrade-bifrost.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when: scenario == "bifrost"
# NOTE(yoctozepto): each host checks itself
- hosts: all
any_errors_fatal: true
tasks:
- name: Post-deploy/upgrade sanity checks
block:
- name: Run check-failure.sh script
shell:
cmd: tests/check-failure.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
- name: Run check-config.sh script
shell:
cmd: tests/check-config.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
- name: Run check-logs.sh script
shell:
cmd: tests/check-logs.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
- hosts: primary
any_errors_fatal: true
environment: "{{ pip_user_path_env }}"
tasks:
- name: Run reconfigure.sh script
script:
cmd: reconfigure.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when:
- not is_upgrade
- scenario != "bifrost"
# NOTE(yoctozepto): each host checks itself
- hosts: all
any_errors_fatal: true
tasks:
- name: Post-reconfigure sanity checks
block:
- name: Run check-failure.sh script
shell:
cmd: tests/check-failure.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
- name: Run check-config.sh script
shell:
cmd: tests/check-config.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
- name: Run check-logs.sh script
shell:
cmd: tests/check-logs.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when:
- not is_upgrade
- scenario != "bifrost"