---
# Copyright 2015, Rackspace US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# General Environment Settings
# NB: the VIP addresses aren't used; endpoints are specified per service below.
external_lb_vip_address: 127.0.0.1
internal_lb_vip_address: 127.0.0.1
debug: true
test_branch: "{{ lookup('env', 'TESTING_BRANCH') | default('master', true) }}"
## Container destruction when testing repeatedly
force_containers_destroy: True
force_containers_data_destroy: True
# Set up host variables that can be reused later.
# These need to be defaulted when the service group is not available.
# To stop Ansible from eagerly evaluating hostvars for a missing group,
# the group name is resolved to a plain string first.
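# For example (illustrative only): with a non-empty 'galera_all' group in the
# inventory, test_galera_group resolves to 'galera_all'; when that group is
# absent or empty it falls back to 'all_containers', so the host lookups
# further down always target a populated group.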
test_galera_group: "{{ ((groups['galera_all'] is defined) and (groups['galera_all'] | length > 0)) | ternary('galera_all', 'all_containers') }}"
test_rabbitmq_group: "{{ ((groups['rabbitmq_all'] is defined) and (groups['rabbitmq_all'] | length > 0)) | ternary('rabbitmq_all', 'all_containers') }}"
test_memcached_group: "{{ ((groups['memcached_all'] is defined) and (groups['memcached_all'] | length > 0)) | ternary('memcached_all', 'all_containers') }}"
test_keystone_group: "{{ ((groups['keystone_all'] is defined) and (groups['keystone_all'] | length > 0)) | ternary('keystone_all', 'all_containers') }}"
test_glance_group: "{{ ((groups['glance_all'] is defined) and (groups['glance_all'] | length > 0)) | ternary('glance_all', 'all_containers') }}"
test_nova_api_metadata_group: "{{ ((groups['nova_api_metadata'] is defined) and (groups['nova_api_metadata'] | length > 0)) | ternary('nova_api_metadata', 'all_containers') }}"
test_nova_api_compute_group: "{{ ((groups['nova_api_os_compute'] is defined) and (groups['nova_api_os_compute'] | length > 0)) | ternary('nova_api_os_compute', 'all_containers') }}"
test_nova_console_group: "{{ ((groups['nova_console'] is defined) and (groups['nova_console'] | length > 0)) | ternary('nova_console', 'all_containers') }}"
test_neutron_server_group: "{{ ((groups['neutron_server'] is defined) and (groups['neutron_server'] | length > 0)) | ternary('neutron_server', 'all_containers') }}"
test_swift_proxy_group: "{{ ((groups['swift_proxy'] is defined) and (groups['swift_proxy'] | length > 0)) | ternary('swift_proxy', 'all_containers') }}"
test_gnocchi_group: "{{ ((groups['gnocchi_all'] is defined) and (groups['gnocchi_all'] | length > 0)) | ternary('gnocchi_all', 'all_containers') }}"
test_ironic_api_group: "{{ ((groups['ironic_api'] is defined) and (groups['ironic_api'] | length > 0)) | ternary('ironic_api', 'all_containers') }}"
test_cinder_api_group: "{{ ((groups['cinder_api'] is defined) and (groups['cinder_api'] | length > 0)) | ternary('cinder_api', 'all_containers') }}"
test_galera_host: "{{ hostvars[groups[test_galera_group][0]]['ansible_host'] }}"
test_rabbitmq_host: "{{ hostvars[groups[test_rabbitmq_group][0]]['ansible_host'] }}"
test_memcached_host: "{{ hostvars[groups[test_memcached_group][0]]['ansible_host'] }}"
test_keystone_host: "{{ hostvars[groups[test_keystone_group][0]]['ansible_host'] }}"
test_glance_host: "{{ hostvars[groups[test_glance_group][0]]['ansible_host'] }}"
test_nova_api_metadata_host: "{{ hostvars[groups[test_nova_api_metadata_group][0]]['ansible_host'] }}"
test_nova_api_compute_host: "{{ hostvars[groups[test_nova_api_compute_group][0]]['ansible_host'] }}"
test_nova_console_host: "{{ hostvars[groups[test_nova_console_group][0]]['ansible_host'] }}"
test_neutron_server_host: "{{ hostvars[groups[test_neutron_server_group][0]]['ansible_host'] }}"
test_swift_proxy_host: "{{ hostvars[groups[test_swift_proxy_group][0]]['ansible_host'] }}"
test_gnocchi_api_host: "{{ hostvars[groups[test_gnocchi_group][0]]['ansible_host'] }}"
test_ironic_api_host: "{{ hostvars[groups[test_ironic_api_group][0]]['ansible_host'] }}"
test_cinder_api_host: "{{ hostvars[groups[test_cinder_api_group][0]]['ansible_host'] }}"
## LXC container default bind mounts
lxc_container_default_bind_mounts:
  - host_directory: "/openstack/backup/{{ inventory_hostname }}"
    container_directory: "/var/backup"
  - host_directory: "/openstack/log/{{ inventory_hostname }}"
    container_directory: "/var/log"
# LXC Settings
lxc_net_address: 10.100.100.1
lxc_net_netmask: 255.255.255.0
lxc_net_dhcp_range: 10.100.100.2,10.100.100.99
lxc_net_bridge: lxcbr0
lxc_kernel_options:
  - { key: 'fs.inotify.max_user_instances', value: 1024 }
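# LXC 3.0 renamed the apparmor config key: releases before 3.0 use
# 'lxc.aa_profile' while 3.0+ use 'lxc.apparmor.profile'. The pipe lookup
# below probes lxc-info and assumes 2.0.0 when the binary is unavailable.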
lxc_config_key_apparmor: "{{ (lookup('pipe', 'lxc-info --version || echo 2.0.0') is version('3.0.0', 'lt')) | ternary('aa_profile', 'apparmor.profile') }}"
lxc_container_config_list:
  - "{{ (hostvars[physical_host]['ansible_distribution'] == 'Debian') | ternary('lxc.' ~ lxc_config_key_apparmor ~ '=unconfined', 'lxc.' ~ lxc_config_key_apparmor ~ '=lxc-openstack') }}"
# Galera Settings
galera_address: "{{ test_galera_host }}"
galera_root_password: secrete
galera_root_user: root
galera_innodb_buffer_pool_size: 256M
galera_innodb_log_buffer_size: 32M
galera_wsrep_node_name: "{{ inventory_hostname }}"
galera_wsrep_provider_options:
  - { option: "gcache.size", value: "32M" }
galera_server_id: "{{ inventory_hostname | string_2_int }}"
galera_disable_privatedevices: true
# RabbitMQ Settings
rabbitmq_cookie_token: secrete
rabbitmq_servers: "{{ test_rabbitmq_host }}"
rabbitmq_use_ssl: False
rabbitmq_port: 5672
rabbitmq_password: "secrete"
# Oslo Messaging RPC Settings
oslomsg_rpc_transport: rabbit
oslomsg_rpc_port: "{{ rabbitmq_port }}"
oslomsg_rpc_servers: "{{ rabbitmq_servers }}"
oslomsg_rpc_use_ssl: "{{ rabbitmq_use_ssl }}"
oslomsg_rpc_host_group: "{{ test_rabbitmq_group }}"
oslomsg_rpc_password: "{{ rabbitmq_password }}"
oslomsg_rpc_policies:
  - name: "HA"
    pattern: '^(?!(amq\.)|(.*_fanout_)|(reply_)).*'
    priority: 0
    tags: "ha-mode=all"
# Oslo Messaging Notify Settings
oslomsg_notify_transport: rabbit
oslomsg_notify_port: "{{ rabbitmq_port }}"
oslomsg_notify_servers: "{{ rabbitmq_servers }}"
oslomsg_notify_use_ssl: "{{ rabbitmq_use_ssl }}"
oslomsg_notify_host_group: "{{ test_rabbitmq_group }}"
oslomsg_notify_password: "{{ rabbitmq_password }}"
oslomsg_notify_policies:
  - name: "HA"
    pattern: '^(?!(amq\.)|(.*_fanout_)|(reply_)).*'
    priority: 0
    tags: "ha-mode=all"
# Memcache Settings
memcached_listen: "{{ test_memcached_host }}"
memcached_servers: "{{ test_memcached_host }}"
memcached_encryption_key: "secrete"
memcached_disable_privatedevices: true
# Keystone Settings
keystone_ssl: True
keystone_admin_user_name: admin
keystone_admin_tenant_name: admin
keystone_auth_admin_password: "SuperSecretePassword"
keystone_service_adminuri_insecure: false
keystone_service_internaluri_insecure: false
keystone_service_publicuri: "http://{{ test_keystone_host }}:5000"
keystone_service_publicurl: "{{ keystone_service_publicuri }}/v3"
keystone_service_internaluri: "http://{{ test_keystone_host }}:5000"
keystone_service_internalurl: "{{ keystone_service_internaluri }}/v3"
keystone_service_adminuri: "http://{{ test_keystone_host }}:5000"
keystone_service_adminurl: "{{ keystone_service_adminuri }}/v3"
keystone_service_password: "secrete"
keystone_oslomsg_rpc_password: "{{ oslomsg_rpc_password }}"
keystone_oslomsg_notify_password: "{{ oslomsg_notify_password }}"
keystone_container_mysql_password: "SuperSecrete"
keystone_venv_tag: "testing"
keystone_developer_mode: true
keystone_git_install_branch: "{{ test_branch }}"
keystone_service_region: RegionOne
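# The loop below renders a JSON-style list of memcached endpoints, e.g.
# (illustrative) ["10.100.100.2:11211"] for a single-member memcached group.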
keystone_cache_servers: "[{% for host in groups[test_memcached_group] %}\"{{ hostvars[host]['ansible_host'] }}:11211\"{% if not loop.last %},{% endif %}{% endfor %}]"
# Glance specific settings
glance_service_publicuri: "http://{{ test_glance_host }}:9292"
glance_service_publicurl: "{{ glance_service_publicuri }}"
glance_service_internaluri: "http://{{ test_glance_host }}:9292"
glance_service_internalurl: "{{ glance_service_internaluri }}"
glance_service_adminuri: "http://{{ test_glance_host }}:9292"
glance_service_adminurl: "{{ glance_service_adminuri }}"
glance_service_port: 9292
glance_service_user_name: glance
glance_container_mysql_password: "SuperSecrete"
glance_developer_mode: true
glance_git_install_branch: "{{ test_branch }}"
glance_profiler_hmac_key: "secrete"
glance_oslomsg_rpc_password: "{{ oslomsg_rpc_password }}"
glance_oslomsg_notify_password: "{{ oslomsg_notify_password }}"
glance_service_password: "secrete"
glance_venv_tag: "testing"
glance_host: "{{ test_glance_host }}"
# Nova specific settings
nova_api_container_mysql_password: "SuperSecrete"
nova_glance_api_servers: "http://{{ test_glance_host }}:9292"
nova_container_mysql_password: "SuperSecrete"
nova_developer_mode: true
nova_git_install_branch: "{{ test_branch }}"
nova_keystone_auth_plugin: password
nova_management_address: "{{ ansible_host }}"
nova_metadata_port: 8775
nova_metadata_host: "{{ test_nova_api_metadata_host }}"
nova_metadata_protocol: http
nova_metadata_insecure: False
nova_metadata_proxy_secret: "secrete"
nova_novncproxy_vncserver_listen: localhost
nova_novncproxy_vncserver_proxyclient_address: localhost
nova_oslomsg_rpc_password: "{{ oslomsg_rpc_password }}"
nova_oslomsg_notify_password: "{{ oslomsg_notify_password }}"
nova_program_name: nova-api-os-compute
nova_service_adminuri: "http://{{ test_nova_api_compute_host }}:8774"
nova_service_adminurl: "{{ nova_service_adminuri }}/v2.1"
nova_service_publicuri: "http://{{ test_nova_api_compute_host }}:8774"
nova_service_publicurl: "{{ nova_service_publicuri }}/v2.1"
nova_service_internaluri: "http://{{ test_nova_api_compute_host }}:8774"
nova_service_internalurl: "{{ nova_service_internaluri }}/v2.1"
nova_spice_html5proxy_base_uri: "http://{{ test_nova_console_host }}:6082"
nova_spice_html5proxy_base_url: "{{ nova_spice_html5proxy_base_uri }}/spice_auto.html"
nova_service_password: "secrete"
nova_service_project_domain_id: default
nova_service_project_name: service
nova_service_region: RegionOne
nova_service_user_domain_id: default
nova_service_user_name: nova
nova_venv_bin: "/openstack/venvs/nova-{{ nova_venv_tag }}/bin"
nova_venv_tag: "testing"
nova_discover_hosts_in_cells_interval: '-1'
nova_cell0_database: "nova_cell0"
# lxd specific vars
lxd_trust_password: "SuperSecrete"
# Neutron specific settings
neutron_container_mysql_password: SuperSecrete
neutron_developer_mode: true
neutron_git_install_branch: "{{ test_branch }}"
neutron_ha_vrrp_auth_password: secrete
neutron_management_address: "{{ test_neutron_server_host }}"
neutron_oslomsg_rpc_password: "{{ oslomsg_rpc_password }}"
neutron_oslomsg_notify_password: "{{ oslomsg_notify_password }}"
neutron_service_publicuri: "http://{{ test_neutron_server_host }}:9696"
neutron_service_publicurl: "{{ neutron_service_publicuri }}"
neutron_service_adminuri: "http://{{ test_neutron_server_host }}:9696"
neutron_service_adminurl: "{{ neutron_service_adminuri }}"
neutron_service_internaluri: "http://{{ test_neutron_server_host }}:9696"
neutron_service_internalurl: "{{ neutron_service_internaluri }}"
neutron_service_password: "secrete"
neutron_service_project_name: service
neutron_service_region: RegionOne
neutron_service_user_name: neutron
neutron_venv_tag: testing
neutron_provider_networks:
  network_types: "vxlan"
  network_vxlan_ranges: "1:1000"
neutron_plugin_type: ml2.lxb
# Cinder specific settings
cinder_backends_rbd_inuse: false
cinder_ceph_client: cinder
cinder_container_mysql_password: "SuperSecrete"
cinder_developer_mode: true
cinder_git_install_branch: "{{ test_branch }}"
cinder_glance_api_servers: "http://{{ test_glance_host }}:9292"
cinder_profiler_hmac_key: "secrete"
cinder_oslomsg_rpc_password: "{{ oslomsg_rpc_password }}"
cinder_oslomsg_notify_password: "{{ oslomsg_notify_password }}"
cinder_service_password: "secrete"
cinder_venv_tag: "testing"
cinder_service_port: 8776
cinder_service_proto: http
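# Note: the %(tenant_id)s token in the catalog URLs below is left literal;
# keystone substitutes the project ID when the endpoint is resolved.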
cinder_service_publicuri: "{{ cinder_service_proto }}://{{ test_cinder_api_host }}:{{ cinder_service_port }}"
cinder_service_publicurl: "{{ cinder_service_publicuri }}/v1/%(tenant_id)s"
cinder_service_adminuri: "{{ cinder_service_proto }}://{{ test_cinder_api_host }}:{{ cinder_service_port }}"
cinder_service_adminurl: "{{ cinder_service_adminuri }}/v1/%(tenant_id)s"
cinder_service_internaluri: "{{ cinder_service_proto }}://{{ test_cinder_api_host }}:{{ cinder_service_port }}"
cinder_service_internalurl: "{{ cinder_service_internaluri }}/v1/%(tenant_id)s"
cinder_service_v2_port: 8776
cinder_service_v2_proto: http
cinder_service_v2_publicuri: "{{ cinder_service_v2_proto }}://{{ test_cinder_api_host }}:{{ cinder_service_v2_port }}"
cinder_service_v2_publicurl: "{{ cinder_service_v2_publicuri }}/v2/%(tenant_id)s"
cinder_service_v2_adminuri: "{{ cinder_service_v2_proto }}://{{ test_cinder_api_host }}:{{ cinder_service_v2_port }}"
cinder_service_v2_adminurl: "{{ cinder_service_v2_adminuri }}/v2/%(tenant_id)s"
cinder_service_v2_internaluri: "{{ cinder_service_v2_proto }}://{{ test_cinder_api_host }}:{{ cinder_service_v2_port }}"
cinder_service_v2_internalurl: "{{ cinder_service_v2_internaluri }}/v2/%(tenant_id)s"
cinder_backends:
  lvm:
    volume_group: cinder-volumes
    volume_driver: cinder.volume.drivers.lvm.LVMVolumeDriver
    volume_backend_name: LVM_iSCSI
cinder_default_volume_type: lvm
# Swift specific settings
swift_oslomsg_notify_password: "{{ oslomsg_notify_password }}"
swift_container_mysql_password: "SuperSecrete"
swift_dispersion_password: "secrete"
swift_hash_path_prefix: "secrete_prefix"
swift_hash_path_suffix: "secrete_suffix"
swift_service_password: "secrete"
swift_developer_mode: true
swift_git_install_branch: "{{ test_branch }}"
swift_venv_tag: untagged
swift_venv_bin: "/openstack/venvs/swift-{{ swift_venv_tag }}/bin"
swift_service_publicuri: "http://{{ test_swift_proxy_host }}:8080"
swift_service_adminuri: "http://{{ test_swift_proxy_host }}:8080"
swift_service_internaluri: "http://{{ test_swift_proxy_host }}:8080"
swift:
  storage_network: "{{ test_swift_storage_network | default('eth2') }}"
  replication_network: "{{ test_swift_repl_network | default('eth3') }}"
  part_power: 8
  repl_number: "{{ test_swift_repl_number | default(3) }}"
  region: "{{ test_swift_region | default(1) }}"
  drives:
    - name: swift1
    - name: swift2
  mount_point: /openstack
  storage_policies:
    - policy:
        name: gold
        index: 0
        default: True
swift_proxy_server_conf_overrides:
  "filter:keystoneauth":
    reseller_prefix: "AUTH, SERVICE"
    "SERVICE_service_roles": "test5"
# Heat specific settings
heat_venv_tag: "testing"
heat_developer_mode: True
heat_git_install_branch: "{{ test_branch }}"
heat_service_password: secrete
heat_stack_domain_admin_password: secrete
heat_auth_encryption_key: 32characterslongboguskeyvaluefoo
heat_container_mysql_password: "SuperSecrete"
heat_oslomsg_rpc_password: "{{ oslomsg_rpc_password }}"
heat_oslomsg_notify_password: "{{ oslomsg_notify_password }}"
# Gnocchi specific settings
gnocchi_venv_tag: "testing"
gnocchi_developer_mode: true
gnocchi_service_publicuri: "http://{{ test_gnocchi_api_host }}:8041"
gnocchi_service_adminuri: "http://{{ test_gnocchi_api_host }}:8041"
gnocchi_service_internaluri: "http://{{ test_gnocchi_api_host }}:8041"
gnocchi_container_mysql_password: "secrete"
gnocchi_keystone_auth: yes
gnocchi_service_password: "secrete"
gnocchi_ssl_external: false
# Ironic specific settings
ironic_venv_tag: "testing"
ironic_developer_mode: True
ironic_git_install_branch: "{{ test_branch }}"
ironic_service_publicuri: "http://{{ test_ironic_api_host }}:6385"
ironic_service_publicurl: "{{ ironic_service_publicuri }}"
ironic_service_adminuri: "http://{{ test_ironic_api_host }}:6385"
ironic_service_adminurl: "{{ ironic_service_adminuri }}"
ironic_service_internaluri: "http://{{ test_ironic_api_host }}:6385"
ironic_service_internalurl: "{{ ironic_service_internaluri }}"
ironic_service_password: "secrete"
ironic_service_name: ironic
ironic_service_project_name: "service"
ironic_container_mysql_password: "secrete"
ironic_oslomsg_rpc_password: "{{ oslomsg_rpc_password }}"
ironic_oslomsg_notify_password: "{{ oslomsg_notify_password }}"
ironic_swift_temp_url_secret_key: secrete
ironic_keystone_auth_plugin: password
ironic_glance_api_servers: "http://{{ test_glance_host }}:9292"
# Horizon Settings
horizon_developer_mode: true
horizon_venv_tag: "testing"
horizon_git_install_branch: "{{ test_branch }}"
horizon_requirements_git_install_branch: "{{ test_branch }}"
horizon_container_mysql_password: "SuperSecrete"
horizon_secret_key: "SuperSecreteHorizonKey"
horizon_external_ssl: False
# Tempest specific settings
tempest_git_repo: https://opendev.org/openstack/tempest
# We prefer to use the released versions of tempest for stable branches, but
# we deploy the latest for the master branch.
tempest_developer_mode: "{{ test_branch == 'master' }}"
tempest_venv_download: False
# tempest_venv_bin is set to use the same naming convention
# as the other venvs (with the tag 'testing')
tempest_venv_bin: "/opt/tempest-testing/bin"
tempest_public_subnet_cidr: 10.1.3.0/24
tempest_public_subnet_allocation_pools: "10.1.3.150-10.1.3.200"
tempest_log_dir: "/var/log/"
tempest_main_group: utility_all
tempest_run: yes
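# Each tempest_service_available_* flag below is true only when the matching
# inventory group exists and is non-empty, mirroring the test_*_group pattern
# at the top of this file.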
tempest_service_available_aodh: "{{ ((groups['aodh_all'] is defined) and (groups['aodh_all'] | length > 0)) }}"
tempest_service_available_ceilometer: "{{ ((groups['ceilometer_all'] is defined) and (groups['ceilometer_all'] | length > 0)) }}"
tempest_service_available_cinder: "{{ ((groups['cinder_all'] is defined) and (groups['cinder_all'] | length > 0)) }}"
tempest_service_available_glance: "{{ ((groups['glance_all'] is defined) and (groups['glance_all'] | length > 0)) }}"
tempest_service_available_heat: "{{ ((groups['heat_all'] is defined) and (groups['heat_all'] | length > 0)) }}"
tempest_service_available_horizon: "{{ ((groups['horizon_all'] is defined) and (groups['horizon_all'] | length > 0)) }}"
tempest_service_available_neutron: "{{ ((groups['neutron_all'] is defined) and (groups['neutron_all'] | length > 0)) }}"
tempest_service_available_nova: "{{ ((groups['nova_all'] is defined) and (groups['nova_all'] | length > 0)) }}"
tempest_service_available_swift: "{{ ((groups['swift_all'] is defined) and (groups['swift_all'] | length > 0)) }}"
tempest_service_available_zaqar: "{{ ((groups['zaqar_all'] is defined) and (groups['zaqar_all'] | length > 0)) }}"
tempest_use_tempestconf: false
tempest_image_dir: "/opt/cache/files"
# openrc settings
openrc_os_password: "{{ keystone_auth_admin_password }}"
openrc_os_domain_name: "Default"
openrc_os_auth_url: "http://{{ test_keystone_host }}:5000/v3"
# Set workers for all services to optimise memory usage
ceilometer_api_workers: 2
ceilometer_collector_workers: 2
ceilometer_notification_workers: 2
cinder_osapi_volume_workers: 2
glance_api_threads: 2
heat_api_threads: 2
horizon_wsgi_threads: 2
keystone_wsgi_processes: 2
neutron_api_workers: 2
neutron_metadata_workers: 1
neutron_rpc_workers: 1
nova_api_threads: 2
nova_wsgi_processes: 2
repo_nginx_threads: 2
swift_account_server_workers: 2
swift_container_server_workers: 2
swift_object_server_workers: 2
swift_proxy_server_workers: 2
glance_wsgi_processes: 2
cinder_wsgi_processes: 2
neutron_wsgi_processes: 2
swift_wsgi_processes: 2
heat_wsgi_processes: 2
octavia_wsgi_processes: 2
ironic_wsgi_processes: 2
# PIP install options
# This ensures that libvirt-python is built from source. A pre-built
# wheel can be missing libvirt capabilities from the installed version
# of libvirt-bin, leading to nova-compute failing to start.
#
# NOTE(hwoarang): cryptography may bundle OpenSSL in the wheel, and that
# causes symbol conflicts if a different OpenSSL is provided by the
# distribution. As such, it's probably safer to re-build cryptography
# ourselves just to be sure that the correct distro libraries are used.
# See https://github.com/pyca/cryptography/issues/3804
# This keeps popping up every now and then, so it might be worth keeping
# this around even if the upstream issue is resolved.
# Cryptography now ships wheels, and the recent dynamic-linking bugs in
# wheel building [1] should be fixed, so we use the wheels.
# 1: https://github.com/pyca/cryptography/pull/3811
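# The option below is roughly equivalent to passing '--no-binary libvirt-python'
# on the pip command line, e.g. (sketch):
#   pip install --no-binary libvirt-python libvirt-python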
pip_install_options: "--no-binary libvirt-python"
# The URL/path of a constraints file to use when installing the additional pip packages.
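# Note: the value below deliberately chains a second '--constraint' flag so
# that both the OSA global pins and the branch's upper-constraints file are
# applied.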
pip_install_upper_constraints: "https://opendev.org/openstack/openstack-ansible/raw/{{ test_branch }}/global-requirement-pins.txt --constraint https://opendev.org/openstack/requirements/raw/{{ test_branch }}/upper-constraints.txt"
requirements_git_install_branch: "{{ test_branch }}"
# Overriding the developer constraints allows packages to be installed from
# local disk, taking precedence over remote sources when the repo is present.
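# Illustrative render: with test_branch 'stable/stein', the first constraints
# file below becomes local-package-constraints-stable_stein.txt under
# development_repo_directory.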
pip_install_developer_constraints: "--constraint {{ development_repo_directory }}/local-package-constraints-{{ test_branch | replace('/','_') }}.txt --constraint /opt/developer-pip-constraints.txt"
# As role tests are executed in temporary environments, we don't need
# to always fetch the latest get-pip.py script
pip_get_pip_force: no
# Test install options
install_test_packages: True
## Parameters provided to the wait_for_connection module after a container
## reboot is triggered by the role
lxc_container_wait_params:
  # Wait 3 seconds before attempting the first connection
  delay: 3
  # Wait 60 seconds for the container to respond
  timeout: 60
## Developer mode variables
# Location of development repositories directory
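# Resolution order (sketch): $OPENSTACK_REPO_DIRECTORY when set, otherwise
# $TESTING_HOME/src.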
development_repo_directory: "{{ lookup('env', 'OPENSTACK_REPO_DIRECTORY') | default(lookup('env', 'TESTING_HOME') ~ '/src', true) }}"
## Container bind mounts
# Bind mount the repositories into each container to allow development work
lxc_container_bind_mounts:
  - host_directory: "{{ development_repo_directory }}"
    container_directory: "{{ development_repo_directory }}"
  # Bind mount the pip cache into each container to speed up builds
  - host_directory: "/root/.cache/pip"
    container_directory: "/root/.cache/pip"
## haproxy variables
haproxy_ssl: no
openstack_user_kernel_options:
  - key: 'kernel.printk'
    value: '4 1 7 4'
# python3
## OpenStack service python version
openstack_venv_python_executable: "python3"
uwsgi_python_executable: "{{ openstack_venv_python_executable }}"