Add Kinetic and Zed support

* sync charm-helpers to classic charms
* change openstack-origin/source default to zed
* align testing with zed
* add new zed bundles
* add zed bundles to tests.yaml
* add zed tests to osci.yaml and .zuul.yaml
* update build-on and run-on bases
* add bindep.txt for py310
* sync tox.ini and requirements.txt for ruamel
* use charmcraft_channel 2.0/stable
* drop reactive plugin overrides
* move interface/layer env vars to charmcraft.yaml

Change-Id: I2cb698f719106e54b06009f24ea47259419e9cad
Author: Corey Bryant
Date: 2022-06-10 20:14:48 +00:00
Parent: e47ecb9359
Commit: 448daa6fdf
24 changed files with 124 additions and 343 deletions

@ -1,4 +1,4 @@
- project:
templates:
- openstack-python3-charm-yoga-jobs
- openstack-python3-charm-zed-jobs
- openstack-cover-jobs

bindep.txt (new file)

@ -0,0 +1,3 @@
libffi-dev [platform:dpkg]
libxml2-dev [platform:dpkg]
libxslt1-dev [platform:dpkg]

@ -24,13 +24,10 @@ parts:
bases:
- build-on:
- name: ubuntu
channel: "20.04"
channel: "22.04"
architectures:
- amd64
run-on:
- name: ubuntu
channel: "20.04"
architectures: [amd64, s390x, ppc64el, arm64]
- name: ubuntu
channel: "22.04"
architectures: [amd64, s390x, ppc64el, arm64]

@ -24,8 +24,6 @@
import os
from base64 import b64decode
from charmhelpers.core import host
from charmhelpers.core.hookenv import (
config as config_get,
@ -33,13 +31,8 @@ from charmhelpers.core.hookenv import (
relation_ids,
related_units as relation_list,
log,
ERROR,
INFO,
)
from charmhelpers.contrib.openstack.cert_utils import (
x509_get_pubkey,
x509_validate_cert,
)
# This file contains the CA cert from the charms ssl_ca configuration
# option, in future the file name should be updated reflect that.
@ -68,11 +61,6 @@ def get_cert(cn=None):
if not key:
key = relation_get(ssl_key_attr,
rid=r_id, unit=unit)
# this likely fails too quietly, raise?
if not x509_validate_cert(b64decode(cert), ssl_key=b64decode(key)):
return (None, None)
return (cert, key)
@ -87,11 +75,6 @@ def get_ca_cert():
if ca_cert is None:
ca_cert = relation_get('ca_cert',
rid=r_id, unit=unit)
# this likely fails too quietly, raise?
if not x509_get_pubkey(b64decode(ca_cert)):
return None
return ca_cert

@ -467,7 +467,7 @@ def ns_query(address):
try:
answers = dns.resolver.query(address, rtype)
except dns.resolver.NXDOMAIN:
except (dns.resolver.NXDOMAIN, dns.resolver.NoNameservers):
return None
if answers:

@ -16,12 +16,8 @@
import os
import json
import subprocess
import tempfile
from base64 import b64decode
import six
from charmhelpers.contrib.network.ip import (
get_hostname,
resolve_network_cidr,
@ -38,7 +34,6 @@ from charmhelpers.core.hookenv import (
log,
WARNING,
INFO,
ERROR,
)
from charmhelpers.contrib.openstack.ip import (
resolve_address,
@ -58,9 +53,9 @@ from charmhelpers.core.host import (
write_file,
)
# This file contains the CA cert from the charms ssl_ca configuration
# option, in future the file name should be updated reflect that.
CONFIG_CA_CERT_FILE = 'keystone_juju_ca_cert'
from charmhelpers.contrib.hahelpers.apache import (
CONFIG_CA_CERT_FILE,
)
class CertRequest(object):
@ -342,7 +337,7 @@ def _manage_ca_certs(ca, cert_relation_id):
"""
config_ssl_ca = config('ssl_ca')
config_cert_file = ca_cert_absolute_path(CONFIG_CA_CERT_FILE)
if config_ssl_ca and x509_get_pubkey(b64decode(config_ssl_ca)):
if config_ssl_ca:
log("Installing CA certificate from charm ssl_ca config to {}".format(
config_cert_file), INFO)
install_ca_cert(
@ -351,12 +346,10 @@ def _manage_ca_certs(ca, cert_relation_id):
elif os.path.exists(config_cert_file):
log("Removing CA certificate {}".format(config_cert_file), INFO)
os.remove(config_cert_file)
if x509_get_pubkey(ca.encode()):
log("Installing CA certificate from certificate relation", INFO)
install_ca_cert(
ca.encode(),
name=get_cert_relation_ca_name(cert_relation_id))
log("Installing CA certificate from certificate relation", INFO)
install_ca_cert(
ca.encode(),
name=get_cert_relation_ca_name(cert_relation_id))
def process_certificates(service_name, relation_id, unit,
@ -448,79 +441,3 @@ def get_bundle_for_cn(cn, relation_name=None):
if cert_bundle:
break
return cert_bundle
def _x509_normalize_input(input_):
if isinstance(input_, six.text_type):
return input_.encode()
if isinstance(input_, bytes):
return input_
return bytes()
def x509_validate_cert_chain(ssl_cert, ssl_ca=None):
cmd = ['openssl', 'verify']
with tempfile.TemporaryFile() as cert_fd, tempfile.NamedTemporaryFile() as ca_fd:
cert_fd.write(ssl_cert); cert_fd.seek(0) # noqa: E702
if ssl_ca:
ca_fd.write(ssl_ca); ca_fd.seek(0) # noqa: E702
cmd.extend(['-CAfile', ca_fd.name])
try:
subprocess.check_output(cmd, stdin=cert_fd)
return True
except subprocess.CalledProcessError as e:
log('Chain verification exited with code {} and output:\n{}'.format(
e.returncode, e.output.decode()), level=ERROR)
return False
raise Exception('Error during certificate chain validation')
def x509_get_pubkey(input_, private=False):
# since we don't want to leak private key material and generally wouldn't
# want to loiter on the filesystem more than it's necessary, we can't use
# `ssl.SSLContext.load_cert_chain` to confirm cert-key parity without
# dropping `tempfile.NamedTemporaryFile`s and passing their paths in
#
# issue with this approach is that any sort of exception condition may let
# these files linger, posing a leakage risk, so the best we can afford is
# `tempfile.TemporaryFile` which immediately removes filesystem-mappable
# paths, while keeping a file descriptor, sufficient for passing as stdin
# to subprocess calls
input_ = _x509_normalize_input(input_)
if not input_:
return bytes()
cmd = ['openssl']
cmd += ['pkey', '-pubout'] if private else ['x509', '-pubkey', '-noout']
cmd += ['-in', '/dev/stdin']
with tempfile.TemporaryFile() as fd:
fd.write(input_); fd.seek(0) # noqa: E702
try:
return subprocess.check_output(cmd, stdin=fd).strip()
except subprocess.CalledProcessError as e:
log('Getting public key exited with code {} and output:\n{}'.format(
e.returncode, e.output.decode()), level=ERROR)
return bytes()
def x509_validate_cert_parity(ssl_cert, ssl_key):
priv = x509_get_pubkey(ssl_key, private=True)
pub = x509_get_pubkey(ssl_cert)
return all([priv, pub, priv == pub])
def x509_validate_cert(ssl_cert, ssl_key=None, ssl_ca=None, validate_chain=False):
# normalize string input types
ssl_cert = _x509_normalize_input(ssl_cert)
ssl_key = _x509_normalize_input(ssl_key)
ssl_ca = _x509_normalize_input(ssl_ca)
return all([
x509_validate_cert_parity(
ssl_cert, ssl_key) if ssl_cert and ssl_key else True,
x509_validate_cert_chain(ssl_cert, ssl_ca) if validate_chain else True
])

@ -87,10 +87,6 @@ from charmhelpers.contrib.hahelpers.apache import (
get_ca_cert,
install_ca_cert,
)
from charmhelpers.contrib.openstack.cert_utils import (
x509_get_pubkey,
x509_validate_cert,
)
from charmhelpers.contrib.openstack.neutron import (
neutron_plugin_attribute,
parse_data_port_mappings,
@ -317,22 +313,17 @@ class PostgresqlDBContext(OSContextGenerator):
def db_ssl(rdata, ctxt, ssl_dir):
ssl_ca = b64decode(rdata.get('ssl_ca', bytes()))
if 'ssl_ca' in rdata and x509_get_pubkey(ssl_ca) and ssl_dir:
if 'ssl_ca' in rdata and ssl_dir:
ca_path = os.path.join(ssl_dir, 'db-client.ca')
with open(ca_path, 'wb') as fh:
fh.write(ssl_ca)
fh.write(b64decode(rdata['ssl_ca']))
ctxt['database_ssl_ca'] = ca_path
elif 'ssl_ca' in rdata:
log("Charm not setup for ssl support but ssl ca found", level=INFO)
return ctxt
ssl_cert = b64decode(rdata.get('ssl_cert', bytes()))
ssl_key = b64decode(rdata.get('ssl_key', bytes()))
if 'ssl_cert' in rdata and x509_validate_cert(
ssl_cert, ssl_key, ssl_ca.decode() if ssl_ca else None
):
if 'ssl_cert' in rdata:
cert_path = os.path.join(
ssl_dir, 'db-client.cert')
if not os.path.exists(cert_path):
@ -340,12 +331,12 @@ def db_ssl(rdata, ctxt, ssl_dir):
time.sleep(60)
with open(cert_path, 'wb') as fh:
fh.write(ssl_cert)
fh.write(b64decode(rdata['ssl_cert']))
ctxt['database_ssl_cert'] = cert_path
key_path = os.path.join(ssl_dir, 'db-client.key')
with open(key_path, 'wb') as fh:
fh.write(ssl_key)
fh.write(b64decode(rdata['ssl_key']))
ctxt['database_ssl_key'] = key_path
@ -706,7 +697,7 @@ class AMQPContext(OSContextGenerator):
rabbitmq_port = ssl_port
ssl_ca = relation_get('ssl_ca', rid=rid, unit=unit)
if ssl_ca and x509_get_pubkey(b64decode(ssl_ca)):
if ssl_ca:
ctxt['rabbit_ssl_ca'] = ssl_ca
if relation_get('ha_queues', rid=rid, unit=unit) is not None:
@ -2569,14 +2560,18 @@ class OVSDPDKDeviceContext(OSContextGenerator):
:rtype: List[int]
"""
cores = []
ranges = cpulist.split(',')
for cpu_range in ranges:
if "-" in cpu_range:
cpu_min_max = cpu_range.split('-')
cores += range(int(cpu_min_max[0]),
int(cpu_min_max[1]) + 1)
else:
cores.append(int(cpu_range))
if cpulist and re.match(r"^[0-9,\-^]*$", cpulist):
ranges = cpulist.split(',')
for cpu_range in ranges:
if "-" in cpu_range:
cpu_min_max = cpu_range.split('-')
cores += range(int(cpu_min_max[0]),
int(cpu_min_max[1]) + 1)
elif "^" in cpu_range:
cpu_rm = cpu_range.split('^')
cores.remove(int(cpu_rm[1]))
else:
cores.append(int(cpu_range))
return cores
def _numa_node_cores(self):
@ -2595,36 +2590,32 @@ class OVSDPDKDeviceContext(OSContextGenerator):
def cpu_mask(self):
"""Get hex formatted CPU mask
The mask is based on using the first config:dpdk-socket-cores
cores of each NUMA node in the unit.
:returns: hex formatted CPU mask
:rtype: str
"""
return self.cpu_masks()['dpdk_lcore_mask']
def cpu_masks(self):
"""Get hex formatted CPU masks
The mask is based on using the first config:dpdk-socket-cores
cores of each NUMA node in the unit, followed by the
next config:pmd-socket-cores
:returns: Dict of hex formatted CPU masks
:rtype: Dict[str, str]
"""
num_lcores = config('dpdk-socket-cores')
pmd_cores = config('pmd-socket-cores')
lcore_mask = 0
pmd_mask = 0
num_cores = config('dpdk-socket-cores')
mask = 0
for cores in self._numa_node_cores().values():
for core in cores[:num_lcores]:
lcore_mask = lcore_mask | 1 << core
for core in cores[num_lcores:][:pmd_cores]:
pmd_mask = pmd_mask | 1 << core
return {
'pmd_cpu_mask': format(pmd_mask, '#04x'),
'dpdk_lcore_mask': format(lcore_mask, '#04x')}
for core in cores[:num_cores]:
mask = mask | 1 << core
return format(mask, '#04x')
@classmethod
def pmd_cpu_mask(cls):
"""Get hex formatted pmd CPU mask
The mask is based on config:pmd-cpu-set.
:returns: hex formatted CPU mask
:rtype: str
"""
mask = 0
cpu_list = cls._parse_cpu_list(config('pmd-cpu-set'))
if cpu_list:
for core in cpu_list:
mask = mask | 1 << core
return format(mask, '#x')
def socket_memory(self):
"""Formatted list of socket memory configuration per socket.
@ -2703,6 +2694,7 @@ class OVSDPDKDeviceContext(OSContextGenerator):
ctxt['device_whitelist'] = self.device_whitelist()
ctxt['socket_memory'] = self.socket_memory()
ctxt['cpu_mask'] = self.cpu_mask()
ctxt['pmd_cpu_mask'] = self.pmd_cpu_mask()
return ctxt
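
The cpu_mask()/pmd_cpu_mask() rework above boils down to parsing a cpulist string (ranges, single cores and "^" exclusions) and OR-ing one bit per selected core into a hex mask. A minimal standalone sketch of that arithmetic, with illustrative helper names (the real methods read dpdk-socket-cores and pmd-cpu-set from charm config):

    import re

    def parse_cpu_list(cpulist):
        # e.g. "0-3,8-11,^2" -> [0, 1, 3, 8, 9, 10, 11]
        cores = []
        if cpulist and re.match(r"^[0-9,\-^]*$", cpulist):
            for cpu_range in cpulist.split(','):
                if '-' in cpu_range:
                    start, end = cpu_range.split('-')
                    cores += range(int(start), int(end) + 1)
                elif '^' in cpu_range:
                    # exclusion; assumes the core was added by an earlier range
                    cores.remove(int(cpu_range.split('^')[1]))
                else:
                    cores.append(int(cpu_range))
        return cores

    def to_mask(cores):
        # one bit per selected core, rendered as hex like the context above
        mask = 0
        for core in cores:
            mask |= 1 << core
        return format(mask, '#x')

    print(to_mask(parse_cpu_list('0-3,^2')))  # 0xb -> cores 0, 1 and 3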

@ -158,6 +158,7 @@ OPENSTACK_CODENAMES = OrderedDict([
('2021.1', 'wallaby'),
('2021.2', 'xena'),
('2022.1', 'yoga'),
('2022.2', 'zed'),
])
# The ugly duckling - must list releases oldest to newest
@ -400,13 +401,16 @@ def get_os_codename_version(vers):
error_out(e)
def get_os_version_codename(codename, version_map=OPENSTACK_CODENAMES):
def get_os_version_codename(codename, version_map=OPENSTACK_CODENAMES,
raise_exception=False):
'''Determine OpenStack version number from codename.'''
for k, v in version_map.items():
if v == codename:
return k
e = 'Could not derive OpenStack version for '\
'codename: %s' % codename
if raise_exception:
raise ValueError(str(e))
error_out(e)
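
With the new raise_exception flag a caller can handle an unknown codename itself instead of letting error_out() abort the hook. A short usage sketch, assuming this hunk is charmhelpers.contrib.openstack.utils and that 'antelope' is simply a codename missing from the map:

    from charmhelpers.contrib.openstack.utils import get_os_version_codename

    print(get_os_version_codename('zed'))  # '2022.2'

    try:
        get_os_version_codename('antelope', raise_exception=True)
    except ValueError:
        # the caller decides how to recover; previously error_out() always fired
        pass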

@ -277,7 +277,7 @@ def service_resume(service_name, init_dir="/etc/init",
return started
def service(action, service_name, **kwargs):
def service(action, service_name=None, **kwargs):
"""Control a system service.
:param action: the action to take on the service
@ -286,7 +286,9 @@ def service(action, service_name, **kwargs):
the form of key=value.
"""
if init_is_systemd(service_name=service_name):
cmd = ['systemctl', action, service_name]
cmd = ['systemctl', action]
if service_name is not None:
cmd.append(service_name)
else:
cmd = ['service', service_name, action]
for key, value in kwargs.items():
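
Making service_name optional lets callers issue systemctl actions that take no unit name. A brief sketch, assuming this hunk is charmhelpers.core.host and that systemd is in use:

    from charmhelpers.core.host import service

    service('restart', 'apache2')   # unchanged: systemctl restart apache2
    service('daemon-reload')        # now valid: systemctl daemon-reload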

@ -30,6 +30,7 @@ UBUNTU_RELEASES = (
'hirsute',
'impish',
'jammy',
'kinetic',
)

@ -15,7 +15,8 @@
import os
import json
import inspect
from collections import Iterable, OrderedDict
from collections import OrderedDict
from collections.abc import Iterable
from charmhelpers.core import host
from charmhelpers.core import hookenv
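
Python 3.10 (the py310 targets added in this change) removed the deprecated ABC aliases from collections, so Iterable must now come from collections.abc. A tiny illustration of the portable import:

    from collections import OrderedDict    # still lives in collections
    from collections.abc import Iterable   # moved here; required on py3.10+

    assert isinstance([], Iterable)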

@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import os
import hashlib
import re
@ -24,11 +25,15 @@ from charmhelpers.payload.archive import (
get_archive_handler,
extract,
)
from charmhelpers.core.hookenv import (
env_proxy_settings,
)
from charmhelpers.core.host import mkdir, check_hash
from urllib.request import (
build_opener, install_opener, urlopen, urlretrieve,
HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler,
ProxyHandler
)
from urllib.parse import urlparse, urlunparse, parse_qs
from urllib.error import URLError
@ -50,6 +55,20 @@ def splitpasswd(user):
return user, None
@contextlib.contextmanager
def proxy_env():
"""
Creates a context which temporarily modifies the proxy settings in os.environ.
"""
restore = {**os.environ} # Copy the current os.environ
juju_proxies = env_proxy_settings() or {}
os.environ.update(**juju_proxies) # Insert or Update the os.environ
yield os.environ
for key in juju_proxies:
del os.environ[key] # remove any keys which were added or updated
os.environ.update(**restore) # restore any original values
class ArchiveUrlFetchHandler(BaseFetchHandler):
"""
Handler to download archive files from arbitrary URLs.
@ -80,6 +99,7 @@ class ArchiveUrlFetchHandler(BaseFetchHandler):
# propagate all exceptions
# URLError, OSError, etc
proto, netloc, path, params, query, fragment = urlparse(source)
handlers = []
if proto in ('http', 'https'):
auth, barehost = splituser(netloc)
if auth is not None:
@ -89,10 +109,13 @@ class ArchiveUrlFetchHandler(BaseFetchHandler):
# Realm is set to None in add_password to force the username and password
# to be used whatever the realm
passman.add_password(None, source, username, password)
authhandler = HTTPBasicAuthHandler(passman)
opener = build_opener(authhandler)
install_opener(opener)
response = urlopen(source)
handlers.append(HTTPBasicAuthHandler(passman))
with proxy_env():
handlers.append(ProxyHandler())
opener = build_opener(*handlers)
install_opener(opener)
response = urlopen(source)
try:
with open(dest, 'wb') as dest_file:
dest_file.write(response.read())
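
The fetch handler now builds its urllib opener inside proxy_env(), so any Juju model proxy settings returned by env_proxy_settings() are visible when ProxyHandler() snapshots the *_proxy variables from the environment. A standalone sketch of the same pattern (the proxy URL and hard-coded dict are illustrative stand-ins for what env_proxy_settings() would return, and a try/finally is added here so the environment is restored even if the body raises):

    import contextlib
    import os
    from urllib.request import ProxyHandler, build_opener

    @contextlib.contextmanager
    def proxy_env(juju_proxies):
        restore = {**os.environ}            # snapshot the current environment
        os.environ.update(**juju_proxies)   # overlay the Juju proxy settings
        try:
            yield os.environ
        finally:
            for key in juju_proxies:
                os.environ.pop(key, None)   # drop keys we added or changed
            os.environ.update(**restore)    # restore any original values

    with proxy_env({'https_proxy': 'http://proxy.example:3128'}):
        # ProxyHandler() reads *_proxy at construction time, so it must be
        # created inside the context, exactly as the hunk above does.
        opener = build_opener(ProxyHandler())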

@ -222,6 +222,14 @@ CLOUD_ARCHIVE_POCKETS = {
'yoga/proposed': 'focal-proposed/yoga',
'focal-yoga/proposed': 'focal-proposed/yoga',
'focal-proposed/yoga': 'focal-proposed/yoga',
# Zed
'zed': 'jammy-updates/zed',
'jammy-zed': 'jammy-updates/zed',
'jammy-zed/updates': 'jammy-updates/zed',
'jammy-updates/zed': 'jammy-updates/zed',
'zed/proposed': 'jammy-proposed/zed',
'jammy-zed/proposed': 'jammy-proposed/zed',
'jammy-proposed/zed': 'jammy-proposed/zed',
}
@ -248,6 +256,7 @@ OPENSTACK_RELEASES = (
'wallaby',
'xena',
'yoga',
'zed',
)
@ -274,6 +283,7 @@ UBUNTU_OPENSTACK_RELEASE = OrderedDict([
('hirsute', 'wallaby'),
('impish', 'xena'),
('jammy', 'yoga'),
('kinetic', 'zed'),
])
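
The new table entries mean both a UCA origin on jammy and the kinetic distro archive resolve to Zed. A couple of illustrative lookups, assuming this hunk is charmhelpers/fetch/ubuntu.py:

    from charmhelpers.fetch.ubuntu import (
        CLOUD_ARCHIVE_POCKETS,
        UBUNTU_OPENSTACK_RELEASE,
    )

    # openstack-origin: cloud:jammy-zed -> UCA pocket jammy-updates/zed
    print(CLOUD_ARCHIVE_POCKETS['jammy-zed'])    # 'jammy-updates/zed'
    # a unit running on kinetic gets zed straight from the Ubuntu archive
    print(UBUNTU_OPENSTACK_RELEASE['kinetic'])   # 'zed'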

@ -10,7 +10,7 @@ options:
Setting this to True will allow supporting services to log to syslog.
openstack-origin:
type: string
default: yoga
default: zed
description: |
Repository from which to install. May be one of the following:
distro (default), ppa:somecustom/ppa, a deb url sources entry,

@ -7,11 +7,10 @@ description: |
tags:
- openstack
- misc
series:
- jammy
extra-bindings:
public: # Only used to allow specifying a hostname for DNS-HA.
series:
- focal
- jammy
provides:
nrpe-external-master:
interface: nrpe-external-master

@ -1,9 +1,7 @@
- project:
templates:
- charm-unit-jobs-py38
- charm-unit-jobs-py310
- charm-xena-functional-jobs
- charm-yoga-functional-jobs
- charm-zed-functional-jobs
check:
jobs:
# gr tests are disabled due to:
@ -14,13 +12,13 @@
needs_charm_build: true
charm_build_name: openstack-dashboard
build_type: charmcraft
charmcraft_channel: 2.0/stable
- job:
name: jammy-yoga-gr
parent: func-target
dependencies:
- osci-lint
- tox-py38
- name: tox-py310
soft: true
- charm-build

@ -11,9 +11,6 @@ pbr==5.6.0
simplejson>=2.2.0
netifaces>=0.10.4
# Build requirements
cffi==1.14.6; python_version < '3.6' # cffi 1.15.0 drops support for py35.
# NOTE: newer versions of cryptography require a Rust compiler to build,
# see
# * https://github.com/openstack-charmers/zaza/issues/421
@ -27,8 +24,6 @@ netaddr>0.7.16,<0.8.0
Jinja2>=2.6 # BSD License (3 clause)
six>=1.9.0
# dnspython 2.0.0 dropped py3.5 support
dnspython<2.0.0; python_version < '3.6'
dnspython; python_version >= '3.6'
dnspython
psutil>=1.1.1,<2.0.0

@ -8,7 +8,6 @@
# all of its own requirements and if it doesn't, fix it there.
#
pyparsing<3.0.0 # aodhclient is pinned in zaza and needs pyparsing < 3.0.0, but cffi also needs it, so pin here.
cffi==1.14.6; python_version < '3.6' # cffi 1.15.0 drops support for py35.
setuptools<50.0.0 # https://github.com/pypa/setuptools/commit/04e3df22df840c6bb244e9b27bc56750c44b7c85
requests>=2.18.4
@ -19,26 +18,12 @@ stestr>=2.2.0
# https://github.com/mtreinish/stestr/issues/145
cliff<3.0.0
# Dependencies of stestr. Newer versions use keywords that didn't exist in
# python 3.5 yet (e.g. "ModuleNotFoundError")
importlib-metadata<3.0.0; python_version < '3.6'
importlib-resources<3.0.0; python_version < '3.6'
# Some Zuul nodes sometimes pull newer versions of these dependencies which
# dropped support for python 3.5:
osprofiler<2.7.0;python_version<'3.6'
stevedore<1.31.0;python_version<'3.6'
debtcollector<1.22.0;python_version<'3.6'
oslo.utils<=3.41.0;python_version<'3.6'
coverage>=4.5.2
pyudev # for ceph-* charm unit tests (need to fix the ceph-* charm unit tests/mocking)
git+https://github.com/openstack-charmers/zaza.git#egg=zaza
git+https://github.com/openstack-charmers/zaza-openstack-tests.git#egg=zaza.openstack
# Needed for charm-glance:
git+https://opendev.org/openstack/tempest.git#egg=tempest;python_version>='3.8'
tempest<31.0.0;python_version<'3.8'
tempest<24.0.0;python_version<'3.6'
git+https://opendev.org/openstack/tempest.git#egg=tempest
croniter # needed for charm-rabbitmq-server unit tests

@ -1,36 +0,0 @@
series: focal
local_overlay_enabled: false
machines:
'0':
constraints: virt-type=kvm mem=3072M
'1':
constraints: virt-type=kvm mem=3072M
'2':
constraints: virt-type=kvm mem=3072M
'3':
services:
mysql-innodb-cluster:
charm: ch:mysql-innodb-cluster
num_units: 3
options:
source: distro
to:
- '0'
- '1'
- '2'
channel: latest/edge
vault-mysql-router:
charm: ch:mysql-router
channel: latest/edge
vault:
num_units: 1
series: bionic
charm: ch:vault
to:
- '4'
channel: latest/edge
relations:
- - vault:shared-db
- vault-mysql-router:shared-db
- - vault-mysql-router:db-router
- mysql-innodb-cluster:db-router

@ -1,73 +0,0 @@
variables:
openstack-origin: &openstack-origin cloud:focal-yoga
series: focal
comment:
- 'machines section to decide order of deployment. database sooner = faster'
- 'virt-type=kvm is workaround while awaiting new release of python-libjuju'
machines:
'0':
constraints: virt-type=kvm mem=3072M
'1':
constraints: virt-type=kvm mem=3072M
'2':
constraints: virt-type=kvm mem=3072M
'3':
constraints: virt-type=kvm
'4':
constraints: virt-type=kvm mem=3072M
applications:
keystone-mysql-router:
charm: ch:mysql-router
channel: latest/edge
openstack-dashboard-mysql-router:
charm: ch:mysql-router
channel: latest/edge
mysql-innodb-cluster:
charm: ch:mysql-innodb-cluster
num_units: 3
options:
source: *openstack-origin
to:
- '0'
- '1'
- '2'
channel: latest/edge
keystone:
charm: ch:keystone
num_units: 1
options:
openstack-origin: *openstack-origin
to:
- '3'
channel: latest/edge
openstack-dashboard:
charm: ../../openstack-dashboard.charm
num_units: 1
options:
openstack-origin: *openstack-origin
to:
- '4'
relations:
- - 'keystone:shared-db'
- 'keystone-mysql-router:shared-db'
- - 'keystone-mysql-router:db-router'
- 'mysql-innodb-cluster:db-router'
- - 'openstack-dashboard:shared-db'
- 'openstack-dashboard-mysql-router:shared-db'
- - 'openstack-dashboard-mysql-router:db-router'
- 'mysql-innodb-cluster:db-router'
- - 'openstack-dashboard:identity-service'
- 'keystone:identity-service'

@ -1,7 +1,7 @@
variables:
openstack-origin: &openstack-origin cloud:focal-xena
openstack-origin: &openstack-origin cloud:jammy-zed
series: focal
series: jammy
comment:
- 'machines section to decide order of deployment. database sooner = faster'

@ -1,7 +1,7 @@
variables:
openstack-origin: &openstack-origin distro
series: impish
series: kinetic
comment:
- 'machines section to decide order of deployment. database sooner = faster'

@ -4,15 +4,15 @@ comment:
- ''
smoke_bundles:
- focal-xena
- jammy-yoga
gate_bundles:
- focal-xena
- impish-xena
- jammy-yoga
dev_bundles:
- focal-yoga
- jammy-yoga
- jammy-zed
- kinetic-zed
- jammy-yoga-gr:
- vault: jammy-vault
- dashboard: jammy-yoga-cmr-vault
@ -45,8 +45,7 @@ tests_options:
policyd:
service: openstack-dashboard
force_deploy:
- impish-xena
- jammy-yoga
- kinetic-zed
target_deploy_status:
vault:

tox.ini

@ -48,42 +48,23 @@ basepython = python3
deps = -r{toxinidir}/build-requirements.txt
commands =
charmcraft clean
charmcraft -v build
charmcraft -v pack
{toxinidir}/rename.sh
[testenv:py310]
basepython = python3.10
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
[testenv:py3]
basepython = python3
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
[testenv:py36]
basepython = python3.6
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
commands = stestr run --slowest {posargs}
[testenv:py38]
basepython = python3.8
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
commands = stestr run --slowest {posargs}
[testenv:py39]
basepython = python3.9
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
commands = stestr run --slowest {posargs}
[testenv:py310]
basepython = python3.10
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
commands = stestr run --slowest {posargs}
[testenv:pep8]
basepython = python3
deps = flake8==3.9.2
charm-tools==2.8.3
git+https://github.com/juju/charm-tools.git
commands = flake8 {posargs} hooks unit_tests tests actions lib files
charm-proof