Add 2023.2 Bobcat support

* sync charm-helpers to classic charms
* change openstack-origin/source default to quincy
* add mantic to metadata series
* align testing with bobcat
* add new bobcat bundles
* add bobcat bundles to tests.yaml
* add bobcat tests to osci.yaml
* update build-on and run-on bases
* drop kinetic

Change-Id: I7449eba63107b43525359fb92ae1a0ad9e648bab
Corey Bryant 2023-07-18 16:47:18 -04:00
parent 6a0b48e916
commit 986981c6f4
17 changed files with 180 additions and 94 deletions

View File

@@ -33,9 +33,9 @@ bases:
       - name: ubuntu
         channel: "22.04"
         architectures: [amd64, s390x, ppc64el, arm64]
-      - name: ubuntu
-        channel: "22.10"
-        architectures: [amd64, s390x, ppc64el, arm64]
       - name: ubuntu
         channel: "23.04"
         architectures: [amd64, s390x, ppc64el, arm64]
+      - name: ubuntu
+        channel: "23.10"
+        architectures: [amd64, s390x, ppc64el, arm64]

View File

@@ -5,7 +5,7 @@ options:
     description: OSD debug level. Max is 20.
   source:
     type: string
-    default: yoga
+    default: quincy
     description: |
       Optional configuration to support use of additional sources such as:
       .

View File

@@ -221,6 +221,13 @@ def https():
         return True
     if config_get('ssl_cert') and config_get('ssl_key'):
         return True
+    # Local import to avoid circular dependency.
+    import charmhelpers.contrib.openstack.cert_utils as cert_utils
+    if (
+        cert_utils.get_certificate_request() and not
+        cert_utils.get_requests_for_local_unit("certificates")
+    ):
+        return False
     for r_id in relation_ids('certificates'):
         for unit in relation_list(r_id):
             ca = relation_get('ca', rid=r_id, unit=unit)

View File

@@ -409,6 +409,9 @@ def get_requests_for_local_unit(relation_name=None):
     relation_name = relation_name or 'certificates'
     bundles = []
     for rid in relation_ids(relation_name):
+        sent = relation_get(rid=rid, unit=local_unit())
+        legacy_keys = ['certificate_name', 'common_name']
+        is_legacy_request = set(sent).intersection(legacy_keys)
         for unit in related_units(rid):
             data = relation_get(rid=rid, unit=unit)
             if data.get(raw_certs_key):
@@ -416,6 +419,14 @@ def get_requests_for_local_unit(relation_name=None):
                     'ca': data['ca'],
                     'chain': data.get('chain'),
                     'certs': json.loads(data[raw_certs_key])})
+            elif is_legacy_request:
+                bundles.append({
+                    'ca': data['ca'],
+                    'chain': data.get('chain'),
+                    'certs': {sent['common_name']:
+                              {'cert': data.get(local_name + '.server.cert'),
+                               'key': data.get(local_name + '.server.key')}}})
     return bundles

View File

@@ -1748,6 +1748,9 @@ class WSGIWorkerConfigContext(WorkerConfigContext):
     def __call__(self):
         total_processes = _calculate_workers()
+        enable_wsgi_rotation = config('wsgi-rotation')
+        if enable_wsgi_rotation is None:
+            enable_wsgi_rotation = True
         ctxt = {
             "service_name": self.service_name,
             "user": self.user,
@@ -1761,6 +1764,7 @@ class WSGIWorkerConfigContext(WorkerConfigContext):
             "public_processes": int(math.ceil(self.public_process_weight *
                                               total_processes)),
             "threads": 1,
+            "wsgi_rotation": enable_wsgi_rotation,
         }
         return ctxt
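A minimal sketch of the defaulting rule this hunk introduces (not the charm-helpers implementation itself): a wsgi-rotation option that is absent from config, i.e. config() returns None, is treated as enabled.

def wsgi_rotation_enabled(config_value):
    """Treat an unset option (None) as enabled; otherwise use the value as given."""
    return True if config_value is None else config_value

assert wsgi_rotation_enabled(None) is True    # option not set in config
assert wsgi_rotation_enabled(False) is False  # explicitly disabled by the operator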

View File

@@ -160,6 +160,7 @@ OPENSTACK_CODENAMES = OrderedDict([
     ('2022.1', 'yoga'),
     ('2022.2', 'zed'),
     ('2023.1', 'antelope'),
+    ('2023.2', 'bobcat'),
 ])
 # The ugly duckling - must list releases oldest to newest
@@ -957,7 +958,7 @@ def os_requires_version(ostack_release, pkg):
     def wrap(f):
         @wraps(f)
         def wrapped_f(*args):
-            if os_release(pkg) < ostack_release:
+            if CompareOpenStackReleases(os_release(pkg)) < ostack_release:
                 raise Exception("This hook is not supported on releases"
                                 " before %s" % ostack_release)
             f(*args)
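The second hunk fixes an ordering bug: codenames compared as plain strings sort alphabetically, not by release age, which breaks once the codename alphabet wraps around. A small self-contained sketch of the problem (the release list is truncated for illustration):

# Oldest to newest; note the alphabet wraps around after 'zed'.
RELEASES = ['yoga', 'zed', 'antelope', 'bobcat']

def release_index(codename):
    """Compare releases by their position in the known ordering, not by spelling."""
    return RELEASES.index(codename)

assert 'antelope' < 'zed'                                # string compare: antelope looks "older"
assert release_index('antelope') > release_index('zed')  # semantic compare: antelope is newer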

View File

@@ -28,7 +28,6 @@ import os
 import shutil
 import json
 import time
-import uuid
 from subprocess import (
     check_call,
@@ -1677,6 +1676,10 @@ class CephBrokerRq(object):
     The API is versioned and defaults to version 1.
     """
+    # The below hash is the result of running
+    # `hashlib.sha1('[]'.encode()).hexdigest()`
+    EMPTY_LIST_SHA = '97d170e1550eee4afc0af065b78cda302a97674c'
     def __init__(self, api_version=1, request_id=None, raw_request_data=None):
         """Initialize CephBrokerRq object.
@@ -1685,8 +1688,12 @@ class CephBrokerRq(object):
         :param api_version: API version for request (default: 1).
         :type api_version: Optional[int]
-        :param request_id: Unique identifier for request.
-                           (default: string representation of generated UUID)
+        :param request_id: Unique identifier for request. The identifier will
+                           be updated as ops are added or removed from the
+                           broker request. This ensures that Ceph will
+                           correctly process requests where operations are
+                           added after the initial request is processed.
+                           (default: sha1 of operations)
         :type request_id: Optional[str]
         :param raw_request_data: JSON-encoded string to build request from.
         :type raw_request_data: Optional[str]
@@ -1695,16 +1702,20 @@ class CephBrokerRq(object):
         if raw_request_data:
             request_data = json.loads(raw_request_data)
             self.api_version = request_data['api-version']
-            self.request_id = request_data['request-id']
             self.set_ops(request_data['ops'])
+            self.request_id = request_data['request-id']
         else:
             self.api_version = api_version
             if request_id:
                 self.request_id = request_id
             else:
-                self.request_id = str(uuid.uuid1())
+                self.request_id = CephBrokerRq.EMPTY_LIST_SHA
             self.ops = []
+    def _hash_ops(self):
+        """Return the sha1 of the requested Broker ops."""
+        return hashlib.sha1(json.dumps(self.ops, sort_keys=True).encode()).hexdigest()
     def add_op(self, op):
         """Add an op if it is not already in the list.
@@ -1713,6 +1724,7 @@ class CephBrokerRq(object):
         """
         if op not in self.ops:
             self.ops.append(op)
+            self.request_id = self._hash_ops()
     def add_op_request_access_to_group(self, name, namespace=None,
                                        permission=None, key_name=None,
@@ -1991,6 +2003,7 @@ class CephBrokerRq(object):
         to allow comparisons to ensure validity.
         """
         self.ops = ops
+        self.request_id = self._hash_ops()
     @property
     def request(self):
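The net effect of these hunks is that a broker request's request-id becomes a deterministic digest of its operations rather than a random UUID, so the same ops always produce the same id and adding an op changes it. A minimal sketch of the idea (the example op is illustrative only):

import hashlib
import json

def hash_ops(ops):
    """Deterministic request-id: sha1 of the JSON-encoded ops list."""
    return hashlib.sha1(json.dumps(ops, sort_keys=True).encode()).hexdigest()

assert hash_ops([]) == '97d170e1550eee4afc0af065b78cda302a97674c'  # EMPTY_LIST_SHA above
ops = [{'op': 'create-pool', 'name': 'glance', 'replicas': 3}]
assert hash_ops(ops) == hash_ops(list(ops))  # identical ops give identical request-ids
assert hash_ops(ops) != hash_ops([])         # adding an op changes the request-id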

View File

@@ -32,6 +32,7 @@ UBUNTU_RELEASES = (
     'jammy',
     'kinetic',
     'lunar',
+    'mantic',
 )

View File

@@ -238,6 +238,14 @@ CLOUD_ARCHIVE_POCKETS = {
     'antelope/proposed': 'jammy-proposed/antelope',
     'jammy-antelope/proposed': 'jammy-proposed/antelope',
     'jammy-proposed/antelope': 'jammy-proposed/antelope',
+    # bobcat
+    'bobcat': 'jammy-updates/bobcat',
+    'jammy-bobcat': 'jammy-updates/bobcat',
+    'jammy-bobcat/updates': 'jammy-updates/bobcat',
+    'jammy-updates/bobcat': 'jammy-updates/bobcat',
+    'bobcat/proposed': 'jammy-proposed/bobcat',
+    'jammy-bobcat/proposed': 'jammy-proposed/bobcat',
+    'jammy-proposed/bobcat': 'jammy-proposed/bobcat',
     # OVN
     'focal-ovn-22.03': 'focal-updates/ovn-22.03',
@@ -270,6 +278,7 @@ OPENSTACK_RELEASES = (
     'yoga',
     'zed',
     'antelope',
+    'bobcat',
 )
@@ -298,6 +307,7 @@ UBUNTU_OPENSTACK_RELEASE = OrderedDict([
     ('jammy', 'yoga'),
     ('kinetic', 'zed'),
     ('lunar', 'antelope'),
+    ('mantic', 'bobcat'),
 ])
@@ -591,7 +601,7 @@ def _get_key_by_keyid(keyid):
     curl_cmd = ['curl', keyserver_url.format(keyid)]
     # use proxy server settings in order to retrieve the key
     return subprocess.check_output(curl_cmd,
-                                   env=env_proxy_settings(['https']))
+                                   env=env_proxy_settings(['https', 'no_proxy']))
 def _dearmor_gpg_key(key_asc):
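For context, the new pocket entries are what allow a bobcat origin to resolve to the Ubuntu Cloud Archive. A simplified sketch of the lookup (the map here is a subset, and pocket_for() is a made-up helper; the real add_source() accepts many more spellings):

# Subset of CLOUD_ARCHIVE_POCKETS for illustration.
POCKETS = {
    'bobcat': 'jammy-updates/bobcat',
    'jammy-bobcat': 'jammy-updates/bobcat',
    'bobcat/proposed': 'jammy-proposed/bobcat',
}

def pocket_for(source):
    """Map an openstack-origin/source value to an Ubuntu Cloud Archive pocket."""
    key = source[len('cloud:'):] if source.startswith('cloud:') else source
    return POCKETS[key]

assert pocket_for('cloud:jammy-bobcat') == 'jammy-updates/bobcat'
assert pocket_for('bobcat/proposed') == 'jammy-proposed/bobcat'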

View File

@@ -122,13 +122,12 @@ class Cache(object):
         :raises: subprocess.CalledProcessError
         """
         pkgs = {}
-        cmd = ['dpkg-query', '--list']
+        cmd = [
+            'dpkg-query', '--show',
+            '--showformat',
+            r'${db:Status-Abbrev}\t${Package}\t${Version}\t${Architecture}\t${binary:Summary}\n'
+        ]
         cmd.extend(packages)
-        if locale.getlocale() == (None, None):
-            # subprocess calls out to locale.getpreferredencoding(False) to
-            # determine encoding. Workaround for Trusty where the
-            # environment appears to not be set up correctly.
-            locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
         try:
             output = subprocess.check_output(cmd,
                                              stderr=subprocess.STDOUT,
@@ -140,24 +139,17 @@ class Cache(object):
             if cp.returncode != 1:
                 raise
             output = cp.output
-        headings = []
         for line in output.splitlines():
-            if line.startswith('||/'):
-                headings = line.split()
-                headings.pop(0)
-                continue
-            elif (line.startswith('|') or line.startswith('+') or
-                  line.startswith('dpkg-query:')):
-                continue
-            else:
-                data = line.split(None, 4)
-                status = data.pop(0)
-                if status not in ('ii', 'hi'):
-                    continue
-                pkg = {}
-                pkg.update({k.lower(): v for k, v in zip(headings, data)})
-                if 'name' in pkg:
-                    pkgs.update({pkg['name']: pkg})
+            # only process lines for successfully installed packages
+            if not (line.startswith('ii ') or line.startswith('hi ')):
+                continue
+            status, name, version, arch, desc = line.split('\t', 4)
+            pkgs[name] = {
+                'name': name,
+                'version': version,
+                'architecture': arch,
+                'description': desc,
+            }
         return pkgs
     def _apt_cache_show(self, packages):
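For reference, a hypothetical output line from the new dpkg-query invocation and how its tab-separated fields split (the package name and version shown here are made up for illustration):

# 'ii ' is the dpkg status abbreviation for an installed package.
sample = ('ii \tceph-common\t17.2.6-0ubuntu1\tamd64\t'
          'common utilities to mount and interact with a ceph storage cluster')

status, name, version, arch, desc = sample.split('\t', 4)
assert status.startswith('ii')
assert (name, arch) == ('ceph-common', 'amd64')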

View File

@@ -291,7 +291,8 @@ def pool_permission_list_for_service(service):
     for prefix in prefixes:
         permissions.append("allow {} object_prefix {}".format(permission,
                                                               prefix))
-    return ['mon', 'allow r, allow command "osd blacklist"',
+    return ['mon', ('allow r, allow command "osd blacklist"'
+                    ', allow command "osd blocklist"'),
             'osd', ', '.join(permissions)]

View File

@@ -681,15 +681,29 @@ def _get_osd_num_from_dirname(dirname):
     return match.group('osd_id')
+def get_crimson_osd_ids():
+    """Return a set of the OSDs that are running with the Crimson backend."""
+    rv = set()
+    try:
+        out = subprocess.check_output(['pgrep', 'crimson-osd', '-a'])
+        for line in out.decode('utf8').splitlines():
+            rv.add(line.split()[-1])
+    except Exception:
+        pass
+    return rv
 def get_local_osd_ids():
     """This will list the /var/lib/ceph/osd/* directories and try
     to split the ID off of the directory name and return it in
-    a list.
+    a list. Excludes crimson OSDs from the returned list.
     :returns: list. A list of OSD identifiers
     :raises: OSError if something goes wrong with listing the directory.
     """
     osd_ids = []
+    crimson_osds = get_crimson_osd_ids()
     osd_path = os.path.join(os.sep, 'var', 'lib', 'ceph', 'osd')
     if os.path.exists(osd_path):
         try:
@@ -698,7 +712,8 @@ def get_local_osd_ids():
                 osd_id = osd_dir.split('-')[1] if '-' in osd_dir else ''
                 if (_is_int(osd_id) and
                         filesystem_mounted(os.path.join(
-                            os.sep, osd_path, osd_dir))):
+                            os.sep, osd_path, osd_dir)) and
+                        osd_id not in crimson_osds):
                     osd_ids.append(osd_id)
             except OSError:
                 raise
@@ -1134,7 +1149,8 @@ def get_mds_bootstrap_key():
 _default_caps = collections.OrderedDict([
     ('mon', ['allow r',
-             'allow command "osd blacklist"']),
+             'allow command "osd blacklist"',
+             'allow command "osd blocklist"']),
     ('osd', ['allow rwx']),
 ])
@@ -1166,7 +1182,10 @@ osd_upgrade_caps = collections.OrderedDict([
 ])
 rbd_mirror_caps = collections.OrderedDict([
-    ('mon', ['profile rbd; allow r']),
+    ('mon', ['allow profile rbd-mirror-peer',
+             'allow command "service dump"',
+             'allow command "service status"'
+             ]),
     ('osd', ['profile rbd']),
     ('mgr', ['allow r']),
 ])
@@ -1212,28 +1231,15 @@ def get_named_key(name, caps=None, pool_list=None):
     :param caps: dict of cephx capabilities
     :returns: Returns a cephx key
     """
-    key_name = 'client.{}'.format(name)
-    try:
-        # Does the key already exist?
-        output = str(subprocess.check_output(
-            [
-                'sudo',
-                '-u', ceph_user(),
-                'ceph',
-                '--name', 'mon.',
-                '--keyring',
-                '/var/lib/ceph/mon/ceph-{}/keyring'.format(
-                    socket.gethostname()
-                ),
-                'auth',
-                'get',
-                key_name,
-            ]).decode('UTF-8')).strip()
-        return parse_key(output)
-    except subprocess.CalledProcessError:
-        # Couldn't get the key, time to create it!
-        log("Creating new key for {}".format(name), level=DEBUG)
     caps = caps or _default_caps
+    key_name = 'client.{}'.format(name)
+    key = ceph_auth_get(key_name)
+    if key:
+        upgrade_key_caps(key_name, caps)
+        return key
+    log("Creating new key for {}".format(name), level=DEBUG)
     cmd = [
         "sudo",
         "-u",
@@ -1255,6 +1261,7 @@ def get_named_key(name, caps=None, pool_list=None):
                 pools = " ".join(['pool={0}'.format(i) for i in pool_list])
                 subcaps[0] = subcaps[0] + " " + pools
         cmd.extend([subsystem, '; '.join(subcaps)])
+    ceph_auth_get.cache_clear()
     log("Calling check_output: {}".format(cmd), level=DEBUG)
     return parse_key(str(subprocess
@@ -1263,6 +1270,30 @@ def get_named_key(name, caps=None, pool_list=None):
                          .strip())  # IGNORE:E1103
+@functools.lru_cache()
+def ceph_auth_get(key_name):
+    try:
+        # Does the key already exist?
+        output = str(subprocess.check_output(
+            [
+                'sudo',
+                '-u', ceph_user(),
+                'ceph',
+                '--name', 'mon.',
+                '--keyring',
+                '/var/lib/ceph/mon/ceph-{}/keyring'.format(
+                    socket.gethostname()
+                ),
+                'auth',
+                'get',
+                key_name,
+            ]).decode('UTF-8')).strip()
+        return parse_key(output)
+    except subprocess.CalledProcessError:
+        # Couldn't get the key
+        pass
 def upgrade_key_caps(key, caps, pool_list=None):
     """Upgrade key to have capabilities caps"""
     if not is_leader():
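The new ceph_auth_get() helper is memoized with lru_cache, and get_named_key() clears the cache after creating a brand-new key so the next lookup re-queries the monitor. A small sketch of that caching pattern (the lookup body here is a stand-in, not the real call):

import functools

@functools.lru_cache()
def lookup_key(key_name):
    """Stand-in for ceph_auth_get(): pretend to ask the monitor for an existing key."""
    print('querying ceph for', key_name)
    return 'AQ...'  # placeholder key material

lookup_key('client.ceph-osd')   # runs the (expensive) query
lookup_key('client.ceph-osd')   # answered from the lru_cache
lookup_key.cache_clear()        # done after a new key is created
lookup_key('client.ceph-osd')   # queries the monitor again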
@@ -2063,7 +2094,7 @@ def filesystem_mounted(fs):
 def get_running_osds():
     """Returns a list of the pids of the current running OSD daemons"""
-    cmd = ['pgrep', 'ceph-osd']
+    cmd = ['pgrep', 'ceph-osd|crimson-osd']
     try:
         result = str(subprocess.check_output(cmd).decode('UTF-8'))
         return result.split()
@@ -2514,7 +2545,7 @@ class WatchDog(object):
         :type timeout: int
         """
         start_time = time.time()
-        while(not wait_f()):
+        while not wait_f():
             now = time.time()
             if now > start_time + timeout:
                 raise WatchDog.WatchDogTimeoutException()
@@ -3215,6 +3246,9 @@ UCA_CODENAME_MAP = {
     'wallaby': 'pacific',
     'xena': 'pacific',
     'yoga': 'quincy',
+    'zed': 'quincy',
+    'antelope': 'quincy',
+    'bobcat': 'quincy',
 }
@@ -3414,7 +3448,7 @@ def apply_osd_settings(settings):
     set_cmd = base_cmd + ' set {key} {value}'
     def _get_cli_key(key):
-        return(key.replace(' ', '_'))
+        return key.replace(' ', '_')
     # Retrieve the current values to check keys are correct and to make this a
     # noop if setting are already applied.
     for osd_id in get_local_osd_ids():
@@ -3453,6 +3487,9 @@ def enabled_manager_modules():
     :rtype: List[str]
     """
     cmd = ['ceph', 'mgr', 'module', 'ls']
+    quincy_or_later = cmp_pkgrevno('ceph-common', '17.1.0') >= 0
+    if quincy_or_later:
+        cmd.append('--format=json')
     try:
         modules = subprocess.check_output(cmd).decode('UTF-8')
     except subprocess.CalledProcessError as e:

View File

@@ -13,8 +13,8 @@ tags:
 series:
 - focal
 - jammy
-- kinetic
 - lunar
+- mantic
 description: |
   Ceph is a distributed storage and network file system designed to provide
   excellent performance, reliability, and scalability.

View File

@@ -4,7 +4,6 @@
       - charm-unit-jobs-py310
       - charm-xena-functional-jobs
       - charm-yoga-functional-jobs
-      - charm-zed-functional-jobs
       - charm-functional-jobs
     vars:
       needs_charm_build: true

View File

@@ -1,5 +1,5 @@
 variables:
-  openstack-origin: &openstack-origin distro
+  openstack-origin: &openstack-origin cloud:jammy-bobcat
 series: jammy
@@ -47,8 +47,6 @@ applications:
   mysql-innodb-cluster:
     charm: ch:mysql-innodb-cluster
     num_units: 3
-    options:
-      source: *openstack-origin
     to:
     - '0'
     - '1'
@@ -79,13 +77,11 @@ applications:
     - '6'
     - '7'
     - '8'
-    channel: quincy/edge
+    channel: latest/edge
   rabbitmq-server:
     charm: ch:rabbitmq-server
     num_units: 1
-    options:
-      source: *openstack-origin
     to:
     - '9'
     channel: latest/edge
@@ -98,7 +94,7 @@ applications:
       openstack-origin: *openstack-origin
     to:
     - '10'
-    channel: yoga/edge
+    channel: latest/edge
   nova-compute:
     charm: ch:nova-compute
@@ -107,7 +103,7 @@ applications:
       openstack-origin: *openstack-origin
     to:
     - '11'
-    channel: yoga/edge
+    channel: latest/edge
   glance:
     expose: True
@@ -117,7 +113,7 @@ applications:
       openstack-origin: *openstack-origin
     to:
     - '12'
-    channel: yoga/edge
+    channel: latest/edge
   cinder:
     expose: True
@@ -129,11 +125,11 @@ applications:
       glance-api-version: '2'
     to:
     - '13'
-    channel: yoga/edge
+    channel: latest/edge
   cinder-ceph:
     charm: ch:cinder-ceph
-    channel: yoga/edge
+    channel: latest/edge
   nova-cloud-controller:
     expose: True
@@ -143,7 +139,7 @@ applications:
       openstack-origin: *openstack-origin
     to:
     - '14'
-    channel: yoga/edge
+    channel: latest/edge
   placement:
     charm: ch:placement
@@ -152,7 +148,7 @@ applications:
      openstack-origin: *openstack-origin
     to:
     - '15'
-    channel: yoga/edge
+    channel: latest/edge
 relations:
 - - 'nova-compute:amqp'

View File

@@ -1,17 +1,22 @@
 variables:
-  openstack-origin: &openstack-origin cloud:jammy-zed
+  openstack-origin: &openstack-origin distro
+  # use infra (mysql, rabbit) from lts for stability
+  infra-series: &infra-series jammy
-series: jammy
+series: mantic
 comment:
 - 'machines section to decide order of deployment. database sooner = faster'
 machines:
   '0':
     constraints: mem=3072M
+    series: *infra-series
   '1':
     constraints: mem=3072M
+    series: *infra-series
   '2':
     constraints: mem=3072M
+    series: *infra-series
   '3':
   '4':
   '5':
@@ -19,30 +24,37 @@ machines:
   '7':
   '8':
   '9':
+    series: *infra-series
   '10':
+    series: *infra-series
   '11':
+    series: *infra-series
   '12':
+    series: *infra-series
   '13':
+    series: *infra-series
   '14':
+    series: *infra-series
   '15':
+    series: *infra-series
 applications:
   keystone-mysql-router:
     charm: ch:mysql-router
-    channel: 8.0/edge
+    channel: latest/edge
   glance-mysql-router:
     charm: ch:mysql-router
-    channel: 8.0/edge
+    channel: latest/edge
   cinder-mysql-router:
     charm: ch:mysql-router
-    channel: 8.0/edge
+    channel: latest/edge
   nova-cloud-controller-mysql-router:
     charm: ch:mysql-router
-    channel: 8.0/edge
+    channel: latest/edge
   placement-mysql-router:
     charm: ch:mysql-router
-    channel: 8.0/edge
+    channel: latest/edge
   mysql-innodb-cluster:
     charm: ch:mysql-innodb-cluster
@@ -51,7 +63,7 @@ applications:
     - '0'
     - '1'
    - '2'
-    channel: 8.0/edge
+    channel: latest/edge
   ceph-osd:
     charm: ../../ceph-osd.charm
@@ -77,14 +89,14 @@ applications:
     - '6'
     - '7'
     - '8'
-    channel: quincy/edge
+    channel: latest/edge
   rabbitmq-server:
     charm: ch:rabbitmq-server
     num_units: 1
     to:
     - '9'
-    channel: 3.9/edge
+    channel: latest/edge
   keystone:
     expose: True
@@ -94,7 +106,7 @@ applications:
       openstack-origin: *openstack-origin
     to:
     - '10'
-    channel: zed/edge
+    channel: latest/edge
   nova-compute:
     charm: ch:nova-compute
@@ -103,7 +115,7 @@ applications:
       openstack-origin: *openstack-origin
     to:
     - '11'
-    channel: zed/edge
+    channel: latest/edge
   glance:
     expose: True
@@ -113,7 +125,7 @@ applications:
      openstack-origin: *openstack-origin
     to:
     - '12'
-    channel: zed/edge
+    channel: latest/edge
   cinder:
     expose: True
@@ -125,11 +137,11 @@ applications:
       glance-api-version: '2'
     to:
     - '13'
-    channel: zed/edge
+    channel: latest/edge
   cinder-ceph:
     charm: ch:cinder-ceph
-    channel: zed/edge
+    channel: latest/edge
   nova-cloud-controller:
     expose: True
@@ -139,7 +151,7 @@ applications:
      openstack-origin: *openstack-origin
     to:
     - '14'
-    channel: zed/edge
+    channel: latest/edge
   placement:
     charm: ch:placement
@@ -148,7 +160,7 @@ applications:
      openstack-origin: *openstack-origin
     to:
     - '15'
-    channel: zed/edge
+    channel: latest/edge
 relations:
 - - 'nova-compute:amqp'

View File

@@ -4,6 +4,8 @@ gate_bundles:
   - focal-xena
   - focal-yoga
   - jammy-yoga
+  - jammy-bobcat
+  - mantic-bobcat
 smoke_bundles:
   - focal-xena