Add 2023.2 Bobcat support

* sync charm-helpers to classic charms
* change openstack-origin/source default to quincy
* add mantic to metadata series
* align testing with bobcat
* add new bobcat bundles
* add bobcat bundles to tests.yaml
* add bobcat tests to osci.yaml
* update build-on and run-on bases
* drop kinetic
* add additional unit test https mocks needed since
  charm-helpers commit 6064a34627882d1c8acf74644c48d05db67ee3b4
* update charmcraft_channel to 2.x/stable

Change-Id: I2d9c41c294668c3bb7fcba253adb8bc0c939d150
This commit is contained in:
Corey Bryant 2023-07-18 16:47:18 -04:00
parent 7feab3a45b
commit 37cb69d7f8
26 changed files with 238 additions and 362 deletions

View File

@ -33,9 +33,9 @@ bases:
- name: ubuntu
channel: "22.04"
architectures: [amd64, s390x, ppc64el, arm64]
- name: ubuntu
channel: "22.10"
architectures: [amd64, s390x, ppc64el, arm64]
- name: ubuntu
channel: "23.04"
architectures: [amd64, s390x, ppc64el, arm64]
- name: ubuntu
channel: "23.10"
architectures: [amd64, s390x, ppc64el, arm64]

View File

@ -5,7 +5,7 @@ options:
description: RadosGW debug level. Max is 20.
source:
type: string
default: yoga
default: quincy
description: |
Optional repository from which to install. May be one of the following:
distro (default), ppa:somecustom/ppa, a deb url sources entry,

View File

@ -221,6 +221,13 @@ def https():
return True
if config_get('ssl_cert') and config_get('ssl_key'):
return True
# Local import to avoid circular dependency.
import charmhelpers.contrib.openstack.cert_utils as cert_utils
if (
cert_utils.get_certificate_request() and not
cert_utils.get_requests_for_local_unit("certificates")
):
return False
for r_id in relation_ids('certificates'):
for unit in relation_list(r_id):
ca = relation_get('ca', rid=r_id, unit=unit)

View File

@ -409,6 +409,9 @@ def get_requests_for_local_unit(relation_name=None):
relation_name = relation_name or 'certificates'
bundles = []
for rid in relation_ids(relation_name):
sent = relation_get(rid=rid, unit=local_unit())
legacy_keys = ['certificate_name', 'common_name']
is_legacy_request = set(sent).intersection(legacy_keys)
for unit in related_units(rid):
data = relation_get(rid=rid, unit=unit)
if data.get(raw_certs_key):
@ -416,6 +419,14 @@ def get_requests_for_local_unit(relation_name=None):
'ca': data['ca'],
'chain': data.get('chain'),
'certs': json.loads(data[raw_certs_key])})
elif is_legacy_request:
bundles.append({
'ca': data['ca'],
'chain': data.get('chain'),
'certs': {sent['common_name']:
{'cert': data.get(local_name + '.server.cert'),
'key': data.get(local_name + '.server.key')}}})
return bundles

View File

@ -1748,6 +1748,9 @@ class WSGIWorkerConfigContext(WorkerConfigContext):
def __call__(self):
total_processes = _calculate_workers()
enable_wsgi_rotation = config('wsgi-rotation')
if enable_wsgi_rotation is None:
enable_wsgi_rotation = True
ctxt = {
"service_name": self.service_name,
"user": self.user,
@ -1761,6 +1764,7 @@ class WSGIWorkerConfigContext(WorkerConfigContext):
"public_processes": int(math.ceil(self.public_process_weight *
total_processes)),
"threads": 1,
"wsgi_rotation": enable_wsgi_rotation,
}
return ctxt

View File

@ -12,6 +12,8 @@ signing_dir = {{ signing_dir }}
{% if service_type -%}
service_type = {{ service_type }}
{% endif -%}
{% if admin_role -%}
service_token_roles = {{ admin_role }}
service_token_roles_required = True
{% endif -%}
{% endif -%}

View File

@ -22,6 +22,8 @@ signing_dir = {{ signing_dir }}
{% if use_memcache == true %}
memcached_servers = {{ memcache_url }}
{% endif -%}
{% if admin_role -%}
service_token_roles = {{ admin_role }}
service_token_roles_required = True
{% endif -%}
{% endif -%}

View File

@ -3,8 +3,8 @@
send_service_user_token = true
auth_type = password
auth_url = {{ auth_protocol }}://{{ auth_host }}:{{ auth_port }}
project_domain_id = default
user_domain_id = default
project_domain_name = service_domain
user_domain_name = service_domain
project_name = {{ admin_tenant_name }}
username = {{ admin_user }}
password = {{ admin_password }}

View File

@ -12,6 +12,12 @@ Listen {{ admin_port }}
Listen {{ public_port }}
{% endif -%}
{% if wsgi_rotation -%}
WSGISocketRotation On
{% else -%}
WSGISocketRotation Off
{% endif -%}
{% if port -%}
<VirtualHost *:{{ port }}>
WSGIDaemonProcess {{ service_name }} processes={{ processes }} threads={{ threads }} user={{ user }} group={{ group }} \

View File

@ -12,6 +12,12 @@ Listen {{ admin_port }}
Listen {{ public_port }}
{% endif -%}
{% if wsgi_rotation -%}
WSGISocketRotation On
{% else -%}
WSGISocketRotation Off
{% endif -%}
{% if port -%}
<VirtualHost *:{{ port }}>
WSGIDaemonProcess {{ service_name }} processes={{ processes }} threads={{ threads }} user={{ user }} group={{ group }} \

View File

@ -160,6 +160,7 @@ OPENSTACK_CODENAMES = OrderedDict([
('2022.1', 'yoga'),
('2022.2', 'zed'),
('2023.1', 'antelope'),
('2023.2', 'bobcat'),
])
# The ugly duckling - must list releases oldest to newest
@ -957,7 +958,7 @@ def os_requires_version(ostack_release, pkg):
def wrap(f):
@wraps(f)
def wrapped_f(*args):
if os_release(pkg) < ostack_release:
if CompareOpenStackReleases(os_release(pkg)) < ostack_release:
raise Exception("This hook is not supported on releases"
" before %s" % ostack_release)
f(*args)

View File

@ -28,7 +28,6 @@ import os
import shutil
import json
import time
import uuid
from subprocess import (
check_call,
@ -1677,6 +1676,10 @@ class CephBrokerRq(object):
The API is versioned and defaults to version 1.
"""
# The below hash is the result of running
# `hashlib.sha1('[]'.encode()).hexdigest()`
EMPTY_LIST_SHA = '97d170e1550eee4afc0af065b78cda302a97674c'
def __init__(self, api_version=1, request_id=None, raw_request_data=None):
"""Initialize CephBrokerRq object.
@ -1685,8 +1688,12 @@ class CephBrokerRq(object):
:param api_version: API version for request (default: 1).
:type api_version: Optional[int]
:param request_id: Unique identifier for request.
(default: string representation of generated UUID)
:param request_id: Unique identifier for request. The identifier will
be updated as ops are added or removed from the
broker request. This ensures that Ceph will
correctly process requests where operations are
added after the initial request is processed.
(default: sha1 of operations)
:type request_id: Optional[str]
:param raw_request_data: JSON-encoded string to build request from.
:type raw_request_data: Optional[str]
@ -1695,16 +1702,20 @@ class CephBrokerRq(object):
if raw_request_data:
request_data = json.loads(raw_request_data)
self.api_version = request_data['api-version']
self.request_id = request_data['request-id']
self.set_ops(request_data['ops'])
self.request_id = request_data['request-id']
else:
self.api_version = api_version
if request_id:
self.request_id = request_id
else:
self.request_id = str(uuid.uuid1())
self.request_id = CephBrokerRq.EMPTY_LIST_SHA
self.ops = []
def _hash_ops(self):
"""Return the sha1 of the requested Broker ops."""
return hashlib.sha1(json.dumps(self.ops, sort_keys=True).encode()).hexdigest()
def add_op(self, op):
"""Add an op if it is not already in the list.
@ -1713,6 +1724,7 @@ class CephBrokerRq(object):
"""
if op not in self.ops:
self.ops.append(op)
self.request_id = self._hash_ops()
def add_op_request_access_to_group(self, name, namespace=None,
permission=None, key_name=None,
@ -1991,6 +2003,7 @@ class CephBrokerRq(object):
to allow comparisons to ensure validity.
"""
self.ops = ops
self.request_id = self._hash_ops()
@property
def request(self):

View File

@ -32,6 +32,7 @@ UBUNTU_RELEASES = (
'jammy',
'kinetic',
'lunar',
'mantic',
)

View File

@ -238,6 +238,14 @@ CLOUD_ARCHIVE_POCKETS = {
'antelope/proposed': 'jammy-proposed/antelope',
'jammy-antelope/proposed': 'jammy-proposed/antelope',
'jammy-proposed/antelope': 'jammy-proposed/antelope',
# bobcat
'bobcat': 'jammy-updates/bobcat',
'jammy-bobcat': 'jammy-updates/bobcat',
'jammy-bobcat/updates': 'jammy-updates/bobcat',
'jammy-updates/bobcat': 'jammy-updates/bobcat',
'bobcat/proposed': 'jammy-proposed/bobcat',
'jammy-bobcat/proposed': 'jammy-proposed/bobcat',
'jammy-proposed/bobcat': 'jammy-proposed/bobcat',
# OVN
'focal-ovn-22.03': 'focal-updates/ovn-22.03',
@ -270,6 +278,7 @@ OPENSTACK_RELEASES = (
'yoga',
'zed',
'antelope',
'bobcat',
)
@ -298,6 +307,7 @@ UBUNTU_OPENSTACK_RELEASE = OrderedDict([
('jammy', 'yoga'),
('kinetic', 'zed'),
('lunar', 'antelope'),
('mantic', 'bobcat'),
])
@ -591,7 +601,7 @@ def _get_key_by_keyid(keyid):
curl_cmd = ['curl', keyserver_url.format(keyid)]
# use proxy server settings in order to retrieve the key
return subprocess.check_output(curl_cmd,
env=env_proxy_settings(['https']))
env=env_proxy_settings(['https', 'no_proxy']))
def _dearmor_gpg_key(key_asc):

View File

@ -122,13 +122,12 @@ class Cache(object):
:raises: subprocess.CalledProcessError
"""
pkgs = {}
cmd = ['dpkg-query', '--list']
cmd = [
'dpkg-query', '--show',
'--showformat',
r'${db:Status-Abbrev}\t${Package}\t${Version}\t${Architecture}\t${binary:Summary}\n'
]
cmd.extend(packages)
if locale.getlocale() == (None, None):
# subprocess calls out to locale.getpreferredencoding(False) to
# determine encoding. Workaround for Trusty where the
# environment appears to not be set up correctly.
locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
try:
output = subprocess.check_output(cmd,
stderr=subprocess.STDOUT,
@ -140,24 +139,17 @@ class Cache(object):
if cp.returncode != 1:
raise
output = cp.output
headings = []
for line in output.splitlines():
if line.startswith('||/'):
headings = line.split()
headings.pop(0)
# only process lines for successfully installed packages
if not (line.startswith('ii ') or line.startswith('hi ')):
continue
elif (line.startswith('|') or line.startswith('+') or
line.startswith('dpkg-query:')):
continue
else:
data = line.split(None, 4)
status = data.pop(0)
if status not in ('ii', 'hi'):
continue
pkg = {}
pkg.update({k.lower(): v for k, v in zip(headings, data)})
if 'name' in pkg:
pkgs.update({pkg['name']: pkg})
status, name, version, arch, desc = line.split('\t', 4)
pkgs[name] = {
'name': name,
'version': version,
'architecture': arch,
'description': desc,
}
return pkgs
def _apt_cache_show(self, packages):

View File

@ -681,24 +681,39 @@ def _get_osd_num_from_dirname(dirname):
return match.group('osd_id')
def get_crimson_osd_ids():
"""Return a set of the OSDs that are running with the Crimson backend."""
rv = set()
try:
out = subprocess.check_output(['pgrep', 'crimson-osd', '-a'])
for line in out.decode('utf8').splitlines():
rv.add(line.split()[-1])
except Exception:
pass
return rv
def get_local_osd_ids():
"""This will list the /var/lib/ceph/osd/* directories and try
to split the ID off of the directory name and return it in
a list.
a list. Excludes crimson OSDs from the returned list.
:returns: list. A list of OSD identifiers
:raises: OSError if something goes wrong with listing the directory.
"""
osd_ids = []
crimson_osds = get_crimson_osd_ids()
osd_path = os.path.join(os.sep, 'var', 'lib', 'ceph', 'osd')
if os.path.exists(osd_path):
try:
dirs = os.listdir(osd_path)
for osd_dir in dirs:
osd_id = osd_dir.split('-')[1]
osd_id = osd_dir.split('-')[1] if '-' in osd_dir else ''
if (_is_int(osd_id) and
filesystem_mounted(os.path.join(
os.sep, osd_path, osd_dir))):
os.sep, osd_path, osd_dir)) and
osd_id not in crimson_osds):
osd_ids.append(osd_id)
except OSError:
raise
@ -1216,28 +1231,15 @@ def get_named_key(name, caps=None, pool_list=None):
:param caps: dict of cephx capabilities
:returns: Returns a cephx key
"""
key_name = 'client.{}'.format(name)
try:
# Does the key already exist?
output = str(subprocess.check_output(
[
'sudo',
'-u', ceph_user(),
'ceph',
'--name', 'mon.',
'--keyring',
'/var/lib/ceph/mon/ceph-{}/keyring'.format(
socket.gethostname()
),
'auth',
'get',
key_name,
]).decode('UTF-8')).strip()
return parse_key(output)
except subprocess.CalledProcessError:
# Couldn't get the key, time to create it!
log("Creating new key for {}".format(name), level=DEBUG)
caps = caps or _default_caps
key_name = 'client.{}'.format(name)
key = ceph_auth_get(key_name)
if key:
upgrade_key_caps(key_name, caps)
return key
log("Creating new key for {}".format(name), level=DEBUG)
cmd = [
"sudo",
"-u",
@ -1259,6 +1261,7 @@ def get_named_key(name, caps=None, pool_list=None):
pools = " ".join(['pool={0}'.format(i) for i in pool_list])
subcaps[0] = subcaps[0] + " " + pools
cmd.extend([subsystem, '; '.join(subcaps)])
ceph_auth_get.cache_clear()
log("Calling check_output: {}".format(cmd), level=DEBUG)
return parse_key(str(subprocess
@ -1267,6 +1270,30 @@ def get_named_key(name, caps=None, pool_list=None):
.strip()) # IGNORE:E1103
@functools.lru_cache()
def ceph_auth_get(key_name):
try:
# Does the key already exist?
output = str(subprocess.check_output(
[
'sudo',
'-u', ceph_user(),
'ceph',
'--name', 'mon.',
'--keyring',
'/var/lib/ceph/mon/ceph-{}/keyring'.format(
socket.gethostname()
),
'auth',
'get',
key_name,
]).decode('UTF-8')).strip()
return parse_key(output)
except subprocess.CalledProcessError:
# Couldn't get the key
pass
def upgrade_key_caps(key, caps, pool_list=None):
"""Upgrade key to have capabilities caps"""
if not is_leader():
@ -2067,7 +2094,7 @@ def filesystem_mounted(fs):
def get_running_osds():
"""Returns a list of the pids of the current running OSD daemons"""
cmd = ['pgrep', 'ceph-osd']
cmd = ['pgrep', 'ceph-osd|crimson-osd']
try:
result = str(subprocess.check_output(cmd).decode('UTF-8'))
return result.split()
@ -2518,7 +2545,7 @@ class WatchDog(object):
:type timeout: int
"""
start_time = time.time()
while(not wait_f()):
while not wait_f():
now = time.time()
if now > start_time + timeout:
raise WatchDog.WatchDogTimeoutException()
@ -3219,6 +3246,9 @@ UCA_CODENAME_MAP = {
'wallaby': 'pacific',
'xena': 'pacific',
'yoga': 'quincy',
'zed': 'quincy',
'antelope': 'quincy',
'bobcat': 'quincy',
}
@ -3418,7 +3448,7 @@ def apply_osd_settings(settings):
set_cmd = base_cmd + ' set {key} {value}'
def _get_cli_key(key):
return(key.replace(' ', '_'))
return key.replace(' ', '_')
# Retrieve the current values to check keys are correct and to make this a
# noop if setting are already applied.
for osd_id in get_local_osd_ids():

View File

@ -15,7 +15,8 @@ tags:
series:
- focal
- jammy
- kinetic
- lunar
- mantic
extra-bindings:
public:
admin:

104
osci.yaml
View File

@ -9,37 +9,37 @@
- vault-focal-yoga-namespaced
- focal-yoga-multisite
- jammy-yoga-multisite
- jammy-zed-multisite:
voting: false
- jammy-antelope-multisite:
voting: false
- kinetic-zed-multisite:
- jammy-bobcat-multisite:
voting: false
- lunar-antelope-multisite:
voting: false
- mantic-bobcat-multisite:
voting: false
- vault-jammy-yoga_rgw
- vault-jammy-yoga-namespaced
- vault-jammy-zed_rgw:
voting: false
- vault-jammy-zed-namespaced:
voting: false
- vault-jammy-antelope_rgw:
voting: false
- vault-jammy-antelope-namespaced:
voting: false
- vault-kinetic-zed_rgw:
- vault-jammy-bobcat_rgw:
voting: false
- vault-kinetic-zed-namespaced:
- vault-jammy-bobcat-namespaced:
voting: false
- vault-lunar-antelope_rgw:
voting: false
- vault-lunar-antelope-namespaced:
voting: false
- vault-mantic-bobcat_rgw:
voting: false
- vault-mantic-bobcat-namespaced:
voting: false
vars:
needs_charm_build: true
charm_build_name: ceph-radosgw
build_type: charmcraft
charmcraft_channel: 2.1/stable
charmcraft_channel: 2.x/stable
- job:
name: focal-yoga-multisite
parent: func-target
@ -62,13 +62,6 @@
soft: true
vars:
tox_extra_args: '-- jammy-yoga-multisite'
- job:
name: jammy-zed-multisite
parent: func-target
dependencies:
- jammy-yoga-multisite
vars:
tox_extra_args: '-- jammy-zed-multisite'
- job:
name: jammy-antelope-multisite
parent: func-target
@ -77,12 +70,12 @@
vars:
tox_extra_args: '-- jammy-antelope-multisite'
- job:
name: kinetic-zed-multisite
name: jammy-bobcat-multisite
parent: func-target
dependencies:
- jammy-yoga-multisite
vars:
tox_extra_args: '-- kinetic-zed-multisite'
tox_extra_args: '-- jammy-bobcat-multisite'
- job:
name: lunar-antelope-multisite
parent: func-target
@ -90,6 +83,13 @@
- jammy-yoga-multisite
vars:
tox_extra_args: '-- lunar-antelope-multisite'
- job:
name: mantic-bobcat-multisite
parent: func-target
dependencies:
- jammy-yoga-multisite
vars:
tox_extra_args: '-- mantic-bobcat-multisite'
- job:
name: vault-focal-yoga_rgw
parent: func-target
@ -118,13 +118,6 @@
- jammy-yoga-multisite
vars:
tox_extra_args: '-- vault:jammy-yoga-namespaced'
- job:
name: vault-jammy-zed-namespaced
parent: func-target
dependencies:
- jammy-yoga-multisite
vars:
tox_extra_args: '-- vault:jammy-zed-namespaced'
- job:
name: vault-jammy-antelope-namespaced
parent: func-target
@ -133,37 +126,12 @@
vars:
tox_extra_args: '-- vault:jammy-antelope-namespaced'
- job:
name: vault-jammy-zed_rgw
name: vault-jammy-bobcat-namespaced
parent: func-target
dependencies:
- vault-jammy-yoga_rgw
- vault-jammy-yoga-namespaced
- jammy-yoga-multisite
vars:
tox_extra_args: '-- vault:jammy-zed'
- job:
name: vault-jammy-zed-namespaced
parent: func-target
dependencies:
- vault-jammy-yoga_rgw
- vault-jammy-yoga-namespaced
vars:
tox_extra_args: '-- vault:jammy-zed-namespaced'
- job:
name: vault-kinetic-zed_rgw
parent: func-target
dependencies:
- vault-jammy-yoga_rgw
- vault-jammy-yoga-namespaced
vars:
tox_extra_args: '-- vault:kinetic-zed'
- job:
name: vault-kinetic-zed-namespaced
parent: func-target
dependencies:
- vault-jammy-yoga_rgw
- vault-jammy-yoga-namespaced
vars:
tox_extra_args: '-- vault:kinetic-zed-namespaced'
tox_extra_args: '-- vault:jammy-bobcat-namespaced'
- job:
name: vault-jammy-antelope_rgw
parent: func-target
@ -173,13 +141,13 @@
vars:
tox_extra_args: '-- vault:jammy-antelope'
- job:
name: vault-lunar-antelope_rgw
name: vault-jammy-bobcat_rgw
parent: func-target
dependencies:
- vault-jammy-yoga_rgw
- vault-jammy-yoga-namespaced
vars:
tox_extra_args: '-- vault:lunar-antelope'
tox_extra_args: '-- vault:jammy-bobcat'
- job:
name: vault-lunar-antelope-namespaced
parent: func-target
@ -188,3 +156,27 @@
- vault-jammy-yoga-namespaced
vars:
tox_extra_args: '-- vault:lunar-antelope-namespaced'
- job:
name: vault-mantic-bobcat-namespaced
parent: func-target
dependencies:
- vault-jammy-yoga_rgw
- vault-jammy-yoga-namespaced
vars:
tox_extra_args: '-- vault:mantic-bobcat-namespaced'
- job:
name: vault-lunar-antelope_rgw
parent: func-target
dependencies:
- vault-jammy-yoga_rgw
- vault-jammy-yoga-namespaced
vars:
tox_extra_args: '-- vault:lunar-antelope'
- job:
name: vault-mantic-bobcat_rgw
parent: func-target
dependencies:
- vault-jammy-yoga_rgw
- vault-jammy-yoga-namespaced
vars:
tox_extra_args: '-- vault:mantic-bobcat'

View File

@ -1,5 +1,5 @@
options:
source: &source cloud:jammy-zed
source: &source cloud:jammy-bobcat
series: jammy

View File

@ -1,5 +1,5 @@
options:
source: &source cloud:jammy-zed
source: &source cloud:jammy-bobcat
series: jammy

View File

@ -1,5 +1,5 @@
options:
source: &source cloud:jammy-zed
source: &source cloud:jammy-bobcat
series: jammy

View File

@ -1,99 +0,0 @@
options:
source: &source distro
series: kinetic
comment:
- 'machines section to decide order of deployment. database sooner = faster'
machines:
'0':
'1':
'2':
'3':
'4':
'5':
'6':
'7':
'8':
'9':
applications:
ceph-radosgw:
charm: ../../ceph-radosgw.charm
num_units: 1
options:
source: *source
to:
- '0'
secondary-ceph-radosgw:
charm: ../../ceph-radosgw.charm
num_units: 1
options:
source: *source
to:
- '1'
ceph-osd:
charm: ch:ceph-osd
num_units: 3
constraints: "mem=2048"
storage:
osd-devices: 'cinder,10G'
options:
source: *source
osd-devices: '/srv/ceph /dev/test-non-existent'
to:
- '2'
- '6'
- '7'
channel: latest/edge
secondary-ceph-osd:
charm: ch:ceph-osd
num_units: 3
constraints: "mem=2048"
storage:
osd-devices: 'cinder,10G'
options:
source: *source
osd-devices: '/srv/ceph /dev/test-non-existent'
to:
- '3'
- '8'
- '9'
channel: latest/edge
ceph-mon:
charm: ch:ceph-mon
num_units: 1
options:
monitor-count: 1
source: *source
to:
- '4'
channel: latest/edge
secondary-ceph-mon:
charm: ch:ceph-mon
num_units: 1
options:
monitor-count: 1
source: *source
to:
- '5'
channel: latest/edge
relations:
- - 'ceph-osd:mon'
- 'ceph-mon:osd'
- - 'ceph-radosgw:mon'
- 'ceph-mon:radosgw'
- - 'secondary-ceph-osd:mon'
- 'secondary-ceph-mon:osd'
- - 'secondary-ceph-radosgw:mon'
- 'secondary-ceph-mon:radosgw'

View File

@ -1,124 +0,0 @@
options:
source: &source distro
series: kinetic
comment:
- 'machines section to decide order of deployment. database sooner = faster'
machines:
'0':
constraints: mem=3072M
'1':
constraints: mem=3072M
'2':
constraints: mem=3072M
'3':
'4':
'5':
'6':
'7':
'8':
'9':
'10':
'11':
applications:
keystone-mysql-router:
charm: ch:mysql-router
channel: latest/edge
mysql-innodb-cluster:
charm: ch:mysql-innodb-cluster
num_units: 3
options:
source: *source
to:
- '0'
- '1'
- '2'
channel: latest/edge
ceph-radosgw:
charm: ../../ceph-radosgw.charm
num_units: 1
options:
source: *source
namespace-tenants: True
to:
- '3'
ceph-osd:
charm: ch:ceph-osd
num_units: 3
constraints: "mem=2048"
storage:
osd-devices: 'cinder,10G'
options:
source: *source
osd-devices: '/srv/ceph /dev/test-non-existent'
to:
- '4'
- '5'
- '6'
channel: latest/edge
ceph-mon:
charm: ch:ceph-mon
num_units: 3
options:
source: *source
to:
- '7'
- '8'
- '9'
channel: latest/edge
keystone:
expose: True
charm: ch:keystone
num_units: 1
options:
openstack-origin: *source
to:
- '10'
channel: latest/edge
vault-mysql-router:
charm: ch:mysql-router
channel: latest/edge
vault:
charm: ch:vault
num_units: 1
to:
- '11'
channel: latest/edge
relations:
- - 'keystone:shared-db'
- 'keystone-mysql-router:shared-db'
- - 'keystone-mysql-router:db-router'
- 'mysql-innodb-cluster:db-router'
- - 'ceph-osd:mon'
- 'ceph-mon:osd'
- - 'ceph-radosgw:mon'
- 'ceph-mon:radosgw'
- - 'ceph-radosgw:identity-service'
- 'keystone:identity-service'
- - 'vault-mysql-router:db-router'
- 'mysql-innodb-cluster:db-router'
- - 'vault:shared-db'
- 'vault-mysql-router:shared-db'
- - 'keystone:certificates'
- 'vault:certificates'
- - 'ceph-radosgw:certificates'
- 'vault:certificates'

View File

@ -1,7 +1,7 @@
options:
source: &source distro
source: &source cloud:mantic-bobcat
series: kinetic
series: mantic
comment:
- 'machines section to decide order of deployment. database sooner = faster'

View File

@ -14,20 +14,20 @@ smoke_bundles:
dev_bundles:
- jammy-yoga-multisite
- jammy-zed-multisite
- lunar-antelope-multisite
- kinetic-zed-multisite
- mantic-bobcat-multisite
- jammy-antelope-multisite
- jammy-bobcat-multisite
- vault: jammy-yoga
- vault: jammy-yoga-namespaced
- vault: jammy-zed
- vault: lunar-antelope
- vault: jammy-zed-namespaced
- vault: mantic-bobcat
- vault: lunar-antelope-namespaced
- vault: kinetic-zed
- vault: mantic-bobcat-namespaced
- vault: jammy-antelope
- vault: kinetic-zed-namespaced
- vault: jammy-bobcat
- vault: jammy-antelope-namespaced
- vault: jammy-bobcat-namespaced
target_deploy_status:
vault:
@ -48,7 +48,7 @@ tests:
tests_options:
force_deploy:
- kinetic-zed
- jammy-antelope
- kinetic-zed-namespaced
- jammy-bobcat
- jammy-antelope-namespaced
- jammy-bobcat-namespaced

View File

@ -46,6 +46,7 @@ class HAProxyContextTests(CharmTestCase):
self.cmp_pkgrevno.return_value = 1
self.arch.return_value = 'amd64'
@patch('ceph_radosgw_context.https')
@patch('charmhelpers.contrib.openstack.context.get_relation_ip')
@patch('charmhelpers.contrib.openstack.context.mkdir')
@patch('charmhelpers.contrib.openstack.context.local_unit')
@ -54,7 +55,9 @@ class HAProxyContextTests(CharmTestCase):
@patch('charmhelpers.contrib.openstack.context.relation_ids')
@patch('charmhelpers.contrib.hahelpers.cluster.relation_ids')
def test_ctxt(self, _harelation_ids, _ctxtrelation_ids, _haconfig,
_ctxtconfig, _local_unit, _mkdir, _get_relation_ip):
_ctxtconfig, _local_unit, _mkdir, _get_relation_ip,
_mock_https):
_mock_https.return_value = False
_get_relation_ip.return_value = '10.0.0.10'
_ctxtconfig.side_effect = self.test_config.get
_haconfig.side_effect = self.test_config.get
@ -96,14 +99,17 @@ class MonContextTest(CharmTestCase):
else:
return []
@patch('ceph_radosgw_context.https')
@patch('charmhelpers.contrib.hahelpers.cluster.relation_ids')
@patch('charmhelpers.contrib.hahelpers.cluster.config_get')
@patch.object(ceph, 'config', lambda *args:
'{"client.radosgw.gateway": {"rgw init timeout": 60}}')
@patch.object(context, 'ensure_host_resolvable_v6')
def test_ctxt(
self, mock_ensure_rsv_v6, mock_config_get, mock_relation_ids
self, mock_ensure_rsv_v6, mock_config_get, mock_relation_ids,
mock_https,
):
mock_https.return_value = False
mock_relation_ids.return_value = []
mock_config_get.side_effect = self.test_config.get
self.socket.gethostname.return_value = 'testhost'
@ -212,14 +218,17 @@ class MonContextTest(CharmTestCase):
self.assertEqual(expect, mon_ctxt())
self.assertTrue(mock_ensure_rsv_v6.called)
@patch('ceph_radosgw_context.https')
@patch('charmhelpers.contrib.hahelpers.cluster.relation_ids')
@patch('charmhelpers.contrib.hahelpers.cluster.config_get')
@patch.object(ceph, 'config', lambda *args:
'{"client.radosgw.gateway": {"rgw init timeout": 60}}')
@patch.object(context, 'ensure_host_resolvable_v6')
def test_list_of_addresses_from_ceph_proxy(
self, mock_ensure_rsv_v6, mock_config_get, mock_relation_ids
self, mock_ensure_rsv_v6, mock_config_get, mock_relation_ids,
mock_https,
):
mock_https.return_value = False
mock_relation_ids.return_value = []
mock_config_get.side_effect = self.test_config.get
self.socket.gethostname.return_value = 'testhost'
@ -273,11 +282,14 @@ class MonContextTest(CharmTestCase):
self.assertEqual(expect, mon_ctxt())
self.assertTrue(mock_ensure_rsv_v6.called)
@patch('ceph_radosgw_context.https')
@patch('charmhelpers.contrib.hahelpers.cluster.relation_ids')
@patch('charmhelpers.contrib.hahelpers.cluster.config_get')
@patch.object(ceph, 'config', lambda *args:
'{"client.radosgw.gateway": {"rgw init timeout": 60}}')
def test_ctxt_missing_data(self, mock_config_get, mock_relation_ids):
def test_ctxt_missing_data(self, mock_config_get, mock_relation_ids,
mock_https):
mock_https.return_value = False
mock_relation_ids.return_value = []
mock_config_get.side_effect = self.test_config.get
self.socket.gethostname.return_value = 'testhost'
@ -287,11 +299,14 @@ class MonContextTest(CharmTestCase):
self.related_units.return_value = ['ceph/0', 'ceph/1', 'ceph/2']
self.assertEqual({}, mon_ctxt())
@patch('ceph_radosgw_context.https')
@patch('charmhelpers.contrib.hahelpers.cluster.relation_ids')
@patch('charmhelpers.contrib.hahelpers.cluster.config_get')
@patch.object(ceph, 'config', lambda *args:
'{"client.radosgw.gateway": {"rgw init timeout": 60}}')
def test_ctxt_inconsistent_auths(self, mock_config_get, mock_relation_ids):
def test_ctxt_inconsistent_auths(self, mock_config_get, mock_relation_ids,
mock_https):
mock_https.return_value = False
mock_relation_ids.return_value = []
mock_config_get.side_effect = self.test_config.get
self.socket.gethostname.return_value = 'testhost'
@ -337,11 +352,14 @@ class MonContextTest(CharmTestCase):
}
self.assertEqual(expect, mon_ctxt())
@patch('ceph_radosgw_context.https')
@patch('charmhelpers.contrib.hahelpers.cluster.relation_ids')
@patch('charmhelpers.contrib.hahelpers.cluster.config_get')
@patch.object(ceph, 'config', lambda *args:
'{"client.radosgw.gateway": {"rgw init timeout": 60}}')
def test_ctxt_consistent_auths(self, mock_config_get, mock_relation_ids):
def test_ctxt_consistent_auths(self, mock_config_get, mock_relation_ids,
mock_https):
mock_https.return_value = False
mock_relation_ids.return_value = []
mock_config_get.side_effect = self.test_config.get
self.socket.gethostname.return_value = 'testhost'
@ -445,11 +463,14 @@ class MonContextTest(CharmTestCase):
_test_version = '16.2.0'
context.validate_http_frontend('beast')
@patch('ceph_radosgw_context.https')
@patch('charmhelpers.contrib.hahelpers.cluster.relation_ids')
@patch('charmhelpers.contrib.hahelpers.cluster.config_get')
@patch.object(ceph, 'config', lambda *args:
'{"client.radosgw.gateway": {"rgw init timeout": 60}}')
def test_ctxt_inconsistent_fsids(self, mock_config_get, mock_relation_ids):
def test_ctxt_inconsistent_fsids(self, mock_config_get, mock_relation_ids,
mock_https):
mock_https.return_value = False
mock_relation_ids.return_value = []
mock_config_get.side_effect = self.test_config.get
self.socket.gethostname.return_value = 'testhost'