Add yoga bundles and release-tool syncs

* charm-helpers sync for classic charms
* sync from release-tools
* switch to release-specific zosci functional tests
* run focal-ussuri as smoke tests
* remove trusty, xenial, and groovy metadata/tests
* drop py35 and add py39
* charms.ceph sync

Change-Id: I8b0ac822cdf37d70ac39f1b115f95a448afb624d
Corey Bryant 2021-10-29 17:00:39 -04:00
parent d15ac894a9
commit 3a27c7090e
20 changed files with 352 additions and 348 deletions

View File

@@ -1413,7 +1413,8 @@ def incomplete_relation_data(configs, required_interfaces):
for i in incomplete_relations}
def do_action_openstack_upgrade(package, upgrade_callback, configs):
def do_action_openstack_upgrade(package, upgrade_callback, configs,
force_upgrade=False):
"""Perform action-managed OpenStack upgrade.
Upgrades packages to the configured openstack-origin version and sets
@@ -1427,12 +1428,13 @@ def do_action_openstack_upgrade(package, upgrade_callback, configs):
@param package: package name for determining if upgrade available
@param upgrade_callback: function callback to charm's upgrade function
@param configs: templating object derived from OSConfigRenderer class
@param force_upgrade: perform dist-upgrade regardless of whether a new OpenStack release is available
@return: True if upgrade successful; False if upgrade failed or skipped
"""
ret = False
if openstack_upgrade_available(package):
if openstack_upgrade_available(package) or force_upgrade:
if config('action-managed-upgrade'):
juju_log('Upgrading OpenStack release')
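
The new force_upgrade keyword lets an action-managed upgrade proceed even when openstack_upgrade_available() reports nothing newer for the package. A minimal sketch of how a charm action could call it, assuming the usual charm-helpers import path; the 'radosgw' package name, the callback stub and the CONFIGS stand-in are illustrative only:

from charmhelpers.contrib.openstack.utils import do_action_openstack_upgrade

def upgrade_callback(configs):
    """Hypothetical charm-side upgrade routine (stub for illustration)."""
    pass

CONFIGS = None  # stand-in for the charm's OSConfigRenderer instance

# force_upgrade=True takes the upgrade path even though no newer
# OpenStack release was detected for the package.
do_action_openstack_upgrade('radosgw', upgrade_callback, CONFIGS,
                            force_upgrade=True)
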
@@ -2599,6 +2601,23 @@ def get_subordinate_release_packages(os_release, package_type='deb'):
return SubordinatePackages(install, purge)
def get_subordinate_services():
"""Iterate over subordinate relations and get service information.
In a similar fashion to get_subordinate_release_packages(),
principal charms can retrieve a list of services advertised by their
subordinate charms. This is useful for knowing about subordinate services
when pausing, resuming or upgrading a principal unit.
:returns: Name of all services advertised by all subordinates
:rtype: Set[str]
"""
services = set()
for rdata in container_scoped_relation_get('services'):
services |= set(json.loads(rdata or '[]'))
return services
os_restart_on_change = partial(
pausable_restart_on_change,
can_restart_now_f=deferred_events.check_and_record_restart_request,
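
get_subordinate_services() gives a principal charm the set of service names its container-scoped subordinates advertise over their relations. A hedged sketch of combining it with the principal's own services when pausing a unit; the 'radosgw' service name is illustrative, and service_pause comes from charm-helpers' host module:

from charmhelpers.contrib.openstack.utils import get_subordinate_services
from charmhelpers.core.host import service_pause

def pause_all_services():
    # Pause the principal's own service plus everything advertised by
    # subordinate charms, so the whole unit quiesces together.
    for svc in {'radosgw'} | get_subordinate_services():
        service_pause(svc)
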

View File

@@ -294,7 +294,6 @@ class BasePool(object):
# NOTE: Do not perform initialization steps that require live data from
# a running cluster here. The *Pool classes may be used for validation.
self.service = service
self.nautilus_or_later = cmp_pkgrevno('ceph-common', '14.2.0') >= 0
self.op = op or {}
if op:
@@ -341,7 +340,8 @@ class BasePool(object):
Do not add calls for a specific pool type here, those should go into
one of the pool specific classes.
"""
if self.nautilus_or_later:
nautilus_or_later = cmp_pkgrevno('ceph-common', '14.2.0') >= 0
if nautilus_or_later:
# Ensure we set the expected pool ratio
update_pool(
client=self.service,
@@ -660,8 +660,9 @@ class ReplicatedPool(BasePool):
else:
self.pg_num = self.get_pgs(self.replicas, self.percent_data)
nautilus_or_later = cmp_pkgrevno('ceph-common', '14.2.0') >= 0
# Create it
if self.nautilus_or_later:
if nautilus_or_later:
cmd = [
'ceph', '--id', self.service, 'osd', 'pool', 'create',
'--pg-num-min={}'.format(
@@ -745,9 +746,9 @@ class ErasurePool(BasePool):
k = int(erasure_profile['k'])
m = int(erasure_profile['m'])
pgs = self.get_pgs(k + m, self.percent_data)
self.nautilus_or_later = cmp_pkgrevno('ceph-common', '14.2.0') >= 0
nautilus_or_later = cmp_pkgrevno('ceph-common', '14.2.0') >= 0
# Create it
if self.nautilus_or_later:
if nautilus_or_later:
cmd = [
'ceph', '--id', self.service, 'osd', 'pool', 'create',
'--pg-num-min={}'.format(
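
Moving the cmp_pkgrevno() call out of BasePool.__init__ matters because, as the NOTE above says, the *Pool classes may be instantiated purely to validate a broker request on a unit where ceph-common is not installed; the package comparison now only happens when a pool is actually created or updated. The check itself is just a version comparison, roughly (a sketch assuming the standard charm-helpers helper):

from charmhelpers.core.host import cmp_pkgrevno

# cmp_pkgrevno returns <0, 0 or >0 depending on whether the installed
# ceph-common is older than, equal to or newer than 14.2.0, so >= 0
# means "Nautilus or later".
nautilus_or_later = cmp_pkgrevno('ceph-common', '14.2.0') >= 0
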

View File

@@ -29,6 +29,7 @@ UBUNTU_RELEASES = (
'groovy',
'hirsute',
'impish',
'jammy',
)
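
Appending 'jammy' keeps the release tuple in chronological order, which matters because series comparisons are index-based. A small sketch, assuming the CompareHostReleases helper built on this tuple is importable from charm-helpers' host module:

from charmhelpers.core.host import CompareHostReleases

# 'jammy' sorts after 'impish' and 'focal', so release-gated code paths
# treat it as the newest supported series.
assert CompareHostReleases('jammy') > 'impish'
assert CompareHostReleases('jammy') > 'focal'
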

View File

@@ -275,6 +275,7 @@ UBUNTU_OPENSTACK_RELEASE = OrderedDict([
('groovy', 'victoria'),
('hirsute', 'wallaby'),
('impish', 'xena'),
('jammy', 'yoga'),
])
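
This table resolves an Ubuntu series to the OpenStack release it ships by default, so a Jammy deployment configured with source: distro lands on Yoga. Illustratively, with the OrderedDict above in scope (its exact module path is not shown in this view):

# The new entry maps the 22.04 series straight to Yoga.
assert UBUNTU_OPENSTACK_RELEASE['jammy'] == 'yoga'
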

View File

@@ -3169,6 +3169,8 @@ UPGRADE_PATHS = collections.OrderedDict([
('luminous', 'mimic'),
('mimic', 'nautilus'),
('nautilus', 'octopus'),
('octopus', 'pacific'),
('pacific', 'quincy'),
])
# Map UCA codenames to ceph codenames
@@ -3186,6 +3188,10 @@ UCA_CODENAME_MAP = {
'stein': 'mimic',
'train': 'nautilus',
'ussuri': 'octopus',
'victoria': 'octopus',
'wallaby': 'pacific',
'xena': 'pacific',
'yoga': 'quincy',
}
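
These two tables let the charm translate an OpenStack cloud-archive pocket into a Ceph release and walk supported upgrades one release at a time. A hedged sketch of what the new entries mean, assuming the tables live in the synced charms_ceph.utils module:

from charms_ceph.utils import UCA_CODENAME_MAP, UPGRADE_PATHS

# cloud:focal-yoga (UCA codename 'yoga') now installs Ceph Quincy,
# and Quincy is reachable from Pacific in a single upgrade step.
assert UCA_CODENAME_MAP['yoga'] == 'quincy'
assert UPGRADE_PATHS['octopus'] == 'pacific'
assert UPGRADE_PATHS['pacific'] == 'quincy'
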

View File

@@ -13,7 +13,6 @@ tags:
- file-servers
- misc
series:
- xenial
- bionic
- focal
- groovy

View File

@@ -1,52 +1,65 @@
- project:
templates:
- charm-unit-jobs
- charm-yoga-unit-jobs
check:
jobs:
- vault-bionic-queens
- vault-bionic-queens-namespaced
- vault-bionic-stein
- vault-bionic-stein-namespaced
- vault-bionic-ussuri
- vault-bionic-ussuri-namespaced
- vault-focal-ussuri-ec
- vault-focal-ussuri_rgw
- vault-focal-ussuri-namespaced
- vault-focal-victoria_rgw
- vault-focal-victoria-namespaced
- vault-focal-wallaby_rgw
- vault-focal-wallaby-namespaced
- vault-focal-xena_rgw
- vault-focal-xena-namespaced
- vault-focal-yoga_rgw:
voting: false
- vault-focal-yoga-namespaced:
voting: false
- vault-hirsute-wallaby_rgw
- vault-hirsute-wallaby-namespaced
- vault-impish-xena_rgw:
voting: false
- vault-impish-xena-namespaced:
voting: false
- vault-hirsute-wallaby_rgw
- vault-hirsute-wallaby-namespaced
- vault-focal-xena_rgw:
- vault-jammy-yoga_rgw:
voting: false
- vault-focal-xena-namespaced:
- vault-jammy-yoga-namespaced:
voting: false
- vault-focal-wallaby_rgw
- vault-focal-wallaby-namespaced
- vault-focal-victoria_rgw
- vault-focal-victoria-namespaced
- vault-focal-ussuri-ec
- vault-focal-ussuri_rgw
- vault-focal-ussuri-namespaced
- vault-bionic-ussuri
- vault-bionic-ussuri-namespaced
- vault-bionic-train
- vault-bionic-train-namespaced
- vault-bionic-stein
- vault-bionic-stein-namespaced
- vault-bionic-queens
- vault-bionic-queens-namespaced
- xenial-mitaka_rgw
- xenial-mitaka-namespaced
- job:
name: vault-bionic-ussuri
parent: func-target
dependencies:
- osci-lint
- tox-py35
- tox-py36
- tox-py37
- tox-py38
- tox-py39
vars:
tox_extra_args: vault:bionic-ussuri
- job:
name: vault-impish-xena_rgw
name: vault-jammy-yoga_rgw
parent: func-target
dependencies: &smoke-jobs
- vault-bionic-ussuri
vars:
tox_extra_args: vault:jammy-yoga
- job:
name: vault-jammy-yoga-namespaced
parent: func-target
dependencies: *smoke-jobs
vars:
tox_extra_args: vault:jammy-yoga-namespaced
- job:
name: vault-impish-xena_rgw
parent: func-target
dependencies: *smoke-jobs
vars:
tox_extra_args: vault:impish-xena
- job:
@@ -67,6 +80,18 @@
dependencies: *smoke-jobs
vars:
tox_extra_args: vault:hirsute-wallaby-namespaced
- job:
name: vault-focal-yoga_rgw
parent: func-target
dependencies: *smoke-jobs
vars:
tox_extra_args: vault:focal-yoga
- job:
name: vault-focal-yoga-namespaced
parent: func-target
dependencies: *smoke-jobs
vars:
tox_extra_args: vault:focal-yoga-namespaced
- job:
name: vault-focal-xena_rgw
parent: func-target
@@ -127,18 +152,6 @@
dependencies: *smoke-jobs
vars:
tox_extra_args: vault:bionic-ussuri-namespaced
- job:
name: vault-bionic-train
parent: func-target
dependencies: *smoke-jobs
vars:
tox_extra_args: vault:bionic-train
- job:
name: vault-bionic-train-namespaced
parent: func-target
dependencies: *smoke-jobs
vars:
tox_extra_args: vault:bionic-train-namespaced
- job:
name: vault-bionic-stein
parent: func-target
@@ -163,13 +176,3 @@
dependencies: *smoke-jobs
vars:
tox_extra_args: vault:bionic-queens-namespaced
- job:
name: xenial-mitaka_rgw
parent: xenial-mitaka
dependencies: *smoke-jobs
- job:
name: xenial-mitaka-namespaced
parent: func-target
dependencies: *smoke-jobs
vars:
tox_extra_args: xenial-mitaka-namespaced

View File

@@ -7,6 +7,8 @@
# requirements. They are intertwined. Also, Zaza itself should specify
# all of its own requirements and if it doesn't, fix it there.
#
pyparsing<3.0.0 # aodhclient is pinned in zaza and needs pyparsing < 3.0.0, but cffi also needs it, so pin here.
cffi==1.14.6; python_version < '3.6' # cffi 1.15.0 drops support for py35.
setuptools<50.0.0 # https://github.com/pypa/setuptools/commit/04e3df22df840c6bb244e9b27bc56750c44b7c85
requests>=2.18.4
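
The cffi line uses a PEP 508 environment marker so the pin only applies on interpreters older than 3.6. A quick sketch of how such a marker evaluates, using the packaging library; this is an illustration, not something the charm itself runs:

from packaging.markers import Marker

# Evaluates against the running interpreter: False on the py38/py39
# environments this charm now tests with, True only on py35.
print(Marker("python_version < '3.6'").evaluate())
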

View File

@@ -0,0 +1,117 @@
options:
source: &source cloud:focal-yoga
series: focal
comment:
- 'machines section to decide order of deployment. database sooner = faster'
machines:
'0':
constraints: mem=3072M
'1':
constraints: mem=3072M
'2':
constraints: mem=3072M
'3':
'4':
'5':
'6':
'7':
'8':
'9':
'10':
'11':
applications:
keystone-mysql-router:
charm: cs:~openstack-charmers-next/mysql-router
mysql-innodb-cluster:
charm: cs:~openstack-charmers-next/mysql-innodb-cluster
num_units: 3
options:
source: *source
to:
- '0'
- '1'
- '2'
ceph-radosgw:
charm: ceph-radosgw
num_units: 1
options:
source: *source
namespace-tenants: True
to:
- '3'
ceph-osd:
charm: cs:~openstack-charmers-next/ceph-osd
num_units: 3
constraints: "mem=2048"
storage:
osd-devices: 'cinder,10G'
options:
source: *source
osd-devices: '/srv/ceph /dev/test-non-existent'
to:
- '4'
- '5'
- '6'
ceph-mon:
charm: cs:~openstack-charmers-next/ceph-mon
num_units: 3
options:
source: *source
to:
- '7'
- '8'
- '9'
keystone:
expose: True
charm: cs:~openstack-charmers-next/keystone
num_units: 1
options:
openstack-origin: *source
to:
- '10'
vault-mysql-router:
charm: cs:~openstack-charmers-next/mysql-router
vault:
charm: cs:~openstack-charmers-next/vault
num_units: 1
to:
- '11'
relations:
- - 'keystone:shared-db'
- 'keystone-mysql-router:shared-db'
- - 'keystone-mysql-router:db-router'
- 'mysql-innodb-cluster:db-router'
- - 'ceph-osd:mon'
- 'ceph-mon:osd'
- - 'ceph-radosgw:mon'
- 'ceph-mon:radosgw'
- - 'ceph-radosgw:identity-service'
- 'keystone:identity-service'
- - 'vault-mysql-router:db-router'
- 'mysql-innodb-cluster:db-router'
- - 'vault:shared-db'
- 'vault-mysql-router:shared-db'
- - 'keystone:certificates'
- 'vault:certificates'
- - 'ceph-radosgw:certificates'
- 'vault:certificates'
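
The &source anchor at the top of the bundle is referenced via *source by every application, so all of them track the same cloud-archive pocket. A short, hedged check of that behaviour in Python; the file path is an assumption, since this commit view does not show file names:

import yaml

with open('tests/bundles/focal-yoga-namespaced.yaml') as f:  # hypothetical path
    bundle = yaml.safe_load(f)

# Every *source alias resolves to the single &source value.
assert bundle['options']['source'] == 'cloud:focal-yoga'
assert bundle['applications']['ceph-mon']['options']['source'] == 'cloud:focal-yoga'
assert bundle['applications']['keystone']['options']['openstack-origin'] == 'cloud:focal-yoga'
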

View File

@@ -0,0 +1,116 @@
options:
source: &source cloud:focal-yoga
series: focal
comment:
- 'machines section to decide order of deployment. database sooner = faster'
machines:
'0':
constraints: mem=3072M
'1':
constraints: mem=3072M
'2':
constraints: mem=3072M
'3':
'4':
'5':
'6':
'7':
'8':
'9':
'10':
'11':
applications:
keystone-mysql-router:
charm: cs:~openstack-charmers-next/mysql-router
mysql-innodb-cluster:
charm: cs:~openstack-charmers-next/mysql-innodb-cluster
num_units: 3
options:
source: *source
to:
- '0'
- '1'
- '2'
ceph-radosgw:
charm: ceph-radosgw
num_units: 1
options:
source: *source
to:
- '3'
ceph-osd:
charm: cs:~openstack-charmers-next/ceph-osd
num_units: 3
constraints: "mem=2048"
storage:
osd-devices: 'cinder,10G'
options:
source: *source
osd-devices: '/srv/ceph /dev/test-non-existent'
to:
- '4'
- '5'
- '6'
ceph-mon:
charm: cs:~openstack-charmers-next/ceph-mon
num_units: 3
options:
source: *source
to:
- '7'
- '8'
- '9'
keystone:
expose: True
charm: cs:~openstack-charmers-next/keystone
num_units: 1
options:
openstack-origin: *source
to:
- '10'
vault-mysql-router:
charm: cs:~openstack-charmers-next/mysql-router
vault:
charm: cs:~openstack-charmers-next/vault
num_units: 1
to:
- '11'
relations:
- - 'keystone:shared-db'
- 'keystone-mysql-router:shared-db'
- - 'keystone-mysql-router:db-router'
- 'mysql-innodb-cluster:db-router'
- - 'ceph-osd:mon'
- 'ceph-mon:osd'
- - 'ceph-radosgw:mon'
- 'ceph-mon:radosgw'
- - 'ceph-radosgw:identity-service'
- 'keystone:identity-service'
- - 'vault-mysql-router:db-router'
- 'mysql-innodb-cluster:db-router'
- - 'vault:shared-db'
- 'vault-mysql-router:shared-db'
- - 'keystone:certificates'
- 'vault:certificates'
- - 'ceph-radosgw:certificates'
- 'vault:certificates'

View File

@@ -1,7 +1,7 @@
options:
source: &source distro
series: groovy
series: jammy
comment:
- 'machines section to decide order of deployment. database sooner = faster'

View File

@@ -1,7 +1,7 @@
options:
source: &source distro
series: groovy
series: jammy
comment:
- 'machines section to decide order of deployment. database sooner = faster'

View File

@@ -1,42 +0,0 @@
options:
source: &source cloud:trusty-mitaka
series: trusty
applications:
ceph-radosgw:
charm: ceph-radosgw
series: trusty
num_units: 1
options:
source: *source
ceph-osd:
charm: cs:~openstack-charmers-next/ceph-osd
num_units: 3
constraints: "mem=2048"
storage:
osd-devices: 'cinder,10G'
options:
source: *source
osd-devices: '/srv/ceph /dev/test-non-existent'
ceph-mon:
charm: cs:~openstack-charmers-next/ceph-mon
num_units: 3
options:
source: *source
percona-cluster:
charm: cs:trusty/percona-cluster
num_units: 1
keystone:
expose: True
charm: cs:~openstack-charmers-next/keystone
num_units: 1
options:
openstack-origin: *source
relations:
- - keystone:shared-db
- percona-cluster:shared-db
- - ceph-osd:mon
- ceph-mon:osd
- - ceph-radosgw:mon
- ceph-mon:radosgw
- - ceph-radosgw:identity-service
- keystone:identity-service

View File

@@ -1,43 +0,0 @@
options:
source: &source distro
series: xenial
applications:
ceph-radosgw:
charm: ceph-radosgw
series: xenial
num_units: 1
options:
source: *source
namespace-tenants: True
ceph-osd:
charm: cs:~openstack-charmers-next/ceph-osd
num_units: 3
constraints: "mem=2048"
storage:
osd-devices: 'cinder,10G'
options:
source: *source
osd-devices: '/srv/ceph /dev/test-non-existent'
ceph-mon:
charm: cs:~openstack-charmers-next/ceph-mon
num_units: 3
options:
source: *source
percona-cluster:
charm: cs:~openstack-charmers-next/percona-cluster
num_units: 1
keystone:
expose: True
charm: cs:~openstack-charmers-next/keystone
num_units: 1
options:
openstack-origin: *source
relations:
- - keystone:shared-db
- percona-cluster:shared-db
- - ceph-osd:mon
- ceph-mon:osd
- - ceph-radosgw:mon
- ceph-mon:radosgw
- - ceph-radosgw:identity-service
- keystone:identity-service

View File

@@ -1,42 +0,0 @@
options:
source: &source distro
series: xenial
applications:
ceph-radosgw:
charm: ceph-radosgw
series: xenial
num_units: 1
options:
source: *source
ceph-osd:
charm: cs:~openstack-charmers-next/ceph-osd
num_units: 3
constraints: "mem=2048"
storage:
osd-devices: 'cinder,10G'
options:
source: *source
osd-devices: '/srv/ceph /dev/test-non-existent'
ceph-mon:
charm: cs:~openstack-charmers-next/ceph-mon
num_units: 3
options:
source: *source
percona-cluster:
charm: cs:~openstack-charmers-next/percona-cluster
num_units: 1
keystone:
expose: True
charm: cs:~openstack-charmers-next/keystone
num_units: 1
options:
openstack-origin: *source
relations:
- - keystone:shared-db
- percona-cluster:shared-db
- - ceph-osd:mon
- ceph-mon:osd
- - ceph-radosgw:mon
- ceph-mon:radosgw
- - ceph-radosgw:identity-service
- keystone:identity-service

View File

@@ -1,42 +0,0 @@
options:
source: &source cloud:xenial-ocata
series: xenial
applications:
ceph-radosgw:
charm: ceph-radosgw
series: xenial
num_units: 1
options:
source: *source
ceph-osd:
charm: cs:~openstack-charmers-next/ceph-osd
num_units: 3
constraints: "mem=2048"
storage:
osd-devices: 'cinder,10G'
options:
source: *source
osd-devices: '/srv/ceph /dev/test-non-existent'
ceph-mon:
charm: cs:~openstack-charmers-next/ceph-mon
num_units: 3
options:
source: *source
percona-cluster:
charm: cs:~openstack-charmers-next/percona-cluster
num_units: 1
keystone:
expose: True
charm: cs:~openstack-charmers-next/keystone
num_units: 1
options:
openstack-origin: *source
relations:
- - keystone:shared-db
- percona-cluster:shared-db
- - ceph-osd:mon
- ceph-mon:osd
- - ceph-radosgw:mon
- ceph-mon:radosgw
- - ceph-radosgw:identity-service
- keystone:identity-service

View File

@@ -1,42 +0,0 @@
options:
source: &source cloud:xenial-pike
series: xenial
applications:
ceph-radosgw:
charm: ceph-radosgw
series: xenial
num_units: 1
options:
source: *source
ceph-osd:
charm: cs:~openstack-charmers-next/ceph-osd
num_units: 3
constraints: "mem=2048"
storage:
osd-devices: 'cinder,10G'
options:
source: *source
osd-devices: '/srv/ceph /dev/test-non-existent'
ceph-mon:
charm: cs:~openstack-charmers-next/ceph-mon
num_units: 3
options:
source: *source
percona-cluster:
charm: cs:~openstack-charmers-next/percona-cluster
num_units: 1
keystone:
expose: True
charm: cs:~openstack-charmers-next/keystone
num_units: 1
options:
openstack-origin: *source
relations:
- - keystone:shared-db
- percona-cluster:shared-db
- - ceph-osd:mon
- ceph-mon:osd
- - ceph-radosgw:mon
- ceph-mon:radosgw
- - ceph-radosgw:identity-service
- keystone:identity-service

View File

@@ -1,51 +0,0 @@
options:
source: &source cloud:xenial-queens
series: xenial
applications:
ceph-radosgw:
charm: ceph-radosgw
series: xenial
num_units: 1
options:
source: *source
ceph-osd:
charm: cs:~openstack-charmers-next/ceph-osd
num_units: 3
constraints: "mem=2048"
storage:
osd-devices: 'cinder,10G'
options:
source: *source
osd-devices: '/srv/ceph /dev/test-non-existent'
ceph-mon:
charm: cs:~openstack-charmers-next/ceph-mon
num_units: 3
options:
source: *source
percona-cluster:
charm: cs:~openstack-charmers-next/percona-cluster
num_units: 1
keystone:
expose: True
charm: cs:~openstack-charmers-next/keystone
num_units: 1
options:
openstack-origin: *source
vault:
charm: cs:~openstack-charmers-next/vault
num_units: 1
relations:
- - keystone:shared-db
- percona-cluster:shared-db
- - ceph-osd:mon
- ceph-mon:osd
- - ceph-radosgw:mon
- ceph-mon:radosgw
- - ceph-radosgw:identity-service
- keystone:identity-service
- - vault:shared-db
- percona-cluster:shared-db
- - keystone:certificates
- vault:certificates
- - ceph-radosgw:certificates
- vault:certificates

View File

@@ -1,44 +1,40 @@
charm_name: ceph-radosgw
gate_bundles:
- vault: focal-xena
- vault: focal-xena-namespaced
- vault: focal-wallaby
- vault: focal-wallaby-namespaced
- vault: focal-victoria
- vault: focal-victoria-namespaced
- vault: focal-ussuri-ec
- vault: focal-ussuri
- vault: focal-ussuri-namespaced
- vault: bionic-ussuri
- vault: bionic-ussuri-namespaced
- vault: bionic-train
- vault: bionic-train-namespaced
- vault: bionic-stein
- vault: bionic-stein-namespaced
- vault: bionic-queens
- vault: bionic-queens-namespaced
- xenial-mitaka
- xenial-mitaka-namespaced
- vault: bionic-stein
- vault: bionic-stein-namespaced
- vault: bionic-ussuri
- vault: bionic-ussuri-namespaced
- vault: focal-ussuri
- vault: focal-ussuri-ec
- vault: focal-ussuri-namespaced
- vault: focal-victoria
- vault: focal-victoria-namespaced
- vault: focal-wallaby
- vault: focal-wallaby-namespaced
- vault: focal-xena
- vault: focal-xena-namespaced
- vault: hirsute-wallaby
- vault: hirsute-wallaby-namespaced
- vault: impish-xena
- vault: impish-xena-namespaced
smoke_bundles:
- vault: focal-ussuri
dev_bundles:
- trusty-mitaka
- xenial-ocata
- xenial-pike
- vault: xenial-queens
- bionic-queens-multisite
- bionic-rocky-multisite
- vault: bionic-rocky
- vault: bionic-rocky-namespaced
- vault: groovy-victoria
- vault: groovy-victoria-namespaced
- vault: hirsute-wallaby
- vault: hirsute-wallaby-namespaced
- vault: impish-xena
- vault: impish-xena-namespaced
- vault: bionic-train
- vault: bionic-train-namespaced
- vault: focal-yoga
- vault: focal-yoga-namespaced
- vault: jammy-yoga
- vault: jammy-yoga-namespaced
target_deploy_status:
vault:
@@ -61,7 +57,7 @@ tests_options:
force_deploy:
- hirsute-wallaby
- hirsute-wallaby-namespaced
- groovy-victoria
- groovy-victoria-namespaced
- impish-xena
- impish-xena-namespaced
- jammy-yoga
- jammy-yoga-namespaced

View File

@@ -61,6 +61,11 @@ basepython = python3.8
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
[testenv:py39]
basepython = python3.9
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
[testenv:py3]
basepython = python3
deps = -r{toxinidir}/requirements.txt