Update amulet test definitions

Also sync charm-helpers

This change requires the following charm-helpers change
to land first:

 - https://github.com/juju/charm-helpers/pull/32

Change-Id: I2c33e25e14ad49d65f5fc7eb000830086cf829c1
Authored by Ryan Beisner on 2017-09-28 03:53:03 +00:00; committed by David Ames
parent 60e7e79c3e
commit 34f39329c0
12 changed files with 96 additions and 54 deletions

@@ -858,9 +858,12 @@ class OpenStackAmuletUtils(AmuletUtils):
:returns: List of pool name, object count, kb disk space used
"""
df = self.get_ceph_df(sentry_unit)
pool_name = df['pools'][pool_id]['name']
obj_count = df['pools'][pool_id]['stats']['objects']
kb_used = df['pools'][pool_id]['stats']['kb_used']
for pool in df['pools']:
if pool['id'] == pool_id:
pool_name = pool['name']
obj_count = pool['stats']['objects']
kb_used = pool['stats']['kb_used']
self.log.debug('Ceph {} pool (ID {}): {} objects, '
'{} kb used'.format(pool_name, pool_id,
obj_count, kb_used))
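
The hunk above switches get_ceph_pool_sample() from indexing df['pools'] by position to matching each pool's 'id' field, since Ceph pool ids are not guaranteed to line up with list positions (for example after pools have been deleted). A minimal standalone sketch of the same lookup, assuming the JSON shape reported by `ceph df`; the helper name and sample data here are illustrative only:

    def pool_sample(ceph_df, pool_id):
        # Match on the pool's own id rather than trusting its list position.
        for pool in ceph_df['pools']:
            if pool['id'] == pool_id:
                return (pool['name'],
                        pool['stats']['objects'],
                        pool['stats']['kb_used'])
        raise KeyError('pool id {} not found'.format(pool_id))

    # Example: pool id 3 sits at list index 0.
    df = {'pools': [{'id': 3, 'name': 'cinder-ceph',
                     'stats': {'objects': 10, 'kb_used': 2048}}]}
    print(pool_sample(df, 3))  # ('cinder-ceph', 10, 2048)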

@@ -392,6 +392,8 @@ def get_swift_codename(version):
releases = UBUNTU_OPENSTACK_RELEASE
release = [k for k, v in six.iteritems(releases) if codename in v]
ret = subprocess.check_output(['apt-cache', 'policy', 'swift'])
if six.PY3:
ret = ret.decode('UTF-8')
if codename in ret or release[0] in ret:
return codename
elif len(codenames) == 1:
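
The two added lines handle subprocess.check_output() returning bytes on Python 3, where substring checks against a str would otherwise fail. A short illustration of the pattern (the command below is only an example, not the charm code; six is already a charm-helpers dependency):

    import subprocess
    import sys

    import six

    out = subprocess.check_output([sys.executable, '-c', 'print("pike")'])
    if six.PY3:
        # bytes on Python 3; decode before doing 'in' checks against str
        out = out.decode('UTF-8')
    assert 'pike' in out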

@@ -377,12 +377,12 @@ def get_mon_map(service):
try:
return json.loads(mon_status)
except ValueError as v:
log("Unable to parse mon_status json: {}. Error: {}".format(
mon_status, v.message))
log("Unable to parse mon_status json: {}. Error: {}"
.format(mon_status, str(v)))
raise
except CalledProcessError as e:
log("mon_status command failed with message: {}".format(
e.message))
log("mon_status command failed with message: {}"
.format(str(e)))
raise
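
Replacing e.message with str(e) is a Python 3 fix: exceptions no longer carry a .message attribute in Python 3 (it was already deprecated in 2.6), while str(e) works on both. For example:

    import json

    try:
        json.loads('not json')
    except ValueError as e:
        # str(e) is portable; e.message raises AttributeError on Python 3
        print('Unable to parse mon_status json. Error: {}'.format(str(e)))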

@@ -549,6 +549,8 @@ def write_file(path, content, owner='root', group='root', perms=0o444):
with open(path, 'wb') as target:
os.fchown(target.fileno(), uid, gid)
os.fchmod(target.fileno(), perms)
if six.PY3 and isinstance(content, six.string_types):
content = content.encode('UTF-8')
target.write(content)
return
# the contents were the same, but we might still need to change the
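
The added lines keep write_file() working under Python 3, where a file opened with 'wb' only accepts bytes, so text content is encoded to UTF-8 first. A minimal sketch of the same pattern (the path and content below are illustrative only):

    import six

    def write_bytes(path, content):
        with open(path, 'wb') as target:
            if six.PY3 and isinstance(content, six.string_types):
                # Python 3 str must be encoded before a binary write
                content = content.encode('UTF-8')
            target.write(content)

    write_bytes('/tmp/example.conf', u'some text\n')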

@@ -20,6 +20,7 @@ UBUNTU_RELEASES = (
'yakkety',
'zesty',
'artful',
'bionic',
)

@@ -65,7 +65,7 @@ class CinderCephBasicDeployment(OpenStackAmuletDeployment):
# Note: cinder-ceph becomes a cinder subordinate unit.
this_service = {'name': 'cinder-ceph'}
other_services = [
{'name': 'percona-cluster', 'constraints': {'mem': '3072M'}},
{'name': 'percona-cluster'},
{'name': 'keystone'},
{'name': 'rabbitmq-server'},
{'name': 'ceph', 'units': 3},
@@ -100,10 +100,8 @@ class CinderCephBasicDeployment(OpenStackAmuletDeployment):
'admin-token': 'ubuntutesting'
}
pxc_config = {
'dataset-size': '25%',
'innodb-buffer-pool-size': '256M',
'max-connections': 1000,
'root-password': 'ChangeMe123',
'sst-password': 'ChangeMe123',
}
cinder_config = {
'block-device': 'None',
@@ -245,18 +243,32 @@ class CinderCephBasicDeployment(OpenStackAmuletDeployment):
def test_110_users(self):
"""Verify expected users."""
u.log.debug('Checking keystone users...')
expected = [
{'name': 'cinder_cinderv2',
'enabled': True,
'tenantId': u.not_null,
'id': u.not_null,
'email': 'juju@localhost'},
{'name': 'admin',
'enabled': True,
'tenantId': u.not_null,
'id': u.not_null,
'email': 'juju@localhost'}
]
if self._get_openstack_release() < self.xenial_pike:
expected = [{
'name': 'cinder_cinderv2',
'enabled': True,
'tenantId': u.not_null,
'id': u.not_null,
'email': 'juju@localhost',
}]
else:
expected = [{
'name': 'cinderv3_cinderv2',
'enabled': True,
'tenantId': u.not_null,
'id': u.not_null,
'email': 'juju@localhost',
}]
expected.append({
'name': 'admin',
'enabled': True,
'tenantId': u.not_null,
'id': u.not_null,
'email': 'juju@localhost',
})
actual = self.keystone.users.list()
ret = u.validate_user_data(expected, actual)
if ret:
@@ -265,26 +277,26 @@ class CinderCephBasicDeployment(OpenStackAmuletDeployment):
def test_112_service_catalog(self):
"""Verify that the service catalog endpoint data"""
u.log.debug('Checking keystone service catalog...')
endpoint_vol = {
'adminURL': u.valid_url,
'region': 'RegionOne',
'publicURL': u.valid_url,
'internalURL': u.valid_url
}
endpoint_id = {
'adminURL': u.valid_url,
'region': 'RegionOne',
'publicURL': u.valid_url,
'internalURL': u.valid_url
}
endpoint_vol = {'adminURL': u.valid_url,
'region': 'RegionOne',
'publicURL': u.valid_url,
'internalURL': u.valid_url}
endpoint_id = {'adminURL': u.valid_url,
'region': 'RegionOne',
'publicURL': u.valid_url,
'internalURL': u.valid_url}
if self._get_openstack_release() >= self.trusty_icehouse:
endpoint_vol['id'] = u.not_null
endpoint_id['id'] = u.not_null
expected = {
'identity': [endpoint_id],
'volume': [endpoint_id]
}
if self._get_openstack_release() >= self.xenial_pike:
# Pike and later
expected = {'identity': [endpoint_id],
'volumev2': [endpoint_id]}
else:
# Ocata and prior
expected = {'identity': [endpoint_id],
'volume': [endpoint_id]}
actual = self.keystone.service_catalog.get_endpoints()
ret = u.validate_svc_catalog_endpoint_data(expected, actual)
@@ -491,10 +503,17 @@ class CinderCephBasicDeployment(OpenStackAmuletDeployment):
'auth_protocol': 'http',
'private-address': u.valid_ip,
'auth_host': u.valid_ip,
'service_username': 'cinder_cinderv2',
'service_tenant_id': u.not_null,
'service_host': u.valid_ip
}
if self._get_openstack_release() < self.xenial_pike:
# Ocata and earlier
expected['service_username'] = 'cinder_cinderv2'
else:
# Pike and later
expected['service_username'] = 'cinderv3_cinderv2'
ret = u.validate_relation_data(unit, relation, expected)
if ret:
msg = u.relation_error('identity-service cinder', ret)
@@ -507,11 +526,6 @@ class CinderCephBasicDeployment(OpenStackAmuletDeployment):
relation = ['identity-service',
'keystone:identity-service']
expected = {
'cinder_service': 'cinder',
'cinder_region': 'RegionOne',
'cinder_public_url': u.valid_url,
'cinder_internal_url': u.valid_url,
'cinder_admin_url': u.valid_url,
'private-address': u.valid_ip
}
ret = u.validate_relation_data(unit, relation, expected)
@@ -654,6 +668,12 @@ class CinderCephBasicDeployment(OpenStackAmuletDeployment):
if 'cinder-ceph' not in expected_pools:
expected_pools.append('cinder-ceph')
if (self._get_openstack_release() >= self.xenial_ocata and
'cinder' in expected_pools):
# No cinder after mitaka because we don't use the relation in this
# test
expected_pools.remove('cinder')
results = []
sentries = [
self.ceph0_sentry,
@@ -700,6 +720,8 @@ class CinderCephBasicDeployment(OpenStackAmuletDeployment):
# Check ceph cinder pool object count, disk space usage and pool name
u.log.debug('Checking ceph cinder pool original samples...')
u.log.debug('cinder-ceph pool: {}'.format(cinder_ceph_pool))
u.log.debug('Checking ceph cinder pool original samples...')
pool_name, obj_count, kb_used = u.get_ceph_pool_sample(
sentry_unit, cinder_ceph_pool)
@@ -741,11 +763,14 @@ class CinderCephBasicDeployment(OpenStackAmuletDeployment):
if ret:
amulet.raise_status(amulet.FAIL, msg=ret)
# Validate ceph cinder pool disk space usage samples over time
ret = u.validate_ceph_pool_samples(pool_size_samples,
"cinder pool disk usage")
if ret:
amulet.raise_status(amulet.FAIL, msg=ret)
# Luminous (pike) ceph seems more efficient at disk usage so we cannot
# guarantee the ordering of kb_used
if self._get_openstack_release() < self.xenial_mitaka:
# Validate ceph cinder pool disk space usage samples over time
ret = u.validate_ceph_pool_samples(pool_size_samples,
"cinder pool disk usage")
if ret:
amulet.raise_status(amulet.FAIL, msg=ret)
def test_499_ceph_cmds_exit_zero(self):
"""Check basic functionality of ceph cli commands against

@@ -858,9 +858,12 @@ class OpenStackAmuletUtils(AmuletUtils):
:returns: List of pool name, object count, kb disk space used
"""
df = self.get_ceph_df(sentry_unit)
pool_name = df['pools'][pool_id]['name']
obj_count = df['pools'][pool_id]['stats']['objects']
kb_used = df['pools'][pool_id]['stats']['kb_used']
for pool in df['pools']:
if pool['id'] == pool_id:
pool_name = pool['name']
obj_count = pool['stats']['objects']
kb_used = pool['stats']['kb_used']
self.log.debug('Ceph {} pool (ID {}): {} objects, '
'{} kb used'.format(pool_name, pool_id,
obj_count, kb_used))

@@ -549,6 +549,8 @@ def write_file(path, content, owner='root', group='root', perms=0o444):
with open(path, 'wb') as target:
os.fchown(target.fileno(), uid, gid)
os.fchmod(target.fileno(), perms)
if six.PY3 and isinstance(content, six.string_types):
content = content.encode('UTF-8')
target.write(content)
return
# the contents were the same, but we might still need to change the

@@ -20,6 +20,7 @@ UBUNTU_RELEASES = (
'yakkety',
'zesty',
'artful',
'bionic',
)

@@ -21,3 +21,6 @@ from basic_deployment import CinderCephBasicDeployment
if __name__ == '__main__':
deployment = CinderCephBasicDeployment(series='artful')
deployment.run_tests()
# NOTE(beisner): Artful target disabled, pending bug:
# https://bugs.launchpad.net/charm-percona-cluster/+bug/1728132

tests/gate-basic-xenial-pike changed from Normal file → Executable file (mode change only, 0 lines changed)

@@ -60,7 +60,7 @@ basepython = python2.7
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
commands =
bundletester -vl DEBUG -r json -o func-results.json gate-basic-xenial-mitaka --no-destroy
bundletester -vl DEBUG -r json -o func-results.json gate-basic-xenial-pike --no-destroy
[testenv:func27-dfs]
# Charm Functional Test