Create tests for patching feature

Add the first test for the patching environment, which
deploys a cluster and applies patches on it.
Environment configuration (deployment test) is chosen
automatically based on packages tags.
Tests require errata.yaml which should include scenarios
for applying/verifying patches.
Additional variables required by patching tests:
 * PATCHING_MIRRORS - list of repositories with patched
   packages;
 * PATCHING_WEB_DIR - directory on master node which is
   available via HTTP (used for new repository);
 * PATCHING_BUG_ID - bug ID which is going to be fixed;
 * PATCHING_PKGS_TESTS - local path or URL to destination
   where test plans for packages are stored;
 * PATCHING_APPLY_TESTS - local path or URL which should
   be used to get test plans for bugs (using bug ID);
 * PATCHING_PKGS - list of patched packages; if empty, the
   list is taken from the test plan (errata.yaml) or from the
   repository.

Implements: blueprint mos-patching-tests

Change-Id: Ic696a49bd12504c9e045cf824248ff09bb7d53cb
This commit is contained in:
NastyaUrlapova 2015-02-19 12:07:37 +03:00 committed by Artem Panchenko
parent fcb6fe24f6
commit 17d34057c7
16 changed files with 745 additions and 8 deletions

View File

@ -309,3 +309,20 @@ class FuelPluginBuilder(object):
with open(new_file, 'w') as f_new:
yaml.dump(origin_yaml, f_new)
class CobblerActions(BaseActions):
    """Actions executed inside the 'cobbler' container on the master node."""

    def __init__(self, admin_remote):
        super(CobblerActions, self).__init__(admin_remote)
        # Name of the container execute_in_container() targets.
        self.container = 'cobbler'

    def add_dns_upstream_server(self, dns_server_ip):
        """Append *dns_server_ip* as an upstream DNS server and restart dnsmasq.

        sed's '$a' appends the 'nameserver <ip>' line at the end of
        /etc/dnsmasq.upstream; the restart makes dnsmasq pick it up.
        """
        self.execute_in_container(
            command="sed '$anameserver {0}' -i /etc/dnsmasq.upstream".format(
                dns_server_ip),
            exit_code=0,
        )
        self.execute_in_container(
            command='service dnsmasq restart',
            exit_code=0
        )

View File

@ -0,0 +1,475 @@
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import re
import yaml
import zlib
from urllib2 import HTTPError
from urllib2 import urlopen
from urlparse import urlparse
from xml.dom.minidom import parseString
from proboscis import register
from proboscis import TestProgram
from proboscis.asserts import assert_equal
from proboscis.asserts import assert_is_not_none
from proboscis.asserts import assert_not_equal
from proboscis.asserts import assert_true
from fuelweb_test import settings
from fuel_actions import CobblerActions
# Validation schema for a single step of a patch apply/verify scenario
# (errata.yaml).  Each entry maps a step field name to:
#   'required'  - whether the field must be present and non-None;
#   'values'    - allowed value(s) for the field (optional);
#   'data_type' - exact Python type expected for the field value.
patching_validation_schema = {
    'type': {
        'required': True,
        'values': ['service_stop', 'service_start', 'service_restart',
                   'server_down', 'server_up', 'server_reboot',
                   'run_command', 'upload_script'],
        'data_type': str
    },
    'target': {
        'required': True,
        'values': {'master', 'slaves', 'controller_role', 'compute_role',
                   'cinder_role', 'ceph-osd_role', 'mongo_role',
                   'zabbix-server_role', 'base-os_role'},
        'data_type': list
    },
    'service': {
        'required': False,
        'data_type': str
    },
    'command': {
        'required': False,
        'data_type': str
    },
    'script': {
        'required': False,
        'data_type': str
    },
    'upload_path': {
        'required': False,
        'data_type': str
    },
    'id': {
        'required': True,
        'data_type': int
    }
}
def map_test():
    """Build the dynamic test plan for patching tests.

    Loads and validates errata.yaml for PATCHING_BUG_ID, determines the
    set of patched packages (from the errata or from the patching
    mirrors), maps the packages to test group tags, and registers the
    'prepare_patching_environment' group to depend on the first
    deployment test that carries all of those tags.

    Raises an exception when no matching deployment test exists.
    """
    assert_is_not_none(settings.PATCHING_BUG_ID,
                       "Bug ID wasn't specified, can't start patching tests!")
    errata = get_errata(path=settings.PATCHING_APPLY_TESTS,
                        bug_id=settings.PATCHING_BUG_ID)
    verify_errata(errata)
    # An explicit package list in the errata wins over auto-discovery.
    if 'pkgs' in errata.keys():
        if settings.OPENSTACK_RELEASE_CENTOS in settings.OPENSTACK_RELEASE:
            settings.PATCHING_PKGS = set(errata['pkgs']['centos'])
        else:
            settings.PATCHING_PKGS = set(errata['pkgs']['ubuntu'])
    available_packages = set()
    for repo in settings.PATCHING_MIRRORS:
        available_packages.update(get_repository_packages(repo))
    if not settings.PATCHING_PKGS:
        settings.PATCHING_PKGS = available_packages
    else:
        # Every requested package must exist in the patching mirrors.
        assert_true(settings.PATCHING_PKGS <= available_packages,
                    "Patching repositories don't contain all packages needed "
                    "for tests. Need: {0}, but available: {1}.".format(
                        settings.PATCHING_PKGS, available_packages))
    assert_not_equal(len(settings.PATCHING_PKGS), 0,
                     "No packages found in repository(s) for patching:"
                     " '{0}'".format(settings.PATCHING_MIRRORS))
    tests_groups = get_packages_tests(settings.PATCHING_PKGS)
    program = TestProgram(argv=['none'])
    deployment_test = None
    # Pick the first registered test whose groups cover every required tag.
    for my_test in program.plan.tests:
        if all(patching_group in my_test.entry.info.groups for
               patching_group in tests_groups):
            deployment_test = my_test
            break
    if deployment_test:
        # NOTE(review): im_func/func_name are Python 2-only attributes.
        settings.PATCHING_SNAPSHOT = 'patching_after_{0}'.format(
            deployment_test.entry.method.im_func.func_name)
        register(groups=['prepare_patching_environment'],
                 depends_on=[deployment_test.entry.home])
    else:
        raise Exception("Test with groups {0} not found.".format(tests_groups))
def get_repository_packages(remote_repo_url):
    """Return the package names published in a remote repository.

    For Ubuntu releases the flat 'Packages' index is fetched and parsed;
    otherwise the repo is treated as yum-style and names are taken from
    the gzipped repodata/primary.xml.
    """
    repo_url = urlparse(remote_repo_url)
    packages = []
    if settings.OPENSTACK_RELEASE == settings.OPENSTACK_RELEASE_UBUNTU:
        packages_url = '{0}/Packages'.format(repo_url.geturl())
        pkgs_raw = urlopen(packages_url).read()
        for pkg in pkgs_raw.split('\n'):
            match = re.search(r'^Package: (\S+)\s*$', pkg)
            if match:
                packages.append(match.group(1))
    else:
        packages_url = '{0}/repodata/primary.xml.gz'.format(repo_url.geturl())
        # MAX_WBITS | 32 tells zlib to auto-detect the gzip header.
        pkgs_xml = parseString(zlib.decompressobj(zlib.MAX_WBITS | 32).
                               decompress(urlopen(packages_url).read()))
        for pkg in pkgs_xml.getElementsByTagName('package'):
            packages.append(
                pkg.getElementsByTagName('name')[0].firstChild.nodeValue)
    return packages
def _get_target_and_project(_pkg, _all_pkgs):
for _installation_target in _all_pkgs.keys():
for _project in _all_pkgs[_installation_target]['projects']:
if _pkg in _project['packages']:
return _installation_target, _project['name']
def get_package_test_info_remote(package, pkg_type, tests_url):
    """Collect system test tags for *package* from a remote tests repo.

    Resolves the package's installation target and project via
    ``<tests_url>/<pkg_type>/packages.yaml`` and merges the
    ``system_tests: tags`` entries of the target-, project- and
    package-level ``test.yaml`` files (missing files are skipped).

    :param package: package name, e.g. 'neutron-server'
    :param pkg_type: 'deb' or 'rpm'
    :param tests_url: base URL of the packages tests repository
    :return: set of test group tags
    """
    packages_url = "{0}/{1}/packages.yaml".format(tests_url, pkg_type)
    tests = set()
    tests_file = 'test.yaml'
    # NOTE(review): yaml.load on remote (untrusted) data can construct
    # arbitrary Python objects; consider yaml.safe_load.
    all_packages = yaml.load(urlopen(packages_url).read())
    # Resolve once (the original called the lookup twice, walking the
    # whole packages.yaml mapping both times).
    target_and_project = _get_target_and_project(package, all_packages)
    assert_is_not_none(target_and_project,
                       "Package '{0}' doesn't belong to any installation "
                       "target / project".format(package))
    target, project = target_and_project
    target_tests_url = "/".join((tests_url, pkg_type, target, tests_file))
    project_tests_url = "/".join((tests_url, pkg_type, target, project,
                                  tests_file))
    package_tests_url = "/".join((tests_url, pkg_type, target, project,
                                  package, tests_file))
    for url in (target_tests_url, project_tests_url, package_tests_url):
        try:
            test = yaml.load(urlopen(url).read())
            if 'system_tests' in test.keys():
                tests.update(test['system_tests']['tags'])
        except HTTPError:
            # A missing test.yaml at this level is not an error.
            pass
    return tests
def get_package_test_info_local(package, pkg_type, tests_path):
    """Collect system test tags for *package* from a local tests folder.

    Same lookup as get_package_test_info_remote(), but against a local
    directory tree: missing test.yaml files are skipped.

    :param package: package name, e.g. 'neutron-server'
    :param pkg_type: 'deb' or 'rpm'
    :param tests_path: base path of the packages tests folder
    :return: set of test group tags
    """
    packages_path = "{0}/{1}/packages.yaml".format(tests_path, pkg_type)
    tests = set()
    tests_file = 'test.yaml'
    # 'with' closes the file (the original leaked the handle).
    with open(packages_path) as pkgs_file:
        all_packages = yaml.load(pkgs_file.read())
    # Resolve once (the original called the lookup twice).
    target_and_project = _get_target_and_project(package, all_packages)
    assert_is_not_none(target_and_project,
                       "Package '{0}' doesn't belong to any installation "
                       "target / project".format(package))
    target, project = target_and_project
    target_tests_path = "/".join((tests_path, pkg_type, target, tests_file))
    project_tests_path = "/".join((tests_path, pkg_type, target, project,
                                   tests_file))
    package_tests_path = "/".join((tests_path, pkg_type, target, project,
                                   package, tests_file))
    for path in (target_tests_path, project_tests_path, package_tests_path):
        try:
            with open(path) as tests_descr:
                test = yaml.load(tests_descr.read())
            if 'system_tests' in test.keys():
                tests.update(test['system_tests']['tags'])
        except IOError:
            # A missing test.yaml at this level is not an error.
            pass
    return tests
def get_packages_tests(packages):
    """Map patched *packages* to the set of test groups covering them.

    Chooses the remote or local lookup depending on whether
    PATCHING_PKGS_TESTS is an http(s) URL or an existing directory.

    :raises Exception: when PATCHING_PKGS_TESTS is neither.
    """
    # Check the URL scheme explicitly: the previous
    # `'http' in urlparse(...)` tuple-membership test only matched a
    # scheme equal to 'http', so 'https' URLs were rejected.
    if urlparse(settings.PATCHING_PKGS_TESTS).scheme in ('http', 'https'):
        get_method = get_package_test_info_remote
    elif os.path.isdir(settings.PATCHING_PKGS_TESTS):
        get_method = get_package_test_info_local
    else:
        raise Exception("Path for packages tests doesn't look like URL or loca"
                        "l folder: '{0}'".format(settings.PATCHING_PKGS_TESTS))
    if settings.OPENSTACK_RELEASE == settings.OPENSTACK_RELEASE_UBUNTU:
        pkg_type = 'deb'
    else:
        pkg_type = 'rpm'
    packages_tests = set()
    for package in packages:
        tests = get_method(package, pkg_type, settings.PATCHING_PKGS_TESTS)
        assert_true(len(tests) > 0,
                    "Tests for package {0} not found".format(package))
        packages_tests.update(tests)
    return packages_tests
def enable_local_dns_resolving(environment):
    """Let the master node resolve external names through the devops router."""
    admin_remote = environment.d_env.get_admin_remote()
    router_ip = environment.d_env.router()
    # Add router IP to the DNS servers list on master node
    fuel_cobbler_actions = CobblerActions(admin_remote=admin_remote)
    fuel_cobbler_actions.add_dns_upstream_server(router_ip)
def mirror_remote_repository(admin_remote, remote_repo_url, local_repo_path):
    """Mirror a remote repository into *local_repo_path* on the master node.

    Runs wget on the master: --cut-dirs strips all URL path components
    so the repo content lands directly under *local_repo_path*; index
    pages, gifs and the 'repocache' directory are excluded.
    """
    repo_url = urlparse(remote_repo_url)
    cut_dirs = len(repo_url.path.strip('/').split('/'))
    download_cmd = ('wget --recursive --no-parent --no-verbose --reject "index'
                    '.html*,*.gif" --exclude-directories "{pwd}/repocache" '
                    '--directory-prefix {path} -nH --cut-dirs={cutd} {url}').\
        format(pwd=repo_url.path.rstrip('/'), path=local_repo_path,
               cutd=cut_dirs, url=repo_url.geturl())
    result = admin_remote.execute(download_cmd)
    assert_equal(result['exit_code'], 0, 'Mirroring of remote packages '
                                         'repository failed: {0}'.format(
                                             result))
def add_remote_repositories(environment):
    """Mirror every PATCHING_MIRRORS repository onto the master node.

    Each mirror is downloaded into PATCHING_WEB_DIR under a unique
    'custom_repo_<N>' name; the set of created repo names is returned.
    """
    repositories = set()
    # enumerate() instead of list.index(): index() rescans the list on
    # every iteration and returns the first match, which produces
    # colliding names when the same mirror URL appears twice.
    for num, mir in enumerate(settings.PATCHING_MIRRORS):
        name = 'custom_repo_{0}'.format(num)
        local_repo_path = '/'.join([settings.PATCHING_WEB_DIR, name])
        mirror_remote_repository(
            admin_remote=environment.d_env.get_admin_remote(),
            remote_repo_url=mir,
            local_repo_path=local_repo_path)
        repositories.add(name)
    return repositories
def connect_slaves_to_repo(environment, nodes, repo_name):
    """Configure every node in *nodes* to use a repo served by the master.

    The repository is expected at http://<master>:8080/<repo_name>/;
    apt or yum is configured depending on the OpenStack release.
    """
    repourl = 'http://{master_ip}:8080/{repo_name}/'.format(
        master_ip=environment.get_admin_node_ip(), repo_name=repo_name)
    if settings.OPENSTACK_RELEASE == settings.OPENSTACK_RELEASE_UBUNTU:
        cmds = [
            "sed -e '$adeb {repourl} /' -i /etc/apt/sources.list".format(
                repourl=repourl),
            "apt-key add <(curl -s '{repourl}/Release.key')".format(
                repourl=repourl),
            "apt-get update"
        ]
    else:
        cmds = [
            "/usr/bin/yum-config-manager --add-repo {repourl} "
            "--setopt=gpgcheck=0".format(repourl=repourl),
            "yum -y clean all",
            # 'yum check-update' exits 100 when updates are available,
            # which is the expected (success) state here.
            "yum check-update; [[ $? -eq 100 ]]"
        ]
    for slave in nodes:
        remote = environment.d_env.get_ssh_to_remote(slave['ip'])
        for cmd in cmds:
            environment.execute_remote_cmd(remote, cmd, exit_code=0)
def update_packages(environment, remote, packages, exclude_packages=None):
    """Update *packages* on a single node via apt or yum.

    :param remote: SSH connection to the node.
    :param packages: iterable of package names; an empty iterable means
        'all available updates'.
    :param exclude_packages: iterable of package names to hold back.
    """
    if settings.OPENSTACK_RELEASE == settings.OPENSTACK_RELEASE_UBUNTU:
        cmds = [
            'apt-get -y upgrade {0}'.format(' '.join(packages))
        ]
        if exclude_packages:
            # Pin excluded packages with 'apt-mark hold' before upgrading.
            exclude_commands = ["apt-mark hold {0}".format(pkg)
                                for pkg in exclude_packages]
            cmds = exclude_commands + cmds
    else:
        cmds = [
            "yum -y update --nogpgcheck {0} -x '{1}'".format(
                ' '.join(packages), ','.join(exclude_packages or []))
        ]
    for cmd in cmds:
        environment.execute_remote_cmd(remote, cmd, exit_code=0)
def update_packages_on_slaves(environment, slaves, packages=None,
                              exclude_packages=None):
    """Update packages on every slave node.

    :param packages: iterable of package names; falsy means 'install all
        available updates'.
    :param exclude_packages: iterable of package names to hold back.
    """
    if not packages:
        # Install all updates.  An empty list makes ' '.join(packages)
        # expand to nothing downstream.  (The previous sentinel was the
        # string ' ', which str.join iterates character by character.)
        packages = []
    for slave in slaves:
        remote = environment.d_env.get_ssh_to_remote(slave['ip'])
        update_packages(environment, remote, packages, exclude_packages)
def get_slaves_ips_by_role(slaves, role=None):
    """Return the IP addresses of *slaves*, filtered by *role* when given.

    :param slaves: nailgun node dicts with 'ip' and 'roles' keys.
    :param role: nailgun role name; falsy selects all slaves.
    """
    if not role:
        return [node['ip'] for node in slaves]
    return [node['ip'] for node in slaves if role in node['roles']]
def get_devops_slaves_by_role(env, slaves, role=None):
    """Map nailgun *slaves* to devops node objects, optionally by *role*.

    :param env: environment object exposing fuel_web and d_env.
    :param slaves: nailgun node dicts with 'fqdn' and 'roles' keys.
    :param role: nailgun role name; falsy selects all slaves.
    """
    if role:
        candidates = [node for node in slaves if role in node['roles']]
    else:
        candidates = slaves
    return [env.fuel_web.find_devops_node_by_nailgun_fqdn(
        node['fqdn'], env.d_env.nodes().slaves) for node in candidates]
def verify_fix_apply_step(apply_step):
    """Validate one scenario step against patching_validation_schema.

    Checks that required fields are present (non-None), that values are
    among the allowed ones, and that each field has the expected exact
    Python type; fails via proboscis assertions otherwise.
    """
    validation_schema = patching_validation_schema
    for key in validation_schema.keys():
        # Presence check: a field explicitly set to None counts as missing.
        if key in apply_step.keys():
            is_exists = apply_step[key] is not None
        else:
            is_exists = False
        if validation_schema[key]['required']:
            assert_true(is_exists, "Required field '{0}' not found in patch "
                                   "apply scenario step".format(key))
        if not is_exists:
            continue
        # Allowed-values check (only for fields that declare 'values').
        is_valid = True
        if 'values' in validation_schema[key].keys():
            if validation_schema[key]['data_type'] == str:
                is_valid = apply_step[key] in validation_schema[key]['values']
            elif validation_schema[key]['data_type'] in (list, set):
                is_valid = set(apply_step[key]) <= \
                    validation_schema[key]['values']
        assert_true(is_valid, 'Step in patch apply actions scenario '
                              'contains incorrect data: "{key}": "{value}"'
                              '. Supported values for "{key}" are '
                              '"{valid}"'.format(
                                  key=key,
                                  value=apply_step[key],
                                  valid=validation_schema[key]['values']))
        # Exact type check ('type(...) is ...', so subclasses do not pass).
        if 'data_type' in validation_schema[key].keys():
            assert_true(type(apply_step[key]) is
                        validation_schema[key]['data_type'],
                        "Unexpected data type in patch apply scenario step: '"
                        "{key}' is '{type}', but expecting '{expect}'.".format(
                            key=key,
                            type=type(apply_step[key]),
                            expect=validation_schema[key]['data_type']))
def validate_fix_apply_step(apply_step, environment, slaves):
    """Translate a validated scenario step into executable actions.

    :return: tuple ``(command, remotes, devops_action, devops_nodes)``:
        *command* is a shell command string, or an
        ('UPLOAD', script, upload_path) marker tuple for upload steps;
        *remotes* are SSH connections to run the command on (empty when
        there is no command); *devops_action* is '' or one of
        'down'/'up'/'reboot'; *devops_nodes* are the devops nodes the
        action applies to (empty when there is no devops action).
    """
    verify_fix_apply_step(apply_step)
    command = ''
    remotes_ips = set()
    devops_action = ''
    devops_nodes = set()
    # Resolve step targets into node IPs and devops node objects.
    for target in apply_step['target']:
        if target == 'master':
            remotes_ips.add(environment.get_admin_node_ip())
            devops_nodes.add(
                environment.d_env.nodes().admin)
        elif target == 'slaves':
            remotes_ips.update(get_slaves_ips_by_role(slaves, role=None))
            devops_nodes.update(get_devops_slaves_by_role(environment, slaves))
        else:
            # Targets like 'controller_role' select slaves by nailgun role.
            role = target.split('_role')[0]
            remotes_ips.update(get_slaves_ips_by_role(slaves, role))
            devops_nodes.update(get_devops_slaves_by_role(environment, slaves,
                                                          role=role))
    if apply_step['type'] in ('service_stop', 'service_start',
                              'service_restart'):
        assert_true(len(apply_step['service'] or '') > 0,
                    "Step #{0} in apply patch scenario perform '{1}', but "
                    "service isn't specified".format(apply_step['id'],
                                                     apply_step['type']))
        action = apply_step['type'].split('service_')[1]
        # Match the init script by regex and run the requested action on it.
        command = ("find /etc/init.d/ -regex '/etc/init.d/{service}' -printf "
                   "'%f\n' -quit | xargs -i service {{}} {action}").format(
            service=apply_step['service'], action=action)
    elif apply_step['type'] in ('server_down', 'server_up', 'server_reboot'):
        devops_action = apply_step['type'].split('server_')[1]
    elif apply_step['type'] == 'upload_script':
        assert_true(len(apply_step['script'] or '') > 0,
                    "Step #{0} in apply patch scenario perform '{1}', but "
                    "script isn't specified".format(apply_step['id'],
                                                    apply_step['type']))
        assert_true(len(apply_step['upload_path'] or '') > 0,
                    "Step #{0} in apply patch scenario perform '{1}', but "
                    "upload path isn't specified".format(apply_step['id'],
                                                         apply_step['type']))
        # Marker tuple; run_actions() turns it into an actual upload command.
        command = ('UPLOAD', apply_step['script'], apply_step['upload_path'])
    else:
        assert_true(len(apply_step['command'] or '') > 0,
                    "Step #{0} in apply patch scenario perform '{1}', but "
                    "command isn't specified".format(apply_step['id'],
                                                     apply_step['type']))
        command = apply_step['command']
    # Open SSH connections only when there is a command to run.
    remotes = [environment.d_env.get_ssh_to_remote(ip) for ip in remotes_ips] \
        if command else []
    devops_nodes = devops_nodes if devops_action else []
    return command, remotes, devops_action, devops_nodes
def get_errata(path, bug_id):
    """Load the errata.yaml scenario document for *bug_id* from *path*.

    :raises Exception: when PATCHING_APPLY_TESTS is neither an http(s)
        URL nor an existing local directory.
    """
    scenario_path = '{0}/bugs/{1}/errata.yaml'.format(path, bug_id)
    # Check the URL scheme explicitly: the previous
    # `'http' in urlparse(...)` tuple-membership test only matched a
    # scheme equal to 'http', so 'https' URLs were rejected.
    if urlparse(settings.PATCHING_APPLY_TESTS).scheme in ('http', 'https'):
        return yaml.load(urlopen(scenario_path).read())
    elif os.path.isdir(settings.PATCHING_APPLY_TESTS):
        with open(scenario_path) as f:
            return yaml.load(f.read())
    else:
        raise Exception("Path to patching tests doesn't look like URL or local"
                        " folder: '{0}'".format(settings.PATCHING_APPLY_TESTS))
def get_script_content(path, bug_id, script):
    """Return the text of a fix *script* shipped with the bug's errata.

    :raises Exception: when PATCHING_APPLY_TESTS is neither an http(s)
        URL nor an existing local directory (the original silently
        returned None in that case, unlike its sibling get_errata()).
    """
    scripts_path = '{0}/bugs/{1}/tests/{2}'.format(path, bug_id, script)
    # Scheme check instead of `'http' in urlparse(...)`, which only
    # matched a scheme equal to 'http' (https was rejected).
    if urlparse(settings.PATCHING_APPLY_TESTS).scheme in ('http', 'https'):
        return urlopen(scripts_path).read()
    elif os.path.isdir(settings.PATCHING_APPLY_TESTS):
        with open(scripts_path) as f:
            return f.read()
    raise Exception("Path to patching tests doesn't look like URL or local"
                    " folder: '{0}'".format(settings.PATCHING_APPLY_TESTS))
def verify_errata(errata):
    """Validate every step of both scenarios in an *errata* document.

    Steps of 'patch-scenario' and 'verify-scenario' are checked in
    ascending 'id' order via verify_fix_apply_step().
    """
    for action_type in ('patch-scenario', 'verify-scenario'):
        ordered_steps = sorted(errata[action_type]['actions'],
                               key=lambda step: step['id'])
        for step in ordered_steps:
            verify_fix_apply_step(step)
def run_actions(environment, slaves, action_type='patch-scenario'):
    """Execute one errata scenario ('patch-scenario' or 'verify-scenario').

    Steps are run in ascending 'id' order; shell commands are executed
    over SSH on the resolved targets, and server down/up/reboot steps
    are performed through the devops layer.
    """
    errata = get_errata(path=settings.PATCHING_APPLY_TESTS,
                        bug_id=settings.PATCHING_BUG_ID)
    scenario = sorted(errata[action_type]['actions'],
                      key=lambda k: k['id'])
    for step in scenario:
        command, remotes, devops_action, devops_nodes = \
            validate_fix_apply_step(step, environment, slaves)
        # Upload steps come back as an ('UPLOAD', script, path) tuple.
        # Check the marker explicitly: the previous `'UPLOAD' in command`
        # also matched any *shell command string* containing 'UPLOAD'.
        if isinstance(command, tuple) and command[0] == 'UPLOAD':
            file_name = command[1]
            upload_path = command[2]
            file_content = get_script_content(
                path=settings.PATCHING_APPLY_TESTS,
                bug_id=settings.PATCHING_BUG_ID,
                script=file_name)
            # NOTE(review): single quotes inside the script body would
            # break this echo command; consider a real file upload.
            command = "echo '{0}' > {1}/{2}".format(file_content, upload_path,
                                                    file_name)
        for remote in remotes:
            environment.execute_remote_cmd(remote, command)
        if devops_action == 'down':
            environment.fuel_web.warm_shutdown_nodes(devops_nodes)
        elif devops_action == 'up':
            environment.fuel_web.warm_start_nodes(devops_nodes)
        elif devops_action == 'reboot':
            environment.fuel_web.warm_restart_nodes(devops_nodes)
def apply_patches(environment, slaves):
    """Run the errata 'patch-scenario' actions (apply the fix)."""
    run_actions(environment, slaves, action_type='patch-scenario')


def verify_fix(environment, slaves):
    """Run the errata 'verify-scenario' actions (check the fix works)."""
    run_actions(environment, slaves, action_type='verify-scenario')
class ApplyPatchActions(object):
    # NOTE(review): empty placeholder — not referenced anywhere in this
    # module; presumably reserved for future use. TODO: confirm or remove.
    def __init__(self):
        pass

View File

@ -1045,11 +1045,11 @@ class FuelWebClient(object):
return ip_ranges, expected_ips
def warm_restart_nodes(self, devops_nodes):
logger.info('Reboot (warm restart) nodes %s',
def warm_shutdown_nodes(self, devops_nodes):
logger.info('Shutting down (warm) nodes %s',
[n.name for n in devops_nodes])
for node in devops_nodes:
logger.info('Shutdown node %s', node.name)
logger.debug('Shutdown node %s', node.name)
remote = self.get_ssh_for_node(node.name)
remote.check_call('/sbin/shutdown -Ph now')
@ -1058,14 +1058,23 @@ class FuelWebClient(object):
wait(
lambda: not self.get_nailgun_node_by_devops_node(node)[
'online'], timeout=60 * 10)
logger.info('Start %s node', node.name)
node.destroy()
node.create()
def warm_start_nodes(self, devops_nodes):
logger.info('Starting nodes %s', [n.name for n in devops_nodes])
for node in devops_nodes:
node.create()
for node in devops_nodes:
wait(
lambda: self.get_nailgun_node_by_devops_node(node)['online'],
timeout=60 * 10)
logger.debug('Node {0} became online.'.format(node.name))
def warm_restart_nodes(self, devops_nodes):
logger.info('Reboot (warm restart) nodes %s',
[n.name for n in devops_nodes])
self.warm_shutdown_nodes(devops_nodes)
self.warm_start_nodes(devops_nodes)
def cold_restart_nodes(self, devops_nodes):
logger.info('Cold restart nodes %s',

View File

@ -1,3 +1,21 @@
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
import sys
def import_tests():
from tests import test_admin_node # noqa
from tests import test_ceph # noqa
@ -23,15 +41,18 @@ def import_tests():
from tests.plugins.plugin_lbaas import test_plugin_lbaas # noqa
from tests.plugins.plugin_reboot import test_plugin_reboot_task # noqa
from tests import test_multiple_networks # noqa
from tests.tests_patching import test_patching # noqa
def run_tests():
    """Import all test modules and hand control over to Proboscis."""
    from proboscis import TestProgram  # noqa
    import_tests()
    # Run Proboscis and exit.
    TestProgram().run_and_exit()
if __name__ == '__main__':
    import_tests()
    # Patching tests depend on a deployment test chosen at runtime from
    # the patched packages' tags, so the dependency graph must be built
    # dynamically (map_test) before Proboscis parses the test plan.
    if any(re.search(r'--group=patching.*', arg) for arg in sys.argv):
        from fuelweb_test.helpers.patching import map_test
        map_test()
    run_tests()

View File

@ -374,3 +374,13 @@ MIRROR_UBUNTU = os.environ.get('MIRROR_UBUNTU', '')
EXTRA_DEB_REPOS = os.environ.get('EXTRA_DEB_REPOS', '')
MIRROR_UBUNTU_PRIORITY = os.environ.get('MIRROR_UBUNTU_PRIORITY', '1001')
EXTRA_DEB_REPOS_PRIORITY = os.environ.get('EXTRA_DEB_REPOS_PRIORITY', '1050')
# Directory on the master node that is served over HTTP; local mirrors
# of the patching repositories are created here.
PATCHING_WEB_DIR = os.environ.get("PATCHING_WEB_DIR", "/var/www/nailgun/")
# Whitespace-separated list of repositories with patched packages.
PATCHING_MIRRORS = os.environ.get("PATCHING_MIRRORS",
                                  CUSTOM_PKGS_MIRROR).split()
# ID of the bug that the patches under test are expected to fix.
PATCHING_BUG_ID = os.environ.get("PATCHING_BUG_ID", None)
# Local path or URL with per-package test plans (packages.yaml / test.yaml).
PATCHING_PKGS_TESTS = os.environ.get("PATCHING_PKGS_TESTS", "./packages_tests")
# Local path or URL with per-bug apply/verify scenarios (errata.yaml).
PATCHING_APPLY_TESTS = os.environ.get("PATCHING_APPLY_TESTS",
                                      "./patching_tests")
# Explicit list of patched packages; when unset the list is discovered
# from the errata or from the patching repositories.
PATCHING_PKGS = os.environ.get("PATCHING_PKGS", None)
# Snapshot name made after the base deployment test (set at runtime).
PATCHING_SNAPSHOT = os.environ.get("PATCHING_SNAPSHOT", None)

View File

@ -23,7 +23,7 @@ from fuelweb_test.tests.base_test_case import TestBasic
from fuelweb_test import logger
@test(groups=["thread_1", "neutron", "smoke_neutron"])
@test(groups=["thread_1", "neutron", "smoke_neutron", "deployment"])
class NeutronGre(TestBasic):
@test(depends_on=[SetupEnvironment.prepare_slaves_3],

View File

@ -0,0 +1,103 @@
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from proboscis import test
from proboscis.asserts import assert_is_not_none
from fuelweb_test import logger
from fuelweb_test import settings
from fuelweb_test.helpers import patching
from fuelweb_test.helpers.decorators import log_snapshot_on_error
from fuelweb_test.tests.base_test_case import TestBasic
@test(groups=["patching"])
class PatchingTests(TestBasic):
    """System tests that apply and verify package patches on a deployed
    environment (see fuelweb_test.helpers.patching)."""

    def __init__(self):
        # Snapshot name and package list are computed by
        # patching.map_test() at start-up, based on the deployment test
        # the patched packages' tags map to.
        self.snapshot_name = settings.PATCHING_SNAPSHOT
        self.pkgs = settings.PATCHING_PKGS
        super(PatchingTests, self).__init__()

    @test(groups=['prepare_patching_environment'])
    def prepare_patching_environment(self):
        """Snapshot the environment deployed by the selected base test."""
        logger.debug('Creating snapshot of environment deployed for patching.')
        self.env.make_snapshot(snapshot_name=self.snapshot_name,
                               is_make=True)

    @test(groups=["patching_environment"],
          depends_on_groups=['prepare_patching_environment'])
    @log_snapshot_on_error
    def patching_environment(self):
        """Apply patches on deployed environment

        Scenario:
        1. Revert snapshot of deployed environment
        2. Run Rally benchmark tests and store results
        3. Modify DNS settings on master node to make local resolving work
        4. Download patched packages on master node and make local repositories
        5. Add new local repositories on slave nodes
        6. Run packages update on slaves
        7. Perform actions required to apply patches
        8. Verify that fix works
        9. Run OSTF
        10. Run Rally benchmark tests and compare results

        Duration 15m
        Snapshot first_patching_demo
        """
        # Step #1
        if not self.env.revert_snapshot(self.snapshot_name):
            raise PatchingTestException('Environment revert from snapshot "{0}'
                                        '" failed.'.format(self.snapshot_name))
        # Check that environment exists and it's ready for patching
        cluster_id = self.fuel_web.get_last_created_cluster()
        assert_is_not_none(cluster_id, 'Environment for patching not found.')
        # Step #2
        # Run Rally benchmarks, coming soon...
        # Step #3
        patching.enable_local_dns_resolving(self.env)
        # Step #4
        patching_repos = patching.add_remote_repositories(self.env)
        # Step #5
        slaves = self.fuel_web.client.list_cluster_nodes(cluster_id)
        for repo in patching_repos:
            patching.connect_slaves_to_repo(self.env, slaves, repo)
        # Step #6
        patching.update_packages_on_slaves(self.env, slaves, self.pkgs)
        # Step #7
        logger.info('Applying fix...')
        patching.apply_patches(self.env, slaves)
        # Step #8
        logger.info('Verifying fix...')
        patching.verify_fix(self.env, slaves)
        # Step #9
        self.fuel_web.run_ostf(cluster_id=cluster_id)
        # Step #10
        # Run Rally benchmarks, compare new results with previous,
        # coming soon...
class PatchingTestException(Exception):
    """Raised when a patching test cannot proceed (e.g. revert failure)."""
    pass

View File

@ -0,0 +1,3 @@
system_tests:
tags:
- neutron

View File

@ -0,0 +1,3 @@
system_tests:
tags:
- neutron

View File

@ -0,0 +1,3 @@
system_tests:
tags:
- deployment

View File

@ -0,0 +1,60 @@
provisioning:
projects:
- name: linux
packages:
- bash
deployment:
projects:
- name: neutron
packages:
- neutron-common
- neutron-dhcp-agent
- neutron-l3-agent
- neutron-lbaas-agent
- neutron-metadata-agent
- neutron-metering-agent
- neutron-plugin-bigswitch
- neutron-plugin-bigswitch-agent
- neutron-plugin-brocade
- neutron-plugin-cisco
- neutron-plugin-hyperv
- neutron-plugin-ibm
- neutron-plugin-ibm-agent
- neutron-plugin-linuxbridge
- neutron-plugin-linuxbridge-agent
- neutron-plugin-metaplugin
- neutron-plugin-metering-agent
- neutron-plugin-midonet
- neutron-plugin-ml2
- neutron-plugin-mlnx
- neutron-plugin-mlnx-agent
- neutron-plugin-nec
- neutron-plugin-nec-agent
- neutron-plugin-nicira
- neutron-plugin-oneconvergence
- neutron-plugin-oneconvergence-agent
- neutron-plugin-openflow-agent
- neutron-plugin-openvswitch
- neutron-plugin-openvswitch-agent
- neutron-plugin-plumgrid
- neutron-plugin-ryu
- neutron-plugin-ryu-agent
- neutron-plugin-vmware
- neutron-plugin-vpn-agent
- neutron-server
- neutron-vpn-agent
- python-neutron
- name: nova
packages:
- nova-api
- nova-compute
master:
projects:
- name: nailgun
packages:
- nailgun
bootstrap:
  projects:
    - name: nailgun-agent
      # 'packages' must be a list: the lookup code does a membership
      # test against it, and a scalar string would degrade that test to
      # substring matching (e.g. 'nailgun' would match 'nailgun-agent').
      packages:
        - nailgun-agent

View File

@ -0,0 +1,3 @@
system_tests:
tags:
- neutron

View File

@ -0,0 +1,3 @@
system_tests:
tags:
- neutron

View File

@ -0,0 +1,3 @@
system_tests:
tags:
- deployment

View File

@ -0,0 +1,24 @@
provisioning:
projects:
- name: linux
packages:
- bash
deployment:
projects:
- name: neutron
packages:
- openstack-neutron
- python-neutron
- name: nova
packages:
- nova-api
- nova-compute
master:
projects:
- name: nailgun
packages:
- nailgun
bootstrap:
  projects:
    - name: nailgun-agent
      # 'packages' must be a list: the lookup code does a membership
      # test against it, and a scalar string would degrade that test to
      # substring matching (e.g. 'nailgun' would match 'nailgun-agent').
      packages:
        - nailgun-agent