When resuming, exclude haproxy

When resuming services, exclude those managed by hacluster, in
this case haproxy. If pacemaker lacks quorum, it may shut haproxy
down, which will put this charm into an error state.

A charmhelpers sync is included to bring in the required
get_managed_services_and_ports method.

Change-Id: I85c380a2cffcd18031a32b6e3eb422aa5ff14994
Liam Young 2020-01-23 11:34:54 +00:00
parent 8b1743129d
commit ab8b60a21c
8 changed files with 137 additions and 10 deletions
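In short, the resume action now asks charmhelpers which services this charm actually manages before restarting anything. Below is a minimal, self-contained sketch of that behaviour; the relation check is stubbed out here, and the real helper (shown in the diff below) lives in charmhelpers.contrib.hahelpers.cluster:

```python
def relation_ids(relation):
    # Stub: pretend a relation to hacluster exists on the 'ha' interface.
    return ['ha:1'] if relation == 'ha' else []


def get_managed_services_and_ports(services, external_ports,
                                   external_services=None):
    """Drop services (haproxy by default) that hacluster manages."""
    if external_services is None:
        external_services = ['haproxy']
    if relation_ids('ha'):
        for svc in external_services:
            try:
                services.remove(svc)
            except ValueError:
                pass
    return services, external_ports


services, _ = get_managed_services_and_ports(
    ['swift-proxy', 'haproxy', 'memcached', 'apache2'], [])
print(services)  # ['swift-proxy', 'memcached', 'apache2'], haproxy skipped
```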

View File

@@ -35,6 +35,9 @@ def _add_path(path):
 _add_path(_parent)

+from charmhelpers.contrib.hahelpers.cluster import (
+    get_managed_services_and_ports,
+)
 from charmhelpers.core.host import service_pause, service_resume
 from charmhelpers.core.hookenv import (
     action_fail,
@@ -90,7 +93,8 @@ def resume(args):
     @raises Exception if any services fail to start
     """
-    for service in args.services:
+    _services, _ = get_managed_services_and_ports(args.services, [])
+    for service in _services:
         started = service_resume(service)
         if not started:
             raise Exception("{} didn't start cleanly.".format(service))

View File

@@ -25,6 +25,7 @@ Helpers for clustering and determining "cluster leadership" and other
 clustering-related helpers.
 """

+import functools
 import subprocess
 import os
 import time
@@ -281,6 +282,10 @@ def determine_apache_port(public_port, singlenode_mode=False):
     return public_port - (i * 10)


+determine_apache_port_single = functools.partial(
+    determine_apache_port, singlenode_mode=True)
+
+
 def get_hacluster_config(exclude_keys=None):
     '''
     Obtains all relevant configuration from charm configuration required
@@ -404,3 +409,43 @@ def distributed_wait(modulo=None, wait=None, operation_name='operation'):
     log(msg, DEBUG)
     status_set('maintenance', msg)
     time.sleep(calculated_wait)
+
+
+def get_managed_services_and_ports(services, external_ports,
+                                   external_services=None,
+                                   port_conv_f=determine_apache_port_single):
+    """Get the services and ports managed by this charm.
+
+    Return only the services and corresponding ports that are managed by this
+    charm. This excludes haproxy when there is a relation with hacluster. This
+    is because this charm passes responsibility for stopping and starting
+    haproxy to hacluster.
+
+    Similarly, if a relation with hacluster exists then the ports returned by
+    this method correspond to those managed by the apache server rather than
+    haproxy.
+
+    :param services: List of services.
+    :type services: List[str]
+    :param external_ports: List of ports managed by external services.
+    :type external_ports: List[int]
+    :param external_services: List of services to be removed if ha relation is
+                              present.
+    :type external_services: List[str]
+    :param port_conv_f: Function to apply to ports to calculate the ports
+                        managed by services controlled by this charm.
+    :type port_conv_f: f()
+    :returns: A tuple of the managed services followed by their ports.
+    :rtype: Tuple[List[str], List[int]]
+    """
+    if external_services is None:
+        external_services = ['haproxy']
+    if relation_ids('ha'):
+        for svc in external_services:
+            try:
+                services.remove(svc)
+            except ValueError:
+                pass
+        external_ports = [port_conv_f(p) for p in external_ports]
+    return services, external_ports
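For illustration, a hedged usage sketch of the helper above. This assumes it runs inside a charm hook where the unit has an 'ha' relation and TLS is disabled, so determine_apache_port_single subtracts a single offset of 10; the service names and port are invented:

```python
from charmhelpers.contrib.hahelpers.cluster import (
    get_managed_services_and_ports,
)

# With an 'ha' relation present, haproxy is hacluster's responsibility
# and the public haproxy port maps back to the apache-managed port.
services, ports = get_managed_services_and_ports(
    ['swift-proxy', 'haproxy'], [8080])
# services -> ['swift-proxy']
# ports    -> [8070], i.e. determine_apache_port_single(8080)
```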

View File

@@ -52,7 +52,7 @@ class RestrictedPackages(BaseAudit):
     def __init__(self, pkgs, **kwargs):
         super(RestrictedPackages, self).__init__(**kwargs)
         if isinstance(pkgs, string_types) or not hasattr(pkgs, '__iter__'):
-            self.pkgs = [pkgs]
+            self.pkgs = pkgs.split()
         else:
             self.pkgs = pkgs
@@ -100,4 +100,5 @@ class RestrictedPackages(BaseAudit):
             apt_purge(pkg.name)

     def is_virtual_package(self, pkg):
-        return pkg.has_provides and not pkg.has_versions
+        return (pkg.get('has_provides', False) and
+                not pkg.get('has_versions', False))
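The first hunk's effect in plain Python: a space-separated string of package names now becomes one list entry per package instead of a single unusable entry (the package names here are just examples):

```python
pkgs = "telnetd rsh-server"
old_style = [pkgs]        # ['telnetd rsh-server'], one unusable entry
new_style = pkgs.split()  # ['telnetd', 'rsh-server'], one entry per package
assert new_style == ['telnetd', 'rsh-server']
```

The second hunk follows from the apt shim further down: package records are now dict-like, so attribute access is replaced with dict.get() lookups that default to False when a key is absent.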

View File

@@ -44,6 +44,7 @@ from charmhelpers.core.hookenv import (
     INFO,
     ERROR,
     related_units,
+    relation_get,
     relation_ids,
     relation_set,
     status_set,
@@ -331,6 +332,10 @@ PACKAGE_CODENAMES = {
 DEFAULT_LOOPBACK_SIZE = '5G'

+DB_SERIES_UPGRADING_KEY = 'cluster-series-upgrading'
+
+DB_MAINTENANCE_KEYS = [DB_SERIES_UPGRADING_KEY]
+

 class CompareOpenStackReleases(BasicStringComparator):
     """Provide comparisons of OpenStack releases.
@@ -1912,3 +1917,33 @@ def set_db_initialised():
     """
     juju_log('Setting db-initialised to True', 'DEBUG')
     leader_set({'db-initialised': True})
+
+
+def is_db_maintenance_mode(relid=None):
+    """Check relation data for notifications of db in maintenance mode.
+
+    :returns: Whether db has notified it is in maintenance mode.
+    :rtype: bool
+    """
+    juju_log('Checking for maintenance notifications', 'DEBUG')
+    if relid:
+        r_ids = [relid]
+    else:
+        r_ids = relation_ids('shared-db')
+    rids_units = [(r, u) for r in r_ids for u in related_units(r)]
+    notifications = []
+    for r_id, unit in rids_units:
+        settings = relation_get(unit=unit, rid=r_id)
+        for key, value in settings.items():
+            if value and key in DB_MAINTENANCE_KEYS:
+                juju_log(
+                    'Unit: {}, Key: {}, Value: {}'.format(unit, key, value),
+                    'DEBUG')
+                try:
+                    notifications.append(bool_from_string(value))
+                except ValueError:
+                    juju_log(
+                        'Could not discern bool from {}'.format(value),
+                        'WARN')
+                    pass
+    return True in notifications
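A sketch of the aggregation logic above in isolation: relation access is replaced with static data, and bool_from_string is a simplified stand-in for charmhelpers.core.strutils.bool_from_string:

```python
DB_MAINTENANCE_KEYS = ['cluster-series-upgrading']


def bool_from_string(value):
    # Simplified stand-in: accept the usual truthy/falsey spellings.
    lowered = value.strip().lower()
    if lowered in ('y', 'yes', 'true', 't', 'on'):
        return True
    if lowered in ('n', 'no', 'false', 'f', 'off'):
        return False
    raise ValueError('Unable to interpret {!r} as boolean'.format(value))


# Static stand-in for relation_get() across two shared-db units.
unit_settings = [
    {'cluster-series-upgrading': 'True', 'private-address': '10.0.0.5'},
    {'private-address': '10.0.0.6'},
]

notifications = []
for settings in unit_settings:
    for key, value in settings.items():
        if value and key in DB_MAINTENANCE_KEYS:
            try:
                notifications.append(bool_from_string(value))
            except ValueError:
                pass

print(True in notifications)  # True: at least one unit flagged maintenance
```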

View File

@@ -38,6 +38,7 @@ so with this we get rid of the dependency.
 import locale
 import os
 import subprocess
+import sys


 class _container(dict):
@@ -59,6 +60,13 @@ class Cache(object):
     def __init__(self, progress=None):
         pass

+    def __contains__(self, package):
+        try:
+            pkg = self.__getitem__(package)
+            return pkg is not None
+        except KeyError:
+            return False
+
     def __getitem__(self, package):
         """Get information about a package from apt and dpkg databases.
@@ -178,6 +186,28 @@ class Cache(object):
         return pkgs


+class Config(_container):
+    def __init__(self):
+        super(Config, self).__init__(self._populate())
+
+    def _populate(self):
+        cfgs = {}
+        cmd = ['apt-config', 'dump']
+        output = subprocess.check_output(cmd,
+                                         stderr=subprocess.STDOUT,
+                                         universal_newlines=True)
+        for line in output.splitlines():
+            if not line.startswith("CommandLine"):
+                k, v = line.split(" ", 1)
+                cfgs[k] = v.strip(";").strip("\"")
+        return cfgs
+
+
+# Backwards compatibility with old apt_pkg module
+sys.modules[__name__].config = Config()
+
+
 def init():
     """Compatibility shim that does nothing."""
     pass
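A hedged usage sketch of the shim above. The module path is inferred from the charmhelpers tree (this view does not show file names), apt-config must be on PATH, so this only runs on a Debian/Ubuntu host, and the keys returned depend on the system:

```python
from charmhelpers.fetch import ubuntu_apt_pkg as apt_pkg

apt_pkg.init()                              # compatibility no-op
arch = apt_pkg.config['APT::Architecture']  # e.g. 'amd64'
print(arch)
print('bash' in apt_pkg.Cache())            # exercises the new __contains__
```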

View File

@@ -41,6 +41,7 @@ from charmhelpers.contrib.hahelpers.cluster import (
     is_elected_leader,
     peer_units,
     determine_api_port,
+    get_managed_services_and_ports,
 )
 from charmhelpers.core.hookenv import (
     log,
@@ -1464,6 +1465,7 @@ def assess_status_func(configs, check_services=None):
     """
     if check_services is None:
         check_services = services()
+    check_services, _ = get_managed_services_and_ports(check_services, [])
     required_interfaces = {}
     if relation_ids('identity-service'):
         required_interfaces['identity'] = ['identity-service']

View File

@@ -108,12 +108,19 @@ class ResumeTestCase(CharmTestCase):
     def setUp(self):
         super(ResumeTestCase, self).setUp(
             actions.actions, ["service_resume", "clear_unit_paused",
-                              "assess_status"])
+                              "assess_status",
+                              "get_managed_services_and_ports"])

         class FakeArgs(object):
             services = ['swift-proxy', 'haproxy', 'memcached', 'apache2']
         self.args = FakeArgs()

+        def fake_svcs_and_ports(services, ports):
+            services.remove('haproxy')
+            return services, ports
+
+        self.get_managed_services_and_ports.side_effect = fake_svcs_and_ports
+
     def test_resumes_services(self):
         """Resume action resumes all of the Swift services."""
         resume_calls = []
@@ -125,14 +132,14 @@ class ResumeTestCase(CharmTestCase):
         self.service_resume.side_effect = fake_service_resume
         actions.actions.resume(self.args)
         self.assertEqual(
-            resume_calls, ['swift-proxy', 'haproxy', 'memcached', 'apache2'])
+            resume_calls, ['swift-proxy', 'memcached', 'apache2'])

     def test_bails_out_early_on_error(self):
         """Resume action fails early if there are errors starting a service."""
         resume_calls = []

         def maybe_kill(svc):
-            if svc == "haproxy":
+            if svc == "apache2":
                 return False
             else:
                 resume_calls.append(svc)
@@ -140,9 +147,9 @@ class ResumeTestCase(CharmTestCase):
         self.service_resume.side_effect = maybe_kill
         self.assertRaisesRegexp(
-            Exception, "haproxy didn't start cleanly.",
+            Exception, "apache2 didn't start cleanly.",
             actions.actions.resume, self.args)
-        self.assertEqual(resume_calls, ['swift-proxy'])
+        self.assertEqual(resume_calls, ['swift-proxy', 'memcached'])

     def test_resume_sets_value(self):
         """Resume action sets the unit-paused value to False."""

View File

@@ -547,13 +547,16 @@ class SwiftUtilsTestCase(unittest.TestCase):
             swift_utils.VERSION_PACKAGE
         )

+    @mock.patch.object(swift_utils, 'get_managed_services_and_ports')
     @mock.patch.object(swift_utils, 'relation_ids')
     @mock.patch.object(swift_utils, 'services')
     @mock.patch.object(swift_utils, 'make_assess_status_func')
     def test_assess_status_func(self,
                                 make_assess_status_func,
                                 services,
-                                relation_ids):
+                                relation_ids,
+                                get_managed_services_and_ports):
+        get_managed_services_and_ports.return_value = (['s1'], [])
         relation_ids.return_value = True
         services.return_value = 's1'
         required_interfaces = {'identity': ['identity-service']}
@@ -563,7 +566,7 @@ class SwiftUtilsTestCase(unittest.TestCase):
         make_assess_status_func.assert_called_once_with(
             'test-config', required_interfaces,
             charm_func=swift_utils.customer_check_assess_status,
-            services='s1', ports=None)
+            services=['s1'], ports=None)

     @mock.patch.object(swift_utils, 'os_release')
     @mock.patch.object(swift_utils, 'leader_set')