Merge "Sync charm-helpers"

Author: Zuul
Committed: 2018-11-08 23:01:00 +00:00 by Gerrit Code Review
Commit: 9b5822471a
14 changed files with 226 additions and 39 deletions


@@ -23,22 +23,22 @@ import subprocess
import sys
try:
import six # flake8: noqa
import six # NOQA:F401
except ImportError:
if sys.version_info.major == 2:
subprocess.check_call(['apt-get', 'install', '-y', 'python-six'])
else:
subprocess.check_call(['apt-get', 'install', '-y', 'python3-six'])
import six # flake8: noqa
import six # NOQA:F401
try:
import yaml # flake8: noqa
import yaml # NOQA:F401
except ImportError:
if sys.version_info.major == 2:
subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml'])
else:
subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml'])
import yaml # flake8: noqa
import yaml # NOQA:F401
# Holds a list of mapping of mangled function names that have been deprecated


@@ -14,6 +14,7 @@
import os
import re
import six
import subprocess
@@ -95,6 +96,8 @@ class ApacheConfContext(object):
ctxt = settings['hardening']
out = subprocess.check_output(['apache2', '-v'])
if six.PY3:
out = out.decode('utf-8')
ctxt['apache_version'] = re.search(r'.+version: Apache/(.+?)\s.+',
out).group(1)
ctxt['apache_icondir'] = '/usr/share/apache2/icons/'
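
For reference, a minimal standalone sketch of the version-detection step above, assuming `apache2 -v` prints a line such as "Server version: Apache/2.4.29 (Ubuntu)" (the exact wording can vary by distribution):

import re
import subprocess

def apache_version():
    # Sketch only: check_output() returns bytes on Python 3, so decode
    # before applying the same regex used in the hunk above.
    out = subprocess.check_output(['apache2', '-v'])
    if isinstance(out, bytes):
        out = out.decode('utf-8')
    return re.search(r'.+version: Apache/(.+?)\s.+', out).group(1)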


@@ -15,7 +15,7 @@
import re
import subprocess
from six import string_types
import six
from charmhelpers.core.hookenv import (
log,
@@ -35,7 +35,7 @@ class DisabledModuleAudit(BaseAudit):
def __init__(self, modules):
if modules is None:
self.modules = []
elif isinstance(modules, string_types):
elif isinstance(modules, six.string_types):
self.modules = [modules]
else:
self.modules = modules
@@ -69,6 +69,8 @@ class DisabledModuleAudit(BaseAudit):
def _get_loaded_modules():
"""Returns the modules which are enabled in Apache."""
output = subprocess.check_output(['apache2ctl', '-M'])
if six.PY3:
output = output.decode('utf-8')
modules = []
for line in output.splitlines():
# Each line of the enabled module output looks like:
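
As a rough illustration of what that comment refers to: on a typical Ubuntu system, `apache2ctl -M` prints a "Loaded Modules:" header followed by lines such as " rewrite_module (shared)". A hedged sketch of parsing that output (helper name and filtering are illustrative, not the charm-helpers implementation):

import subprocess

def loaded_apache_modules():
    output = subprocess.check_output(['apache2ctl', '-M'])
    if isinstance(output, bytes):  # bytes on Python 3
        output = output.decode('utf-8')
    modules = []
    for line in output.splitlines():
        parts = line.split()
        # Module lines have exactly two fields, e.g. "so_module (static)".
        if len(parts) == 2 and parts[0].endswith('_module'):
            modules.append(parts[0][:-len('_module')])
    return modules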


@@ -27,6 +27,8 @@ from charmhelpers.contrib.hardening.ssh.checks import run_ssh_checks
from charmhelpers.contrib.hardening.mysql.checks import run_mysql_checks
from charmhelpers.contrib.hardening.apache.checks import run_apache_checks
_DISABLE_HARDENING_FOR_UNIT_TEST = False
def harden(overrides=None):
"""Hardening decorator.
@@ -47,16 +49,28 @@ def harden(overrides=None):
provided with 'harden' config.
:returns: Returns value returned by decorated function once executed.
"""
if overrides is None:
overrides = []
def _harden_inner1(f):
log("Hardening function '%s'" % (f.__name__), level=DEBUG)
# As this has to be py2.7 compat, we can't use nonlocal. Use a trick
# to capture the dictionary that can then be updated.
_logged = {'done': False}
def _harden_inner2(*args, **kwargs):
# knock out hardening via a config var; normally it won't get
# disabled.
if _DISABLE_HARDENING_FOR_UNIT_TEST:
return f(*args, **kwargs)
if not _logged['done']:
log("Hardening function '%s'" % (f.__name__), level=DEBUG)
_logged['done'] = True
RUN_CATALOG = OrderedDict([('os', run_os_checks),
('ssh', run_ssh_checks),
('mysql', run_mysql_checks),
('apache', run_apache_checks)])
enabled = overrides or (config("harden") or "").split()
enabled = overrides[:] or (config("harden") or "").split()
if enabled:
modules_to_run = []
# modules will always be performed in the following order
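
The "py2.7 compat" comment above refers to emulating nonlocal by mutating a dictionary captured in the closure. A minimal standalone illustration of that trick, with hypothetical names (not charm-helpers code):

def log_once(message):
    def _outer(f):
        # Python 2.7 has no `nonlocal`, so mutate a captured dict instead of
        # rebinding a variable from the enclosing scope.
        _state = {'done': False}

        def _inner(*args, **kwargs):
            if not _state['done']:
                print(message)  # stand-in for hookenv.log(..., level=DEBUG)
                _state['done'] = True
            return f(*args, **kwargs)
        return _inner
    return _outer

@log_once("running hardened entry point")
def example_hook():
    return 42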


@@ -28,6 +28,7 @@ import json
import re
from charmhelpers.core.hookenv import (
expected_related_units,
log,
relation_set,
charm_name,
@@ -110,12 +111,17 @@ def assert_charm_supports_dns_ha():
def expect_ha():
""" Determine if the unit expects to be in HA
Check for VIP or dns-ha settings which indicate the unit should expect to
be related to hacluster.
Check juju goal-state to see if an ha relation is expected, and check for VIP or
dns-ha settings, which indicate the unit should expect to be related to hacluster.
@returns boolean
"""
return config('vip') or config('dns-ha')
ha_related_units = []
try:
ha_related_units = list(expected_related_units(reltype='ha'))
except (NotImplementedError, KeyError):
pass
return len(ha_related_units) > 0 or config('vip') or config('dns-ha')
def generate_ha_relation_data(service):
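
A hedged usage sketch of the updated expect_ha(): a charm might gate its workload status on it roughly as below. The import path and the status handling are assumptions, not taken from this diff:

from charmhelpers.contrib.openstack.ha.utils import expect_ha
from charmhelpers.core.hookenv import status_set

def assess_ha_status(ha_relation_joined):
    # expect_ha() is now True when juju goal-state lists units for an 'ha'
    # relation, or when the 'vip' / 'dns-ha' config options are set.
    if expect_ha() and not ha_relation_joined:
        status_set('blocked', 'hacluster relation expected but not joined')
    else:
        status_set('active', 'Unit is ready')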


@@ -186,7 +186,7 @@ SWIFT_CODENAMES = OrderedDict([
('queens',
['2.16.0', '2.17.0']),
('rocky',
['2.18.0']),
['2.18.0', '2.19.0']),
])
# >= Liberty version->codename mapping
@@ -375,7 +375,7 @@ def get_swift_codename(version):
return codenames[0]
# NOTE: fallback - attempt to match with just major.minor version
match = re.match('^(\d+)\.(\d+)', version)
match = re.match(r'^(\d+)\.(\d+)', version)
if match:
major_minor_version = match.group(0)
for codename, versions in six.iteritems(SWIFT_CODENAMES):
@@ -395,7 +395,7 @@ def get_os_codename_package(package, fatal=True):
out = subprocess.check_output(cmd)
if six.PY3:
out = out.decode('UTF-8')
except subprocess.CalledProcessError as e:
except subprocess.CalledProcessError:
return None
lines = out.split('\n')
for line in lines:
@@ -427,11 +427,11 @@ def get_os_codename_package(package, fatal=True):
vers = apt.upstream_version(pkg.current_ver.ver_str)
if 'swift' in pkg.name:
# Fully x.y.z match for swift versions
match = re.match('^(\d+)\.(\d+)\.(\d+)', vers)
match = re.match(r'^(\d+)\.(\d+)\.(\d+)', vers)
else:
# x.y match only for 20XX.X
# and ignore patch level for other packages
match = re.match('^(\d+)\.(\d+)', vers)
match = re.match(r'^(\d+)\.(\d+)', vers)
if match:
vers = match.group(0)
@@ -1450,20 +1450,33 @@ def pausable_restart_on_change(restart_map, stopstart=False,
see core.utils.restart_on_change() for more details.
Note restart_map can be a callable, in which case, restart_map is only
evaluated at runtime. This means that it is lazy and the underlying
function won't be called if the decorated function is never called. Note,
retains backwards compatibility for passing a non-callable dictionary.
@param f: the function to decorate
@param restart_map: the restart map {conf_file: [services]}
@param restart_map: (optionally callable, which then returns the
restart_map) the restart map {conf_file: [services]}
@param stopstart: DEFAULT false; whether to stop, start or just restart
@returns decorator to use a restart_on_change with pausability
"""
def wrap(f):
# py27 compatible nonlocal variable. When py3 only, replace with
# nonlocal keyword
__restart_map_cache = {'cache': None}
@functools.wraps(f)
def wrapped_f(*args, **kwargs):
if is_unit_paused_set():
return f(*args, **kwargs)
if __restart_map_cache['cache'] is None:
__restart_map_cache['cache'] = restart_map() \
if callable(restart_map) else restart_map
# otherwise, normal restart_on_change functionality
return restart_on_change_helper(
(lambda: f(*args, **kwargs)), restart_map, stopstart,
restart_functions)
(lambda: f(*args, **kwargs)), __restart_map_cache['cache'],
stopstart, restart_functions)
return wrapped_f
return wrap
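
A hedged usage sketch of the decorator after this change: restart_map may be a plain dict or a callable, and a callable is now resolved once and cached on the first non-paused call. The import path is assumed and the file and service names are illustrative:

from charmhelpers.contrib.openstack.utils import pausable_restart_on_change

def restart_map():
    # Built lazily, so config/relation data is only consulted when a
    # decorated hook actually runs.
    return {'/etc/example/example.conf': ['example-api', 'example-worker']}

def render_configs():
    pass  # stand-in for the charm's template rendering step

@pausable_restart_on_change(restart_map, stopstart=True)
def config_changed():
    render_configs()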
@@ -1733,3 +1746,31 @@ def is_unit_upgrading_set():
return not(not(kv.get('unit-upgrading')))
except Exception:
return False
def series_upgrade_prepare(pause_unit_helper=None, configs=None):
""" Run common series upgrade prepare tasks.
:param pause_unit_helper: function: Function to pause unit
:param configs: OSConfigRenderer object: Configurations
:returns None:
"""
set_unit_upgrading()
if pause_unit_helper and configs:
if not is_unit_paused_set():
pause_unit_helper(configs)
def series_upgrade_complete(resume_unit_helper=None, configs=None):
""" Run common series upgrade complete tasks.
:param resume_unit_helper: function: Function to resume unit
:param configs: OSConfigRenderer object: Configurations
:returns None:
"""
clear_unit_paused()
clear_unit_upgrading()
if configs:
configs.write_all()
if resume_unit_helper:
resume_unit_helper(configs)
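
A hedged sketch of wiring these helpers into Juju's series-upgrade hooks. The import path is assumed, and register_configs(), pause_unit_helper and resume_unit_helper stand in for what a charm would normally supply:

from charmhelpers.contrib.openstack.utils import (
    series_upgrade_prepare,
    series_upgrade_complete,
)

class _Configs(object):
    def write_all(self):
        pass  # stand-in for OSConfigRenderer.write_all()

def register_configs():
    return _Configs()  # stand-in for the charm's config renderer factory

def pause_unit_helper(configs):
    pass  # stand-in: stop services and set a paused status

def resume_unit_helper(configs):
    pass  # stand-in: start services and clear the paused status

def pre_series_upgrade():
    # Sets the unit-upgrading flag and pauses the unit if not already paused.
    series_upgrade_prepare(pause_unit_helper, register_configs())

def post_series_upgrade():
    # Clears the paused/upgrading flags, rewrites configs, resumes the unit.
    series_upgrade_complete(resume_unit_helper, register_configs())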


@@ -39,7 +39,7 @@ def loopback_devices():
devs = [d.strip().split(' ') for d in
check_output(cmd).splitlines() if d != '']
for dev, _, f in devs:
loopbacks[dev.replace(':', '')] = re.search('\((\S+)\)', f).groups()[0]
loopbacks[dev.replace(':', '')] = re.search(r'\((\S+)\)', f).groups()[0]
return loopbacks


@@ -48,6 +48,7 @@ INFO = "INFO"
DEBUG = "DEBUG"
TRACE = "TRACE"
MARKER = object()
SH_MAX_ARG = 131071
cache = {}
@@ -98,7 +99,7 @@ def log(message, level=None):
command += ['-l', level]
if not isinstance(message, six.string_types):
message = repr(message)
command += [message]
command += [message[:SH_MAX_ARG]]
# Missing juju-log should not cause failures in unit tests
# Send log output to stderr
try:
@@ -509,6 +510,67 @@ def related_units(relid=None):
subprocess.check_output(units_cmd_line).decode('UTF-8')) or []
def expected_peer_units():
"""Get a generator for units we expect to join peer relation based on
goal-state.
The local unit is excluded from the result to make it easy to gauge
completion of all peers joining the relation with existing hook tools.
Example usage:
log('peer {} of {} joined peer relation'
.format(len(related_units()),
len(list(expected_peer_units()))))
This function will raise NotImplementedError if used with juju versions
without goal-state support.
:returns: iterator
:rtype: types.GeneratorType
:raises: NotImplementedError
"""
if not has_juju_version("2.4.0"):
# goal-state first appeared in 2.4.0.
raise NotImplementedError("goal-state")
_goal_state = goal_state()
return (key for key in _goal_state['units']
if '/' in key and key != local_unit())
def expected_related_units(reltype=None):
"""Get a generator for units we expect to join relation based on
goal-state.
Note that you cannot use this function for the peer relation; see
expected_peer_units() for that.
This function will raise KeyError if you request information for a
relation type for which juju goal-state does not have information. It will
raise NotImplementedError if used with juju versions without goal-state
support.
Example usage:
log('participant {} of {} joined relation {}'
.format(len(related_units()),
len(list(expected_related_units())),
relation_type()))
:param reltype: Relation type to list data for, default is to list data for
the relation type we are currently executing a hook for.
:type reltype: str
:returns: iterator
:rtype: types.GeneratorType
:raises: KeyError, NotImplementedError
"""
if not has_juju_version("2.4.4"):
# goal-state existed in 2.4.0, but did not list individual units to
# join a relation in 2.4.1 through 2.4.3. (LP: #1794739)
raise NotImplementedError("goal-state relation unit count")
reltype = reltype or relation_type()
_goal_state = goal_state()
return (key for key in _goal_state['relations'][reltype] if '/' in key)
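
For orientation, a hedged sketch of the goal-state structure these generators read. The shape is an assumption inferred from the lookups above; real data comes from `goal-state --format=json`:

_goal_state = {
    'units': {
        'myapp/0': {'status': 'active', 'since': '...'},
        'myapp/1': {'status': 'waiting', 'since': '...'},
    },
    'relations': {
        'ha': {
            'hacluster': {'status': 'joined', 'since': '...'},
            'hacluster/0': {'status': 'joined', 'since': '...'},
        },
    },
}

# expected_peer_units(): keys under 'units' that name a unit ('/' present),
# minus the local unit.
peers = [k for k in _goal_state['units'] if '/' in k and k != 'myapp/0']

# expected_related_units('ha'): unit keys (not the application key) under the
# requested relation type.
ha_units = [k for k in _goal_state['relations']['ha'] if '/' in k]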
@cached
def relation_for_unit(unit=None, rid=None):
"""Get the json represenation of a unit's relation"""
@@ -997,6 +1059,7 @@ def application_version_set(version):
@translate_exc(from_exc=OSError, to_exc=NotImplementedError)
@cached
def goal_state():
"""Juju goal state values"""
cmd = ['goal-state', '--format=json']


@@ -34,13 +34,13 @@ import six
from contextlib import contextmanager
from collections import OrderedDict
from .hookenv import log, DEBUG, local_unit
from .hookenv import log, INFO, DEBUG, local_unit, charm_name
from .fstab import Fstab
from charmhelpers.osplatform import get_platform
__platform__ = get_platform()
if __platform__ == "ubuntu":
from charmhelpers.core.host_factory.ubuntu import (
from charmhelpers.core.host_factory.ubuntu import ( # NOQA:F401
service_available,
add_new_group,
lsb_release,
@@ -48,7 +48,7 @@ if __platform__ == "ubuntu":
CompareHostReleases,
) # flake8: noqa -- ignore F401 for this import
elif __platform__ == "centos":
from charmhelpers.core.host_factory.centos import (
from charmhelpers.core.host_factory.centos import ( # NOQA:F401
service_available,
add_new_group,
lsb_release,
@@ -58,6 +58,7 @@ elif __platform__ == "centos":
UPDATEDB_PATH = '/etc/updatedb.conf'
def service_start(service_name, **kwargs):
"""Start a system service.
@@ -287,8 +288,8 @@ def service_running(service_name, **kwargs):
for key, value in six.iteritems(kwargs):
parameter = '%s=%s' % (key, value)
cmd.append(parameter)
output = subprocess.check_output(cmd,
stderr=subprocess.STDOUT).decode('UTF-8')
output = subprocess.check_output(
cmd, stderr=subprocess.STDOUT).decode('UTF-8')
except subprocess.CalledProcessError:
return False
else:
@@ -442,7 +443,7 @@ def add_user_to_group(username, group):
def chage(username, lastday=None, expiredate=None, inactive=None,
mindays=None, maxdays=None, root=None, warndays=None):
mindays=None, maxdays=None, root=None, warndays=None):
"""Change user password expiry information
:param str username: User to update
@@ -482,8 +483,10 @@ def chage(username, lastday=None, expiredate=None, inactive=None,
cmd.append(username)
subprocess.check_call(cmd)
remove_password_expiry = functools.partial(chage, expiredate='-1', inactive='-1', mindays='0', maxdays='-1')
def rsync(from_path, to_path, flags='-r', options=None, timeout=None):
"""Replicate the contents of a path"""
options = options or ['--delete', '--executability']
@@ -535,13 +538,15 @@ def write_file(path, content, owner='root', group='root', perms=0o444):
# lets see if we can grab the file and compare the context, to avoid doing
# a write.
existing_content = None
existing_uid, existing_gid = None, None
existing_uid, existing_gid, existing_perms = None, None, None
try:
with open(path, 'rb') as target:
existing_content = target.read()
stat = os.stat(path)
existing_uid, existing_gid = stat.st_uid, stat.st_gid
except:
existing_uid, existing_gid, existing_perms = (
stat.st_uid, stat.st_gid, stat.st_mode
)
except Exception:
pass
if content != existing_content:
log("Writing file {} {}:{} {:o}".format(path, owner, group, perms),
@@ -554,7 +559,7 @@ def write_file(path, content, owner='root', group='root', perms=0o444):
target.write(content)
return
# the contents were the same, but we might still need to change the
# ownership.
# ownership or permissions.
if existing_uid != uid:
log("Changing uid on already existing content: {} -> {}"
.format(existing_uid, uid), level=DEBUG)
@@ -563,6 +568,10 @@ def write_file(path, content, owner='root', group='root', perms=0o444):
log("Changing gid on already existing content: {} -> {}"
.format(existing_gid, gid), level=DEBUG)
os.chown(path, -1, gid)
if existing_perms != perms:
log("Changing permissions on existing content: {} -> {}"
.format(existing_perms, perms), level=DEBUG)
os.chmod(path, perms)
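
A hedged usage note: with the extra stat of st_mode above, calling write_file() again with identical content but a different mode now only chmods the existing file instead of rewriting it. Illustrative example (import path assumed, path made up, requires root):

from charmhelpers.core.host import write_file

write_file('/etc/example/example.conf', b'key = value\n',
           owner='root', group='root', perms=0o644)

# Same content, tighter permissions: only os.chmod() is performed,
# the file contents are not rewritten.
write_file('/etc/example/example.conf', b'key = value\n',
           owner='root', group='root', perms=0o600)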
def fstab_remove(mp):
@@ -827,7 +836,7 @@ def list_nics(nic_type=None):
ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')
ip_output = (line.strip() for line in ip_output if line)
key = re.compile('^[0-9]+:\s+(.+):')
key = re.compile(r'^[0-9]+:\s+(.+):')
for line in ip_output:
matched = re.search(key, line)
if matched:
@@ -1040,3 +1049,27 @@ def modulo_distribution(modulo=3, wait=30, non_zero_wait=False):
return modulo * wait
else:
return calculated_wait_time
def install_ca_cert(ca_cert, name=None):
"""
Install the given cert as a trusted CA.
The ``name`` is the stem of the filename where the cert is written, and if
not provided, it will default to ``juju-{charm_name}``.
If the cert is empty or None, or is unchanged, nothing is done.
"""
if not ca_cert:
return
if not isinstance(ca_cert, bytes):
ca_cert = ca_cert.encode('utf8')
if not name:
name = 'juju-{}'.format(charm_name())
cert_file = '/usr/local/share/ca-certificates/{}.crt'.format(name)
new_hash = hashlib.md5(ca_cert).hexdigest()
if file_hash(cert_file) == new_hash:
return
log("Installing new CA cert at: {}".format(cert_file), level=INFO)
write_file(cert_file, ca_cert)
subprocess.check_call(['update-ca-certificates', '--fresh'])
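
A hedged usage sketch of the new install_ca_cert() helper, for example consuming a CA certificate offered over a relation. The import paths and the 'ca' relation key are assumptions:

from charmhelpers.core.hookenv import relation_get
from charmhelpers.core.host import install_ca_cert

def certificates_relation_changed():
    ca_cert = relation_get('ca')
    # No-op when the cert is empty or unchanged; otherwise writes
    # /usr/local/share/ca-certificates/juju-<charm-name>.crt and runs
    # update-ca-certificates --fresh.
    install_ca_cert(ca_cert)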


@@ -26,12 +26,12 @@ from charmhelpers.core.hookenv import (
__platform__ = get_platform()
if __platform__ == "ubuntu":
from charmhelpers.core.kernel_factory.ubuntu import (
from charmhelpers.core.kernel_factory.ubuntu import ( # NOQA:F401
persistent_modprobe,
update_initramfs,
) # flake8: noqa -- ignore F401 for this import
elif __platform__ == "centos":
from charmhelpers.core.kernel_factory.centos import (
from charmhelpers.core.kernel_factory.centos import ( # NOQA:F401
persistent_modprobe,
update_initramfs,
) # flake8: noqa -- ignore F401 for this import


@@ -84,6 +84,7 @@ module = "charmhelpers.fetch.%s" % __platform__
fetch = importlib.import_module(module)
filter_installed_packages = fetch.filter_installed_packages
filter_missing_packages = fetch.filter_missing_packages
install = fetch.apt_install
upgrade = fetch.apt_upgrade
update = _fetch_update = fetch.apt_update
@@ -96,6 +97,7 @@ if __platform__ == "ubuntu":
apt_update = fetch.apt_update
apt_upgrade = fetch.apt_upgrade
apt_purge = fetch.apt_purge
apt_autoremove = fetch.apt_autoremove
apt_mark = fetch.apt_mark
apt_hold = fetch.apt_hold
apt_unhold = fetch.apt_unhold


@@ -13,7 +13,7 @@
# limitations under the License.
import os
from subprocess import check_call
from subprocess import STDOUT, check_output
from charmhelpers.fetch import (
BaseFetchHandler,
UnhandledSource,
@@ -55,7 +55,7 @@ class BzrUrlFetchHandler(BaseFetchHandler):
cmd = ['bzr', 'branch']
cmd += cmd_opts
cmd += [source, dest]
check_call(cmd)
check_output(cmd, stderr=STDOUT)
def install(self, source, dest=None, revno=None):
url_parts = self.parse_url(source)


@@ -13,7 +13,7 @@
# limitations under the License.
import os
from subprocess import check_call, CalledProcessError
from subprocess import check_output, CalledProcessError, STDOUT
from charmhelpers.fetch import (
BaseFetchHandler,
UnhandledSource,
@@ -50,7 +50,7 @@ class GitUrlFetchHandler(BaseFetchHandler):
cmd = ['git', 'clone', source, dest, '--branch', branch]
if depth:
cmd.extend(['--depth', depth])
check_call(cmd)
check_output(cmd, stderr=STDOUT)
def install(self, source, branch="master", dest=None, depth=None):
url_parts = self.parse_url(source)


@@ -189,6 +189,18 @@ def filter_installed_packages(packages):
return _pkgs
def filter_missing_packages(packages):
"""Return a list of packages that are installed.
:param packages: list of packages to evaluate.
:returns list: Packages that are installed.
"""
return list(
set(packages) -
set(filter_installed_packages(packages))
)
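
Despite its name, the set difference above means filter_missing_packages() returns the members of the list that are currently installed. A hedged example of the typical use, purging only packages that are actually present (package names are illustrative, import path assumed):

from charmhelpers.fetch import apt_purge, filter_missing_packages

OLD_PACKAGES = ['python-example', 'python-example-client']

def purge_old_packages():
    installed = filter_missing_packages(OLD_PACKAGES)
    if installed:
        apt_purge(installed, fatal=True)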
def apt_cache(in_memory=True, progress=None):
"""Build and return an apt cache."""
from apt import apt_pkg
@@ -248,6 +260,14 @@ def apt_purge(packages, fatal=False):
_run_apt_command(cmd, fatal)
def apt_autoremove(purge=True, fatal=False):
"""Purge one or more packages."""
cmd = ['apt-get', '--assume-yes', 'autoremove']
if purge:
cmd.append('--purge')
_run_apt_command(cmd, fatal)
def apt_mark(packages, mark, fatal=False):
"""Flag one or more packages using apt-mark."""
log("Marking {} as {}".format(packages, mark))
@@ -274,7 +294,7 @@ def apt_unhold(packages, fatal=False):
def import_key(key):
"""Import an ASCII Armor key.
/!\ A Radix64 format keyid is also supported for backwards
A Radix64 format keyid is also supported for backwards
compatibility, but should never be used; the key retrieval
mechanism is insecure and subject to man-in-the-middle attacks
voiding all signature checks using that key.
@@ -434,6 +454,9 @@ def _add_apt_repository(spec):
:param spec: the parameter to pass to add_apt_repository
"""
if '{series}' in spec:
series = lsb_release()['DISTRIB_CODENAME']
spec = spec.replace('{series}', series)
_run_with_retries(['add-apt-repository', '--yes', spec])
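
A hedged illustration of the new {series} substitution (the repository URL is made up):

spec = 'deb http://repo.example.com/ubuntu {series} main'
series = 'bionic'  # i.e. lsb_release()['DISTRIB_CODENAME']
spec = spec.replace('{series}', series)
# resulting call: add-apt-repository --yes 'deb http://repo.example.com/ubuntu bionic main'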