Sync charm-helpers to enable swift 2.10.0

Change-Id: I3373eedd4fb98d521bf1123f5513319ef953a347
This commit is contained in:
Corey Bryant 2016-09-29 16:42:23 +00:00
parent cae0a2c4f5
commit 2149d2dea3
11 changed files with 86 additions and 26 deletions

View File

@@ -14,6 +14,11 @@
import os
from charmhelpers.contrib.network.ip import (
get_address_in_network,
get_iface_addr,
is_ip,
)
from charmhelpers.core.hookenv import (
log,
DEBUG,
@@ -121,6 +126,36 @@ class SSHConfigContext(object):
return cipher[weak_ciphers]
def get_listening(self, listen=['0.0.0.0']):
"""Returns a list of addresses SSH can list on
Turns input into a sensible list of IPs SSH can listen on. Input
must be a python list of interface names, IPs and/or CIDRs.
:param listen: list of IPs, CIDRs, interface names
:returns: list of IPs available on the host
"""
if listen == ['0.0.0.0']:
return listen
value = []
for network in listen:
try:
ip = get_address_in_network(network=network, fatal=True)
except ValueError:
if is_ip(network):
ip = network
else:
try:
ip = get_iface_addr(iface=network, fatal=False)[0]
except IndexError:
continue
value.append(ip)
if value == []:
return ['0.0.0.0']
return value
def __call__(self):
settings = utils.get_settings('ssh')
if settings['common']['network_ipv6_enable']:
@@ -180,7 +215,7 @@ class SSHDConfigContext(SSHConfigContext):
addr_family = 'inet'
ctxt = {
'ssh_ip': settings['server']['listen_to'],
'ssh_ip': self.get_listening(settings['server']['listen_to']),
'password_auth_allowed':
settings['server']['password_authentication'],
'ports': settings['common']['ports'],
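
The new get_listening() helper turns the hardening config's listen_to setting into concrete addresses before it reaches the sshd context. A rough usage sketch; the module path, interface name and addresses are assumptions for illustration, not part of this diff:

from charmhelpers.contrib.hardening.ssh.checks.config import SSHConfigContext

ctxt = SSHConfigContext()
ctxt.get_listening(['0.0.0.0'])              # passed through unchanged
ctxt.get_listening(['10.0.0.0/24', 'eth0'])  # e.g. ['10.0.0.5', '10.20.0.2']
ctxt.get_listening(['bond-missing'])         # nothing resolvable -> ['0.0.0.0']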

View File

@@ -406,7 +406,7 @@ def is_ip(address):
# Test to see if already an IPv4/IPv6 address
address = netaddr.IPAddress(address)
return True
except netaddr.AddrFormatError:
except (netaddr.AddrFormatError, ValueError):
return False
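
Catching ValueError as well matters because netaddr.IPAddress() raises ValueError (not AddrFormatError) for inputs such as CIDRs, which get_listening() above now routes through is_ip(). A small sketch of the expected behaviour (sample values only):

from charmhelpers.contrib.network.ip import is_ip

is_ip('192.0.2.1')      # True
is_ip('10.0.0.0/24')    # False -- ValueError from netaddr, now caught
is_ip('eth0')           # False -- AddrFormatError, as before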

View File

@@ -258,6 +258,7 @@ class OpenStackAmuletDeployment(AmuletDeployment):
('vivid', 'kilo'),
('wily', 'liberty'),
('xenial', 'mitaka'),
('yakkety', 'newton'),
])
if self.openstack:
os_origin = self.openstack.split(':')[1]

View File

@@ -1421,9 +1421,9 @@ class InternalEndpointContext(OSContextGenerator):
class AppArmorContext(OSContextGenerator):
"""Base class for apparmor contexts."""
def __init__(self):
def __init__(self, profile_name=None):
self._ctxt = None
self.aa_profile = None
self.aa_profile = profile_name
self.aa_utils_packages = ['apparmor-utils']
@property
@@ -1442,6 +1442,8 @@ class AppArmorContext(OSContextGenerator):
if config('aa-profile-mode') in ['disable', 'enforce', 'complain']:
ctxt = {'aa_profile_mode': config('aa-profile-mode'),
'ubuntu_release': lsb_release()['DISTRIB_RELEASE']}
if self.aa_profile:
ctxt['aa_profile'] = self.aa_profile
else:
ctxt = None
return ctxt
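
Passing profile_name through __init__ lets a charm bind an AppArmorContext subclass to a specific profile without re-implementing the constructor. A minimal sketch, assuming a charm-side subclass; the class and profile names are hypothetical:

from charmhelpers.contrib.openstack.context import AppArmorContext

class SwiftProxyAppArmorContext(AppArmorContext):
    def __init__(self):
        # 'usr.bin.swift-proxy-server' is an illustrative profile name.
        super(SwiftProxyAppArmorContext, self).__init__(
            profile_name='usr.bin.swift-proxy-server')

With config('aa-profile-mode') set, the rendered context then also carries the 'aa_profile' key.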

View File

@@ -30,6 +30,7 @@ from charmhelpers.contrib.hahelpers.cluster import is_clustered
PUBLIC = 'public'
INTERNAL = 'int'
ADMIN = 'admin'
ACCESS = 'access'
ADDRESS_MAP = {
PUBLIC: {
@@ -49,7 +50,13 @@ ADDRESS_MAP = {
'config': 'os-admin-network',
'fallback': 'private-address',
'override': 'os-admin-hostname',
}
},
ACCESS: {
'binding': 'access',
'config': 'access-network',
'fallback': 'private-address',
'override': 'os-access-hostname',
},
}
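
The ACCESS entry extends the existing endpoint map so charms with an access network (access-network config / access binding) can resolve it like the other endpoint types. A hedged sketch, assuming resolve_address() is used the same way as for PUBLIC/INTERNAL/ADMIN:

from charmhelpers.contrib.openstack.ip import ACCESS, resolve_address

# Prefers the 'os-access-hostname' override, then an address on the
# 'access-network' config / 'access' binding, then private-address.
access_addr = resolve_address(endpoint_type=ACCESS)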

View File

@@ -249,6 +249,8 @@ def neutron_plugins():
plugins['nsx']['server_packages'].remove('neutron-plugin-vmware')
plugins['nsx']['server_packages'].append('python-vmware-nsx')
plugins['nsx']['config'] = '/etc/neutron/nsx.ini'
plugins['vsp']['driver'] = (
'nuage_neutron.plugins.nuage.plugin.NuagePlugin')
return plugins

View File

@@ -81,7 +81,12 @@ from charmhelpers.core.host import (
service_resume,
restart_on_change_helper,
)
from charmhelpers.fetch import apt_install, apt_cache, install_remote
from charmhelpers.fetch import (
apt_install,
apt_cache,
install_remote,
get_upstream_version
)
from charmhelpers.contrib.storage.linux.utils import is_block_device, zap_disk
from charmhelpers.contrib.storage.linux.loopback import ensure_loopback_device
from charmhelpers.contrib.openstack.exceptions import OSContextError
@@ -146,7 +151,7 @@ SWIFT_CODENAMES = OrderedDict([
('mitaka',
['2.5.0', '2.6.0', '2.7.0']),
('newton',
['2.8.0', '2.9.0']),
['2.8.0', '2.9.0', '2.10.0']),
])
# >= Liberty version->codename mapping
@@ -1894,25 +1899,10 @@ def config_flags_parser(config_flags):
def os_application_version_set(package):
'''Set version of application for Juju 2.0 and later'''
import apt_pkg as apt
cache = apt_cache()
application_version = None
application_codename = os_release(package)
try:
pkg = cache[package]
if not pkg.current_ver:
juju_log('Package {} is not currently installed.'.format(package),
DEBUG)
else:
application_version = apt.upstream_version(pkg.current_ver.ver_str)
except:
juju_log('Package {} has no installation candidate.'.format(package),
DEBUG)
application_version = get_upstream_version(package)
# NOTE(jamespage) if not able to figure out package version, fallback to
# openstack codename version detection.
if not application_version:
application_version_set(application_codename)
application_version_set(os_release(package))
else:
application_version_set(application_version)
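
The rewritten os_application_version_set() leans on the new get_upstream_version() helper and keeps the codename fallback, which is what lets the swift charms report 2.10.0 (now mapped to newton above) or fall back to the release name. A simplified sketch of the resulting flow; the package name and versions are illustrative:

from charmhelpers.core.hookenv import application_version_set
from charmhelpers.fetch import get_upstream_version
from charmhelpers.contrib.openstack.utils import os_release

version = get_upstream_version('swift')           # e.g. '2.10.0', or None
if version:
    application_version_set(version)
else:
    application_version_set(os_release('swift'))  # e.g. 'newton'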

View File

@@ -92,6 +92,7 @@ if __platform__ == "ubuntu":
apt_mark = fetch.apt_mark
apt_hold = fetch.apt_hold
apt_unhold = fetch.apt_unhold
get_upstream_version = fetch.get_upstream_version
elif __platform__ == "centos":
yum_search = fetch.yum_search

View File

@@ -314,3 +314,23 @@ def _run_apt_command(cmd, fatal=False):
else:
subprocess.call(cmd, env=env)
def get_upstream_version(package):
"""Determine upstream version based on installed package
@returns None (if not installed) or the upstream version
"""
import apt_pkg
cache = apt_cache()
try:
pkg = cache[package]
except:
# the package is unknown to the current apt cache.
return None
if not pkg.current_ver:
# package is known, but no version is currently installed.
return None
return apt_pkg.upstream_version(pkg.current_ver.ver_str)
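
apt_pkg.upstream_version() strips any epoch and Debian revision, so callers receive a bare upstream version string or None. A usage sketch (version strings are illustrative):

from charmhelpers.fetch import get_upstream_version

get_upstream_version('swift')        # e.g. '2.10.0' when installed
get_upstream_version('no-such-pkg')  # None -- unknown to the apt cache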

View File

@@ -47,11 +47,12 @@ def execd_submodule_paths(command, execd_dir=None):
yield path
def execd_run(command, execd_dir=None, die_on_error=False, stderr=None):
def execd_run(command, execd_dir=None, die_on_error=True, stderr=subprocess.STDOUT):
"""Run command for each module within execd_dir which defines it."""
for submodule_path in execd_submodule_paths(command, execd_dir):
try:
subprocess.check_call(submodule_path, shell=True, stderr=stderr)
subprocess.check_output(submodule_path, stderr=stderr,
universal_newlines=True)
except subprocess.CalledProcessError as e:
hookenv.log("Error ({}) running {}. Output: {}".format(
e.returncode, e.cmd, e.output))
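
execd_run() now captures each script's combined stdout/stderr so the log line above actually contains the failure output, and errors are fatal by default (die_on_error=True). An illustrative call; the command name follows the usual exec.d convention and is not something this hunk introduces:

from charmhelpers.payload.execd import execd_run

# Runs each exec.d/<module>/charm-pre-install script in the charm dir;
# a non-zero exit is logged with its output and, with die_on_error left
# at its new default, treated as fatal.
execd_run('charm-pre-install')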

View File

@@ -258,6 +258,7 @@ class OpenStackAmuletDeployment(AmuletDeployment):
('vivid', 'kilo'),
('wily', 'liberty'),
('xenial', 'mitaka'),
('yakkety', 'newton'),
])
if self.openstack:
os_origin = self.openstack.split(':')[1]