[james-page] Managed ceph.conf using alternatives

This commit is contained in:
James Page 2013-11-18 12:17:59 +00:00
commit 0b11d065cb
11 changed files with 372 additions and 135 deletions

View File

@ -5,3 +5,4 @@ include:
- fetch - fetch
- contrib.storage.linux: - contrib.storage.linux:
- utils - utils
- contrib.openstack.alternatives

View File

@ -0,0 +1,17 @@
''' Helper for managing alternatives for file conflict resolution '''
import subprocess
import shutil
import os
def install_alternative(name, target, source, priority=50):
    ''' Install alternative configuration

    Registers ``source`` as an alternative for ``target`` under the
    alternatives group ``name`` via update-alternatives.
    '''
    if os.path.exists(target) and not os.path.islink(target):
        # A real file/directory already occupies the target path;
        # preserve it as a .bak before alternatives takes over the slot.
        shutil.move(target, '{}.bak'.format(target))
    subprocess.check_call([
        'update-alternatives', '--force', '--install',
        target, name, source, str(priority)
    ])

View File

@ -9,6 +9,7 @@ import json
import yaml import yaml
import subprocess import subprocess
import UserDict import UserDict
from subprocess import CalledProcessError
CRITICAL = "CRITICAL" CRITICAL = "CRITICAL"
ERROR = "ERROR" ERROR = "ERROR"
@ -21,7 +22,7 @@ cache = {}
def cached(func): def cached(func):
''' Cache return values for multiple executions of func + args """Cache return values for multiple executions of func + args
For example: For example:
@ -32,7 +33,7 @@ def cached(func):
unit_get('test') unit_get('test')
will cache the result of unit_get + 'test' for future calls. will cache the result of unit_get + 'test' for future calls.
''' """
def wrapper(*args, **kwargs): def wrapper(*args, **kwargs):
global cache global cache
key = str((func, args, kwargs)) key = str((func, args, kwargs))
@ -46,8 +47,8 @@ def cached(func):
def flush(key): def flush(key):
''' Flushes any entries from function cache where the """Flushes any entries from function cache where the
key is found in the function+args ''' key is found in the function+args """
flush_list = [] flush_list = []
for item in cache: for item in cache:
if key in item: if key in item:
@ -57,7 +58,7 @@ def flush(key):
def log(message, level=None): def log(message, level=None):
"Write a message to the juju log" """Write a message to the juju log"""
command = ['juju-log'] command = ['juju-log']
if level: if level:
command += ['-l', level] command += ['-l', level]
@ -66,7 +67,7 @@ def log(message, level=None):
class Serializable(UserDict.IterableUserDict): class Serializable(UserDict.IterableUserDict):
"Wrapper, an object that can be serialized to yaml or json" """Wrapper, an object that can be serialized to yaml or json"""
def __init__(self, obj): def __init__(self, obj):
# wrap the object # wrap the object
@ -96,11 +97,11 @@ class Serializable(UserDict.IterableUserDict):
self.data = state self.data = state
def json(self): def json(self):
"Serialize the object to json" """Serialize the object to json"""
return json.dumps(self.data) return json.dumps(self.data)
def yaml(self): def yaml(self):
"Serialize the object to yaml" """Serialize the object to yaml"""
return yaml.dump(self.data) return yaml.dump(self.data)
@ -119,33 +120,38 @@ def execution_environment():
def in_relation_hook(): def in_relation_hook():
"Determine whether we're running in a relation hook" """Determine whether we're running in a relation hook"""
return 'JUJU_RELATION' in os.environ return 'JUJU_RELATION' in os.environ
def relation_type(): def relation_type():
"The scope for the current relation hook" """The scope for the current relation hook"""
return os.environ.get('JUJU_RELATION', None) return os.environ.get('JUJU_RELATION', None)
def relation_id(): def relation_id():
"The relation ID for the current relation hook" """The relation ID for the current relation hook"""
return os.environ.get('JUJU_RELATION_ID', None) return os.environ.get('JUJU_RELATION_ID', None)
def local_unit(): def local_unit():
"Local unit ID" """Local unit ID"""
return os.environ['JUJU_UNIT_NAME'] return os.environ['JUJU_UNIT_NAME']
def remote_unit(): def remote_unit():
"The remote unit for the current relation hook" """The remote unit for the current relation hook"""
return os.environ['JUJU_REMOTE_UNIT'] return os.environ['JUJU_REMOTE_UNIT']
def service_name():
"""The name service group this unit belongs to"""
return local_unit().split('/')[0]
@cached @cached
def config(scope=None): def config(scope=None):
"Juju charm configuration" """Juju charm configuration"""
config_cmd_line = ['config-get'] config_cmd_line = ['config-get']
if scope is not None: if scope is not None:
config_cmd_line.append(scope) config_cmd_line.append(scope)
@ -158,6 +164,7 @@ def config(scope=None):
@cached @cached
def relation_get(attribute=None, unit=None, rid=None): def relation_get(attribute=None, unit=None, rid=None):
"""Get relation information"""
_args = ['relation-get', '--format=json'] _args = ['relation-get', '--format=json']
if rid: if rid:
_args.append('-r') _args.append('-r')
@ -169,9 +176,14 @@ def relation_get(attribute=None, unit=None, rid=None):
return json.loads(subprocess.check_output(_args)) return json.loads(subprocess.check_output(_args))
except ValueError: except ValueError:
return None return None
except CalledProcessError, e:
if e.returncode == 2:
return None
raise
def relation_set(relation_id=None, relation_settings={}, **kwargs): def relation_set(relation_id=None, relation_settings={}, **kwargs):
"""Set relation information for the current unit"""
relation_cmd_line = ['relation-set'] relation_cmd_line = ['relation-set']
if relation_id is not None: if relation_id is not None:
relation_cmd_line.extend(('-r', relation_id)) relation_cmd_line.extend(('-r', relation_id))
@ -187,28 +199,28 @@ def relation_set(relation_id=None, relation_settings={}, **kwargs):
@cached @cached
def relation_ids(reltype=None): def relation_ids(reltype=None):
"A list of relation_ids" """A list of relation_ids"""
reltype = reltype or relation_type() reltype = reltype or relation_type()
relid_cmd_line = ['relation-ids', '--format=json'] relid_cmd_line = ['relation-ids', '--format=json']
if reltype is not None: if reltype is not None:
relid_cmd_line.append(reltype) relid_cmd_line.append(reltype)
return json.loads(subprocess.check_output(relid_cmd_line)) return json.loads(subprocess.check_output(relid_cmd_line)) or []
return [] return []
@cached @cached
def related_units(relid=None): def related_units(relid=None):
"A list of related units" """A list of related units"""
relid = relid or relation_id() relid = relid or relation_id()
units_cmd_line = ['relation-list', '--format=json'] units_cmd_line = ['relation-list', '--format=json']
if relid is not None: if relid is not None:
units_cmd_line.extend(('-r', relid)) units_cmd_line.extend(('-r', relid))
return json.loads(subprocess.check_output(units_cmd_line)) return json.loads(subprocess.check_output(units_cmd_line)) or []
@cached @cached
def relation_for_unit(unit=None, rid=None): def relation_for_unit(unit=None, rid=None):
"Get the json represenation of a unit's relation" """Get the json represenation of a unit's relation"""
unit = unit or remote_unit() unit = unit or remote_unit()
relation = relation_get(unit=unit, rid=rid) relation = relation_get(unit=unit, rid=rid)
for key in relation: for key in relation:
@ -220,7 +232,7 @@ def relation_for_unit(unit=None, rid=None):
@cached @cached
def relations_for_id(relid=None): def relations_for_id(relid=None):
"Get relations of a specific relation ID" """Get relations of a specific relation ID"""
relation_data = [] relation_data = []
relid = relid or relation_ids() relid = relid or relation_ids()
for unit in related_units(relid): for unit in related_units(relid):
@ -232,7 +244,7 @@ def relations_for_id(relid=None):
@cached @cached
def relations_of_type(reltype=None): def relations_of_type(reltype=None):
"Get relations of a specific type" """Get relations of a specific type"""
relation_data = [] relation_data = []
reltype = reltype or relation_type() reltype = reltype or relation_type()
for relid in relation_ids(reltype): for relid in relation_ids(reltype):
@ -244,7 +256,7 @@ def relations_of_type(reltype=None):
@cached @cached
def relation_types(): def relation_types():
"Get a list of relation types supported by this charm" """Get a list of relation types supported by this charm"""
charmdir = os.environ.get('CHARM_DIR', '') charmdir = os.environ.get('CHARM_DIR', '')
mdf = open(os.path.join(charmdir, 'metadata.yaml')) mdf = open(os.path.join(charmdir, 'metadata.yaml'))
md = yaml.safe_load(mdf) md = yaml.safe_load(mdf)
@ -259,6 +271,7 @@ def relation_types():
@cached @cached
def relations(): def relations():
"""Get a nested dictionary of relation data for all related units"""
rels = {} rels = {}
for reltype in relation_types(): for reltype in relation_types():
relids = {} relids = {}
@ -272,15 +285,35 @@ def relations():
return rels return rels
@cached
def is_relation_made(relation, keys='private-address'):
    '''
    Determine whether a relation is established by checking for
    presence of key(s). If a list of keys is provided, they
    must all be present for the relation to be identified as made
    '''
    key_list = [keys] if isinstance(keys, str) else keys
    for r_id in relation_ids(relation):
        for unit in related_units(r_id):
            # The relation counts as made only when every requested
            # key has a non-None value on this remote unit.
            values = [relation_get(k, rid=r_id, unit=unit)
                      for k in key_list]
            if all(v is not None for v in values):
                return True
    return False
def open_port(port, protocol="TCP"): def open_port(port, protocol="TCP"):
"Open a service network port" """Open a service network port"""
_args = ['open-port'] _args = ['open-port']
_args.append('{}/{}'.format(port, protocol)) _args.append('{}/{}'.format(port, protocol))
subprocess.check_call(_args) subprocess.check_call(_args)
def close_port(port, protocol="TCP"): def close_port(port, protocol="TCP"):
"Close a service network port" """Close a service network port"""
_args = ['close-port'] _args = ['close-port']
_args.append('{}/{}'.format(port, protocol)) _args.append('{}/{}'.format(port, protocol))
subprocess.check_call(_args) subprocess.check_call(_args)
@ -288,6 +321,7 @@ def close_port(port, protocol="TCP"):
@cached @cached
def unit_get(attribute): def unit_get(attribute):
"""Get the unit ID for the remote unit"""
_args = ['unit-get', '--format=json', attribute] _args = ['unit-get', '--format=json', attribute]
try: try:
return json.loads(subprocess.check_output(_args)) return json.loads(subprocess.check_output(_args))
@ -296,22 +330,46 @@ def unit_get(attribute):
def unit_private_ip(): def unit_private_ip():
"""Get this unit's private IP address"""
return unit_get('private-address') return unit_get('private-address')
class UnregisteredHookError(Exception): class UnregisteredHookError(Exception):
"""Raised when an undefined hook is called"""
pass pass
class Hooks(object): class Hooks(object):
"""A convenient handler for hook functions.
Example:
hooks = Hooks()
# register a hook, taking its name from the function name
@hooks.hook()
def install():
...
# register a hook, providing a custom hook name
@hooks.hook("config-changed")
def config_changed():
...
if __name__ == "__main__":
# execute a hook based on the name the program is called by
hooks.execute(sys.argv)
"""
def __init__(self): def __init__(self):
super(Hooks, self).__init__() super(Hooks, self).__init__()
self._hooks = {} self._hooks = {}
def register(self, name, function): def register(self, name, function):
"""Register a hook"""
self._hooks[name] = function self._hooks[name] = function
def execute(self, args): def execute(self, args):
"""Execute a registered hook based on args[0]"""
hook_name = os.path.basename(args[0]) hook_name = os.path.basename(args[0])
if hook_name in self._hooks: if hook_name in self._hooks:
self._hooks[hook_name]() self._hooks[hook_name]()
@ -319,6 +377,7 @@ class Hooks(object):
raise UnregisteredHookError(hook_name) raise UnregisteredHookError(hook_name)
def hook(self, *hook_names): def hook(self, *hook_names):
"""Decorator, registering them as hooks"""
def wrapper(decorated): def wrapper(decorated):
for hook_name in hook_names: for hook_name in hook_names:
self.register(hook_name, decorated) self.register(hook_name, decorated)
@ -330,5 +389,7 @@ class Hooks(object):
return decorated return decorated
return wrapper return wrapper
def charm_dir(): def charm_dir():
"""Return the root directory of the current charm"""
return os.environ.get('CHARM_DIR') return os.environ.get('CHARM_DIR')

View File

@ -5,42 +5,63 @@
# Nick Moffitt <nick.moffitt@canonical.com> # Nick Moffitt <nick.moffitt@canonical.com>
# Matthew Wedgwood <matthew.wedgwood@canonical.com> # Matthew Wedgwood <matthew.wedgwood@canonical.com>
import apt_pkg
import os import os
import pwd import pwd
import grp import grp
import random
import string
import subprocess import subprocess
import hashlib import hashlib
from collections import OrderedDict from collections import OrderedDict
from hookenv import log, execution_environment from hookenv import log
def service_start(service_name): def service_start(service_name):
service('start', service_name) """Start a system service"""
return service('start', service_name)
def service_stop(service_name): def service_stop(service_name):
service('stop', service_name) """Stop a system service"""
return service('stop', service_name)
def service_restart(service_name): def service_restart(service_name):
service('restart', service_name) """Restart a system service"""
return service('restart', service_name)
def service_reload(service_name, restart_on_failure=False): def service_reload(service_name, restart_on_failure=False):
if not service('reload', service_name) and restart_on_failure: """Reload a system service, optionally falling back to restart if reload fails"""
service('restart', service_name) service_result = service('reload', service_name)
if not service_result and restart_on_failure:
service_result = service('restart', service_name)
return service_result
def service(action, service_name): def service(action, service_name):
"""Control a system service"""
cmd = ['service', service_name, action] cmd = ['service', service_name, action]
return subprocess.call(cmd) == 0 return subprocess.call(cmd) == 0
def service_running(service):
    """Determine whether a system service is running"""
    try:
        status = subprocess.check_output(['service', service, 'status'])
    except subprocess.CalledProcessError:
        # 'service ... status' exits non-zero for stopped/unknown services.
        return False
    # Upstart reports "start/running"; SysV init scripts say "is running".
    return "start/running" in status or "is running" in status
def adduser(username, password=None, shell='/bin/bash', system_user=False): def adduser(username, password=None, shell='/bin/bash', system_user=False):
"""Add a user""" """Add a user to the system"""
try: try:
user_info = pwd.getpwnam(username) user_info = pwd.getpwnam(username)
log('user {0} already exists!'.format(username)) log('user {0} already exists!'.format(username))
@ -74,36 +95,33 @@ def add_user_to_group(username, group):
def rsync(from_path, to_path, flags='-r', options=None): def rsync(from_path, to_path, flags='-r', options=None):
"""Replicate the contents of a path""" """Replicate the contents of a path"""
context = execution_environment()
options = options or ['--delete', '--executability'] options = options or ['--delete', '--executability']
cmd = ['/usr/bin/rsync', flags] cmd = ['/usr/bin/rsync', flags]
cmd.extend(options) cmd.extend(options)
cmd.append(from_path.format(**context)) cmd.append(from_path)
cmd.append(to_path.format(**context)) cmd.append(to_path)
log(" ".join(cmd)) log(" ".join(cmd))
return subprocess.check_output(cmd).strip() return subprocess.check_output(cmd).strip()
def symlink(source, destination): def symlink(source, destination):
"""Create a symbolic link""" """Create a symbolic link"""
context = execution_environment()
log("Symlinking {} as {}".format(source, destination)) log("Symlinking {} as {}".format(source, destination))
cmd = [ cmd = [
'ln', 'ln',
'-sf', '-sf',
source.format(**context), source,
destination.format(**context) destination,
] ]
subprocess.check_call(cmd) subprocess.check_call(cmd)
def mkdir(path, owner='root', group='root', perms=0555, force=False): def mkdir(path, owner='root', group='root', perms=0555, force=False):
"""Create a directory""" """Create a directory"""
context = execution_environment()
log("Making dir {} {}:{} {:o}".format(path, owner, group, log("Making dir {} {}:{} {:o}".format(path, owner, group,
perms)) perms))
uid = pwd.getpwnam(owner.format(**context)).pw_uid uid = pwd.getpwnam(owner).pw_uid
gid = grp.getgrnam(group.format(**context)).gr_gid gid = grp.getgrnam(group).gr_gid
realpath = os.path.abspath(path) realpath = os.path.abspath(path)
if os.path.exists(realpath): if os.path.exists(realpath):
if force and not os.path.isdir(realpath): if force and not os.path.isdir(realpath):
@ -114,75 +132,19 @@ def mkdir(path, owner='root', group='root', perms=0555, force=False):
os.chown(realpath, uid, gid) os.chown(realpath, uid, gid)
def write_file(path, fmtstr, owner='root', group='root', perms=0444, **kwargs): def write_file(path, content, owner='root', group='root', perms=0444):
"""Create or overwrite a file with the contents of a string""" """Create or overwrite a file with the contents of a string"""
context = execution_environment() log("Writing file {} {}:{} {:o}".format(path, owner, group, perms))
context.update(kwargs) uid = pwd.getpwnam(owner).pw_uid
log("Writing file {} {}:{} {:o}".format(path, owner, group, gid = grp.getgrnam(group).gr_gid
perms)) with open(path, 'w') as target:
uid = pwd.getpwnam(owner.format(**context)).pw_uid
gid = grp.getgrnam(group.format(**context)).gr_gid
with open(path.format(**context), 'w') as target:
os.fchown(target.fileno(), uid, gid) os.fchown(target.fileno(), uid, gid)
os.fchmod(target.fileno(), perms) os.fchmod(target.fileno(), perms)
target.write(fmtstr.format(**context)) target.write(content)
def render_template_file(source, destination, **kwargs):
"""Create or overwrite a file using a template"""
log("Rendering template {} for {}".format(source,
destination))
context = execution_environment()
with open(source.format(**context), 'r') as template:
write_file(destination.format(**context), template.read(),
**kwargs)
def filter_installed_packages(packages):
"""Returns a list of packages that require installation"""
apt_pkg.init()
cache = apt_pkg.Cache()
_pkgs = []
for package in packages:
try:
p = cache[package]
p.current_ver or _pkgs.append(package)
except KeyError:
log('Package {} has no installation candidate.'.format(package),
level='WARNING')
_pkgs.append(package)
return _pkgs
def apt_install(packages, options=None, fatal=False):
"""Install one or more packages"""
options = options or []
cmd = ['apt-get', '-y']
cmd.extend(options)
cmd.append('install')
if isinstance(packages, basestring):
cmd.append(packages)
else:
cmd.extend(packages)
log("Installing {} with options: {}".format(packages,
options))
if fatal:
subprocess.check_call(cmd)
else:
subprocess.call(cmd)
def apt_update(fatal=False):
"""Update local apt cache"""
cmd = ['apt-get', 'update']
if fatal:
subprocess.check_call(cmd)
else:
subprocess.call(cmd)
def mount(device, mountpoint, options=None, persist=False): def mount(device, mountpoint, options=None, persist=False):
'''Mount a filesystem''' """Mount a filesystem at a particular mountpoint"""
cmd_args = ['mount'] cmd_args = ['mount']
if options is not None: if options is not None:
cmd_args.extend(['-o', options]) cmd_args.extend(['-o', options])
@ -199,7 +161,7 @@ def mount(device, mountpoint, options=None, persist=False):
def umount(mountpoint, persist=False): def umount(mountpoint, persist=False):
'''Unmount a filesystem''' """Unmount a filesystem"""
cmd_args = ['umount', mountpoint] cmd_args = ['umount', mountpoint]
try: try:
subprocess.check_output(cmd_args) subprocess.check_output(cmd_args)
@ -213,7 +175,7 @@ def umount(mountpoint, persist=False):
def mounts(): def mounts():
'''List of all mounted volumes as [[mountpoint,device],[...]]''' """Get a list of all mounted volumes as [[mountpoint,device],[...]]"""
with open('/proc/mounts') as f: with open('/proc/mounts') as f:
# [['/mount/point','/dev/path'],[...]] # [['/mount/point','/dev/path'],[...]]
system_mounts = [m[1::-1] for m in [l.strip().split() system_mounts = [m[1::-1] for m in [l.strip().split()
@ -222,7 +184,7 @@ def mounts():
def file_hash(path): def file_hash(path):
''' Generate a md5 hash of the contents of 'path' or None if not found ''' """Generate a md5 hash of the contents of 'path' or None if not found """
if os.path.exists(path): if os.path.exists(path):
h = hashlib.md5() h = hashlib.md5()
with open(path, 'r') as source: with open(path, 'r') as source:
@ -233,7 +195,7 @@ def file_hash(path):
def restart_on_change(restart_map): def restart_on_change(restart_map):
''' Restart services based on configuration files changing """Restart services based on configuration files changing
This function is used a decorator, for example This function is used a decorator, for example
@ -246,7 +208,7 @@ def restart_on_change(restart_map):
In this example, the cinder-api and cinder-volume services In this example, the cinder-api and cinder-volume services
would be restarted if /etc/ceph/ceph.conf is changed by the would be restarted if /etc/ceph/ceph.conf is changed by the
ceph_client_changed function. ceph_client_changed function.
''' """
def wrap(f): def wrap(f):
def wrapped_f(*args): def wrapped_f(*args):
checksums = {} checksums = {}
@ -264,10 +226,22 @@ def restart_on_change(restart_map):
def lsb_release(): def lsb_release():
'''Return /etc/lsb-release in a dict''' """Return /etc/lsb-release in a dict"""
d = {} d = {}
with open('/etc/lsb-release', 'r') as lsb: with open('/etc/lsb-release', 'r') as lsb:
for l in lsb: for l in lsb:
k, v = l.split('=') k, v = l.split('=')
d[k.strip()] = v.strip() d[k.strip()] = v.strip()
return d return d
def pwgen(length=None):
    """Generate a random password.

    :param length: desired password length; when None a random length
        in the range 35-44 is chosen.
    :returns: string of alphanumeric characters, excluding a set of
        easily-confused glyphs and vowels (avoids accidental words).
    """
    if length is None:
        length = random.choice(range(35, 45))
    # string.ascii_letters instead of the Python-2-only, locale-dependent
    # string.letters: identical default result, stable across locales.
    alphanumeric_chars = [
        l for l in (string.ascii_letters + string.digits)
        if l not in 'l0QD1vAEIOUaeiou']
    random_chars = [
        random.choice(alphanumeric_chars) for _ in range(length)]
    return ''.join(random_chars)

View File

@ -1,9 +1,6 @@
import importlib import importlib
from yaml import safe_load from yaml import safe_load
from charmhelpers.core.host import ( from charmhelpers.core.host import (
apt_install,
apt_update,
filter_installed_packages,
lsb_release lsb_release
) )
from urlparse import ( from urlparse import (
@ -15,6 +12,8 @@ from charmhelpers.core.hookenv import (
config, config,
log, log,
) )
import apt_pkg
import os
CLOUD_ARCHIVE = """# Ubuntu Cloud Archive CLOUD_ARCHIVE = """# Ubuntu Cloud Archive
deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main
@ -22,18 +21,128 @@ deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main
PROPOSED_POCKET = """# Proposed PROPOSED_POCKET = """# Proposed
deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted
""" """
# Map user-facing 'cloud:' source names to Ubuntu Cloud Archive pockets.
CLOUD_ARCHIVE_POCKETS = {
    # Folsom
    'folsom': 'precise-updates/folsom',
    'precise-folsom': 'precise-updates/folsom',
    'precise-folsom/updates': 'precise-updates/folsom',
    'precise-updates/folsom': 'precise-updates/folsom',
    'folsom/proposed': 'precise-proposed/folsom',
    'precise-folsom/proposed': 'precise-proposed/folsom',
    'precise-proposed/folsom': 'precise-proposed/folsom',
    # Grizzly
    'grizzly': 'precise-updates/grizzly',
    'precise-grizzly': 'precise-updates/grizzly',
    'precise-grizzly/updates': 'precise-updates/grizzly',
    'precise-updates/grizzly': 'precise-updates/grizzly',
    'grizzly/proposed': 'precise-proposed/grizzly',
    'precise-grizzly/proposed': 'precise-proposed/grizzly',
    'precise-proposed/grizzly': 'precise-proposed/grizzly',
    # Havana
    'havana': 'precise-updates/havana',
    'precise-havana': 'precise-updates/havana',
    'precise-havana/updates': 'precise-updates/havana',
    'precise-updates/havana': 'precise-updates/havana',
    'havana/proposed': 'precise-proposed/havana',
    # Correctly-spelled key; without it 'precise-havana/proposed'
    # raised SourceConfigError in add_source().
    'precise-havana/proposed': 'precise-proposed/havana',
    # Historical typo key retained for backward compatibility.
    'precies-havana/proposed': 'precise-proposed/havana',
    'precise-proposed/havana': 'precise-proposed/havana',
}
def filter_installed_packages(packages):
    """Returns a list of packages that require installation"""
    apt_pkg.init()
    cache = apt_pkg.Cache()
    missing = []
    for pkg in packages:
        try:
            entry = cache[pkg]
        except KeyError:
            # Unknown to apt: report it and still pass it through so
            # a subsequent install attempt surfaces the real error.
            log('Package {} has no installation candidate.'.format(pkg),
                level='WARNING')
            missing.append(pkg)
        else:
            if not entry.current_ver:
                missing.append(pkg)
    return missing
def apt_install(packages, options=None, fatal=False):
    """Install one or more packages"""
    # Default option keeps existing conffiles on upgrade.
    opts = (['--option=Dpkg::Options::=--force-confold']
            if options is None else options)
    cmd = ['apt-get', '--assume-yes'] + list(opts) + ['install']
    if isinstance(packages, basestring):
        cmd.append(packages)
    else:
        cmd.extend(packages)
    log("Installing {} with options: {}".format(packages,
                                                opts))
    # Ensure dpkg never blocks on interactive prompts inside a hook.
    env = os.environ.copy()
    env.setdefault('DEBIAN_FRONTEND', 'noninteractive')
    runner = subprocess.check_call if fatal else subprocess.call
    runner(cmd, env=env)
def apt_update(fatal=False):
    """Update local apt cache"""
    cmd = ['apt-get', 'update']
    runner = subprocess.check_call if fatal else subprocess.call
    runner(cmd)
def apt_purge(packages, fatal=False):
    """Purge one or more packages"""
    if isinstance(packages, basestring):
        pkg_args = [packages]
    else:
        pkg_args = list(packages)
    cmd = ['apt-get', '--assume-yes', 'purge'] + pkg_args
    log("Purging {}".format(packages))
    runner = subprocess.check_call if fatal else subprocess.call
    runner(cmd)
def apt_hold(packages, fatal=False):
    """Hold one or more packages"""
    if isinstance(packages, basestring):
        pkg_args = [packages]
    else:
        pkg_args = list(packages)
    cmd = ['apt-mark', 'hold'] + pkg_args
    log("Holding {}".format(packages))
    runner = subprocess.check_call if fatal else subprocess.call
    runner(cmd)
def add_source(source, key=None): def add_source(source, key=None):
if ((source.startswith('ppa:') or if (source.startswith('ppa:') or
source.startswith('http:'))): source.startswith('http:') or
subprocess.check_call(['add-apt-repository', source]) source.startswith('deb ') or
source.startswith('cloud-archive:')):
subprocess.check_call(['add-apt-repository', '--yes', source])
elif source.startswith('cloud:'): elif source.startswith('cloud:'):
apt_install(filter_installed_packages(['ubuntu-cloud-keyring']), apt_install(filter_installed_packages(['ubuntu-cloud-keyring']),
fatal=True) fatal=True)
pocket = source.split(':')[-1] pocket = source.split(':')[-1]
if pocket not in CLOUD_ARCHIVE_POCKETS:
raise SourceConfigError(
'Unsupported cloud: source option %s' %
pocket)
actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket]
with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt: with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt:
apt.write(CLOUD_ARCHIVE.format(pocket)) apt.write(CLOUD_ARCHIVE.format(actual_pocket))
elif source == 'proposed': elif source == 'proposed':
release = lsb_release()['DISTRIB_CODENAME'] release = lsb_release()['DISTRIB_CODENAME']
with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt: with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt:
@ -63,8 +172,11 @@ def configure_sources(update=False,
Note that 'null' (a.k.a. None) should not be quoted. Note that 'null' (a.k.a. None) should not be quoted.
""" """
sources = safe_load(config(sources_var)) sources = safe_load(config(sources_var))
keys = safe_load(config(keys_var)) keys = config(keys_var)
if isinstance(sources, basestring) and isinstance(keys, basestring): if keys is not None:
keys = safe_load(keys)
if isinstance(sources, basestring) and (
keys is None or isinstance(keys, basestring)):
add_source(sources, keys) add_source(sources, keys)
else: else:
if not len(sources) == len(keys): if not len(sources) == len(keys):
@ -79,6 +191,7 @@ def configure_sources(update=False,
# least- to most-specific URL matching. # least- to most-specific URL matching.
FETCH_HANDLERS = ( FETCH_HANDLERS = (
'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler', 'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler',
'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler',
) )
@ -98,6 +211,7 @@ def install_remote(source):
# We ONLY check for True here because can_handle may return a string # We ONLY check for True here because can_handle may return a string
# explaining why it can't handle a given source. # explaining why it can't handle a given source.
handlers = [h for h in plugins() if h.can_handle(source) is True] handlers = [h for h in plugins() if h.can_handle(source) is True]
installed_to = None
for handler in handlers: for handler in handlers:
try: try:
installed_to = handler.install(source) installed_to = handler.install(source)
@ -115,7 +229,9 @@ def install_from_config(config_var_name):
class BaseFetchHandler(object): class BaseFetchHandler(object):
"""Base class for FetchHandler implementations in fetch plugins""" """Base class for FetchHandler implementations in fetch plugins"""
def can_handle(self, source): def can_handle(self, source):
"""Returns True if the source can be handled. Otherwise returns """Returns True if the source can be handled. Otherwise returns
a string explaining why it cannot""" a string explaining why it cannot"""
@ -143,10 +259,13 @@ def plugins(fetch_handlers=None):
for handler_name in fetch_handlers: for handler_name in fetch_handlers:
package, classname = handler_name.rsplit('.', 1) package, classname = handler_name.rsplit('.', 1)
try: try:
handler_class = getattr(importlib.import_module(package), classname) handler_class = getattr(
importlib.import_module(package),
classname)
plugin_list.append(handler_class()) plugin_list.append(handler_class())
except (ImportError, AttributeError): except (ImportError, AttributeError):
# Skip missing plugins so that they can be omitted from # Skip missing plugins so that they can be omitted from
# installation if desired # installation if desired
log("FetchHandler {} not found, skipping plugin".format(handler_name)) log("FetchHandler {} not found, skipping plugin".format(
handler_name))
return plugin_list return plugin_list

View File

@ -8,6 +8,7 @@ from charmhelpers.payload.archive import (
get_archive_handler, get_archive_handler,
extract, extract,
) )
from charmhelpers.core.host import mkdir
class ArchiveUrlFetchHandler(BaseFetchHandler): class ArchiveUrlFetchHandler(BaseFetchHandler):
@ -24,20 +25,24 @@ class ArchiveUrlFetchHandler(BaseFetchHandler):
# propogate all exceptions # propogate all exceptions
# URLError, OSError, etc # URLError, OSError, etc
response = urllib2.urlopen(source) response = urllib2.urlopen(source)
with open(dest, 'w') as dest_file: try:
dest_file.write(response.read()) with open(dest, 'w') as dest_file:
dest_file.write(response.read())
except Exception as e:
if os.path.isfile(dest):
os.unlink(dest)
raise e
def install(self, source): def install(self, source):
url_parts = self.parse_url(source) url_parts = self.parse_url(source)
dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched') dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')
if not os.path.exists(dest_dir):
mkdir(dest_dir, perms=0755)
dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path)) dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path))
try: try:
self.download(source, dld_file) self.download(source, dld_file)
except urllib2.URLError as e: except urllib2.URLError as e:
return UnhandledSource(e.reason) raise UnhandledSource(e.reason)
except OSError as e: except OSError as e:
return UnhandledSource(e.strerror) raise UnhandledSource(e.strerror)
finally:
if os.path.isfile(dld_file):
os.unlink(dld_file)
return extract(dld_file) return extract(dld_file)

View File

@ -0,0 +1,49 @@
import os
from charmhelpers.fetch import (
BaseFetchHandler,
UnhandledSource
)
from charmhelpers.core.host import mkdir
try:
from bzrlib.branch import Branch
except ImportError:
from charmhelpers.fetch import apt_install
apt_install("python-bzrlib")
from bzrlib.branch import Branch
class BzrUrlFetchHandler(BaseFetchHandler):
"""Handler for bazaar branches via generic and lp URLs"""
def can_handle(self, source):
url_parts = self.parse_url(source)
if url_parts.scheme not in ('bzr+ssh', 'lp'):
return False
else:
return True
def branch(self, source, dest):
url_parts = self.parse_url(source)
# If we use lp:branchname scheme we need to load plugins
if not self.can_handle(source):
raise UnhandledSource("Cannot handle {}".format(source))
if url_parts.scheme == "lp":
from bzrlib.plugin import load_plugins
load_plugins()
try:
remote_branch = Branch.open(source)
remote_branch.bzrdir.sprout(dest).open_branch()
except Exception as e:
raise e
def install(self, source):
url_parts = self.parse_url(source)
branch_name = url_parts.path.strip("/").split("/")[-1]
dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", branch_name)
if not os.path.exists(dest_dir):
mkdir(dest_dir, perms=0755)
try:
self.branch(source, dest_dir)
except OSError as e:
raise UnhandledSource(e.strerror)
return dest_dir

View File

@ -21,21 +21,27 @@ from charmhelpers.core.hookenv import (
related_units, related_units,
relation_get, relation_get,
Hooks, Hooks,
UnregisteredHookError UnregisteredHookError,
service_name
) )
from charmhelpers.core.host import ( from charmhelpers.core.host import (
umount,
mkdir
)
from charmhelpers.fetch import (
add_source,
apt_install, apt_install,
apt_update, apt_update,
filter_installed_packages, filter_installed_packages,
umount
) )
from charmhelpers.fetch import add_source
from utils import ( from utils import (
render_template, render_template,
get_host_ip, get_host_ip,
) )
from charmhelpers.contrib.openstack.alternatives import install_alternative
hooks = Hooks() hooks = Hooks()
@ -66,9 +72,14 @@ def emit_cephconf():
'fsid': get_fsid(), 'fsid': get_fsid(),
'version': ceph.get_ceph_version() 'version': ceph.get_ceph_version()
} }
# Install ceph.conf as an alternative to support
with open('/etc/ceph/ceph.conf', 'w') as cephconf: # co-existence with other charms that write this file
charm_ceph_conf = "/var/lib/charm/{}/ceph.conf".format(service_name())
mkdir(os.path.dirname(charm_ceph_conf))
with open(charm_ceph_conf, 'w') as cephconf:
cephconf.write(render_template('ceph.conf', cephcontext)) cephconf.write(render_template('ceph.conf', cephcontext))
install_alternative('ceph.conf', '/etc/ceph/ceph.conf',
charm_ceph_conf, 90)
JOURNAL_ZAPPED = '/var/lib/ceph/journal_zapped' JOURNAL_ZAPPED = '/var/lib/ceph/journal_zapped'

View File

@ -13,7 +13,7 @@ from charmhelpers.core.hookenv import (
unit_get, unit_get,
cached cached
) )
from charmhelpers.core.host import ( from charmhelpers.fetch import (
apt_install, apt_install,
filter_installed_packages filter_installed_packages
) )

View File

@ -1 +1 @@
11 13