[james-page] Managed ceph.conf using alternatives

This commit is contained in:
James Page 2013-11-18 12:17:59 +00:00
commit 0b11d065cb
11 changed files with 372 additions and 135 deletions

View File

@ -5,3 +5,4 @@ include:
- fetch
- contrib.storage.linux:
- utils
- contrib.openstack.alternatives

View File

@ -0,0 +1,17 @@
''' Helper for managing alternatives for file conflict resolution '''
import subprocess
import shutil
import os
def install_alternative(name, target, source, priority=50):
    """Register ``source`` as an alternative for ``target``.

    If ``target`` already exists as a real file or directory (rather
    than a symlink managed by update-alternatives), it is moved aside
    to ``<target>.bak`` first so the alternative link can be created.
    """
    target_is_real = os.path.exists(target) and not os.path.islink(target)
    if target_is_real:
        # Preserve the pre-existing file/directory before taking over.
        shutil.move(target, '{}.bak'.format(target))
    subprocess.check_call([
        'update-alternatives', '--force', '--install',
        target, name, source, str(priority),
    ])

View File

@ -9,6 +9,7 @@ import json
import yaml
import subprocess
import UserDict
from subprocess import CalledProcessError
CRITICAL = "CRITICAL"
ERROR = "ERROR"
@ -21,7 +22,7 @@ cache = {}
def cached(func):
''' Cache return values for multiple executions of func + args
"""Cache return values for multiple executions of func + args
For example:
@ -32,7 +33,7 @@ def cached(func):
unit_get('test')
will cache the result of unit_get + 'test' for future calls.
'''
"""
def wrapper(*args, **kwargs):
global cache
key = str((func, args, kwargs))
@ -46,8 +47,8 @@ def cached(func):
def flush(key):
''' Flushes any entries from function cache where the
key is found in the function+args '''
"""Flushes any entries from function cache where the
key is found in the function+args """
flush_list = []
for item in cache:
if key in item:
@ -57,7 +58,7 @@ def flush(key):
def log(message, level=None):
"Write a message to the juju log"
"""Write a message to the juju log"""
command = ['juju-log']
if level:
command += ['-l', level]
@ -66,7 +67,7 @@ def log(message, level=None):
class Serializable(UserDict.IterableUserDict):
"Wrapper, an object that can be serialized to yaml or json"
"""Wrapper, an object that can be serialized to yaml or json"""
def __init__(self, obj):
# wrap the object
@ -96,11 +97,11 @@ class Serializable(UserDict.IterableUserDict):
self.data = state
def json(self):
"Serialize the object to json"
"""Serialize the object to json"""
return json.dumps(self.data)
def yaml(self):
"Serialize the object to yaml"
"""Serialize the object to yaml"""
return yaml.dump(self.data)
@ -119,33 +120,38 @@ def execution_environment():
def in_relation_hook():
"Determine whether we're running in a relation hook"
"""Determine whether we're running in a relation hook"""
return 'JUJU_RELATION' in os.environ
def relation_type():
"The scope for the current relation hook"
"""The scope for the current relation hook"""
return os.environ.get('JUJU_RELATION', None)
def relation_id():
"The relation ID for the current relation hook"
"""The relation ID for the current relation hook"""
return os.environ.get('JUJU_RELATION_ID', None)
def local_unit():
"Local unit ID"
"""Local unit ID"""
return os.environ['JUJU_UNIT_NAME']
def remote_unit():
"The remote unit for the current relation hook"
"""The remote unit for the current relation hook"""
return os.environ['JUJU_REMOTE_UNIT']
def service_name():
    """Name of the service group (charm) this unit belongs to."""
    # Unit names look like "myservice/0"; the service is the prefix.
    unit = local_unit()
    return unit.split('/')[0]
@cached
def config(scope=None):
"Juju charm configuration"
"""Juju charm configuration"""
config_cmd_line = ['config-get']
if scope is not None:
config_cmd_line.append(scope)
@ -158,6 +164,7 @@ def config(scope=None):
@cached
def relation_get(attribute=None, unit=None, rid=None):
"""Get relation information"""
_args = ['relation-get', '--format=json']
if rid:
_args.append('-r')
@ -169,9 +176,14 @@ def relation_get(attribute=None, unit=None, rid=None):
return json.loads(subprocess.check_output(_args))
except ValueError:
return None
except CalledProcessError, e:
if e.returncode == 2:
return None
raise
def relation_set(relation_id=None, relation_settings={}, **kwargs):
"""Set relation information for the current unit"""
relation_cmd_line = ['relation-set']
if relation_id is not None:
relation_cmd_line.extend(('-r', relation_id))
@ -187,28 +199,28 @@ def relation_set(relation_id=None, relation_settings={}, **kwargs):
@cached
def relation_ids(reltype=None):
"A list of relation_ids"
"""A list of relation_ids"""
reltype = reltype or relation_type()
relid_cmd_line = ['relation-ids', '--format=json']
if reltype is not None:
relid_cmd_line.append(reltype)
return json.loads(subprocess.check_output(relid_cmd_line))
return json.loads(subprocess.check_output(relid_cmd_line)) or []
return []
@cached
def related_units(relid=None):
"A list of related units"
"""A list of related units"""
relid = relid or relation_id()
units_cmd_line = ['relation-list', '--format=json']
if relid is not None:
units_cmd_line.extend(('-r', relid))
return json.loads(subprocess.check_output(units_cmd_line))
return json.loads(subprocess.check_output(units_cmd_line)) or []
@cached
def relation_for_unit(unit=None, rid=None):
"Get the json represenation of a unit's relation"
"""Get the json represenation of a unit's relation"""
unit = unit or remote_unit()
relation = relation_get(unit=unit, rid=rid)
for key in relation:
@ -220,7 +232,7 @@ def relation_for_unit(unit=None, rid=None):
@cached
def relations_for_id(relid=None):
"Get relations of a specific relation ID"
"""Get relations of a specific relation ID"""
relation_data = []
relid = relid or relation_ids()
for unit in related_units(relid):
@ -232,7 +244,7 @@ def relations_for_id(relid=None):
@cached
def relations_of_type(reltype=None):
"Get relations of a specific type"
"""Get relations of a specific type"""
relation_data = []
reltype = reltype or relation_type()
for relid in relation_ids(reltype):
@ -244,7 +256,7 @@ def relations_of_type(reltype=None):
@cached
def relation_types():
"Get a list of relation types supported by this charm"
"""Get a list of relation types supported by this charm"""
charmdir = os.environ.get('CHARM_DIR', '')
mdf = open(os.path.join(charmdir, 'metadata.yaml'))
md = yaml.safe_load(mdf)
@ -259,6 +271,7 @@ def relation_types():
@cached
def relations():
"""Get a nested dictionary of relation data for all related units"""
rels = {}
for reltype in relation_types():
relids = {}
@ -272,15 +285,35 @@ def relations():
return rels
@cached
def is_relation_made(relation, keys='private-address'):
    """Determine whether a relation is established.

    Checks related units for the presence of key(s).  If a list of
    keys is provided, every key must be present on a unit for the
    relation to be considered made.
    """
    required = [keys] if isinstance(keys, str) else keys
    for r_id in relation_ids(relation):
        for unit in related_units(r_id):
            values = [relation_get(k, rid=r_id, unit=unit)
                      for k in required]
            # The relation counts as made only when every required
            # key has a value on this unit.
            if None not in values:
                return True
    return False
def open_port(port, protocol="TCP"):
"Open a service network port"
"""Open a service network port"""
_args = ['open-port']
_args.append('{}/{}'.format(port, protocol))
subprocess.check_call(_args)
def close_port(port, protocol="TCP"):
"Close a service network port"
"""Close a service network port"""
_args = ['close-port']
_args.append('{}/{}'.format(port, protocol))
subprocess.check_call(_args)
@ -288,6 +321,7 @@ def close_port(port, protocol="TCP"):
@cached
def unit_get(attribute):
"""Get the unit ID for the remote unit"""
_args = ['unit-get', '--format=json', attribute]
try:
return json.loads(subprocess.check_output(_args))
@ -296,22 +330,46 @@ def unit_get(attribute):
def unit_private_ip():
    """Return this unit's private IP address."""
    # Delegates to unit-get; the result is cached per hook execution.
    return unit_get('private-address')
class UnregisteredHookError(Exception):
    """Raised when a hook is executed that has no registered handler."""
class Hooks(object):
"""A convenient handler for hook functions.
Example:
hooks = Hooks()
# register a hook, taking its name from the function name
@hooks.hook()
def install():
...
# register a hook, providing a custom hook name
@hooks.hook("config-changed")
def config_changed():
...
if __name__ == "__main__":
# execute a hook based on the name the program is called by
hooks.execute(sys.argv)
"""
def __init__(self):
super(Hooks, self).__init__()
self._hooks = {}
def register(self, name, function):
"""Register a hook"""
self._hooks[name] = function
def execute(self, args):
"""Execute a registered hook based on args[0]"""
hook_name = os.path.basename(args[0])
if hook_name in self._hooks:
self._hooks[hook_name]()
@ -319,6 +377,7 @@ class Hooks(object):
raise UnregisteredHookError(hook_name)
def hook(self, *hook_names):
"""Decorator, registering them as hooks"""
def wrapper(decorated):
for hook_name in hook_names:
self.register(hook_name, decorated)
@ -330,5 +389,7 @@ class Hooks(object):
return decorated
return wrapper
def charm_dir():
    """Return the root directory of the current charm.

    Reads the ``CHARM_DIR`` environment variable set by Juju; returns
    None when it is unset.
    """
    return os.environ.get('CHARM_DIR')

View File

@ -5,42 +5,63 @@
# Nick Moffitt <nick.moffitt@canonical.com>
# Matthew Wedgwood <matthew.wedgwood@canonical.com>
import apt_pkg
import os
import pwd
import grp
import random
import string
import subprocess
import hashlib
from collections import OrderedDict
from hookenv import log, execution_environment
from hookenv import log
def service_start(service_name):
service('start', service_name)
"""Start a system service"""
return service('start', service_name)
def service_stop(service_name):
service('stop', service_name)
"""Stop a system service"""
return service('stop', service_name)
def service_restart(service_name):
service('restart', service_name)
"""Restart a system service"""
return service('restart', service_name)
def service_reload(service_name, restart_on_failure=False):
if not service('reload', service_name) and restart_on_failure:
service('restart', service_name)
"""Reload a system service, optionally falling back to restart if reload fails"""
service_result = service('reload', service_name)
if not service_result and restart_on_failure:
service_result = service('restart', service_name)
return service_result
def service(action, service_name):
    """Run ``service <name> <action>``; True on a zero exit status."""
    return subprocess.call(['service', service_name, action]) == 0
def service_running(service):
    """Determine whether a system service is currently running."""
    try:
        status = subprocess.check_output(['service', service, 'status'])
    except subprocess.CalledProcessError:
        # Non-zero exit: unknown service, or status reporting failure.
        return False
    # Upstart reports "start/running"; SysV init scripts typically
    # report "is running".
    return "start/running" in status or "is running" in status
def adduser(username, password=None, shell='/bin/bash', system_user=False):
"""Add a user"""
"""Add a user to the system"""
try:
user_info = pwd.getpwnam(username)
log('user {0} already exists!'.format(username))
@ -74,36 +95,33 @@ def add_user_to_group(username, group):
def rsync(from_path, to_path, flags='-r', options=None):
"""Replicate the contents of a path"""
context = execution_environment()
options = options or ['--delete', '--executability']
cmd = ['/usr/bin/rsync', flags]
cmd.extend(options)
cmd.append(from_path.format(**context))
cmd.append(to_path.format(**context))
cmd.append(from_path)
cmd.append(to_path)
log(" ".join(cmd))
return subprocess.check_output(cmd).strip()
def symlink(source, destination):
"""Create a symbolic link"""
context = execution_environment()
log("Symlinking {} as {}".format(source, destination))
cmd = [
'ln',
'-sf',
source.format(**context),
destination.format(**context)
source,
destination,
]
subprocess.check_call(cmd)
def mkdir(path, owner='root', group='root', perms=0555, force=False):
"""Create a directory"""
context = execution_environment()
log("Making dir {} {}:{} {:o}".format(path, owner, group,
perms))
uid = pwd.getpwnam(owner.format(**context)).pw_uid
gid = grp.getgrnam(group.format(**context)).gr_gid
uid = pwd.getpwnam(owner).pw_uid
gid = grp.getgrnam(group).gr_gid
realpath = os.path.abspath(path)
if os.path.exists(realpath):
if force and not os.path.isdir(realpath):
@ -114,75 +132,19 @@ def mkdir(path, owner='root', group='root', perms=0555, force=False):
os.chown(realpath, uid, gid)
def write_file(path, fmtstr, owner='root', group='root', perms=0444, **kwargs):
def write_file(path, content, owner='root', group='root', perms=0444):
"""Create or overwrite a file with the contents of a string"""
context = execution_environment()
context.update(kwargs)
log("Writing file {} {}:{} {:o}".format(path, owner, group,
perms))
uid = pwd.getpwnam(owner.format(**context)).pw_uid
gid = grp.getgrnam(group.format(**context)).gr_gid
with open(path.format(**context), 'w') as target:
log("Writing file {} {}:{} {:o}".format(path, owner, group, perms))
uid = pwd.getpwnam(owner).pw_uid
gid = grp.getgrnam(group).gr_gid
with open(path, 'w') as target:
os.fchown(target.fileno(), uid, gid)
os.fchmod(target.fileno(), perms)
target.write(fmtstr.format(**context))
def render_template_file(source, destination, **kwargs):
"""Create or overwrite a file using a template"""
log("Rendering template {} for {}".format(source,
destination))
context = execution_environment()
with open(source.format(**context), 'r') as template:
write_file(destination.format(**context), template.read(),
**kwargs)
def filter_installed_packages(packages):
"""Returns a list of packages that require installation"""
apt_pkg.init()
cache = apt_pkg.Cache()
_pkgs = []
for package in packages:
try:
p = cache[package]
p.current_ver or _pkgs.append(package)
except KeyError:
log('Package {} has no installation candidate.'.format(package),
level='WARNING')
_pkgs.append(package)
return _pkgs
def apt_install(packages, options=None, fatal=False):
"""Install one or more packages"""
options = options or []
cmd = ['apt-get', '-y']
cmd.extend(options)
cmd.append('install')
if isinstance(packages, basestring):
cmd.append(packages)
else:
cmd.extend(packages)
log("Installing {} with options: {}".format(packages,
options))
if fatal:
subprocess.check_call(cmd)
else:
subprocess.call(cmd)
def apt_update(fatal=False):
"""Update local apt cache"""
cmd = ['apt-get', 'update']
if fatal:
subprocess.check_call(cmd)
else:
subprocess.call(cmd)
target.write(content)
def mount(device, mountpoint, options=None, persist=False):
'''Mount a filesystem'''
"""Mount a filesystem at a particular mountpoint"""
cmd_args = ['mount']
if options is not None:
cmd_args.extend(['-o', options])
@ -199,7 +161,7 @@ def mount(device, mountpoint, options=None, persist=False):
def umount(mountpoint, persist=False):
'''Unmount a filesystem'''
"""Unmount a filesystem"""
cmd_args = ['umount', mountpoint]
try:
subprocess.check_output(cmd_args)
@ -213,7 +175,7 @@ def umount(mountpoint, persist=False):
def mounts():
'''List of all mounted volumes as [[mountpoint,device],[...]]'''
"""Get a list of all mounted volumes as [[mountpoint,device],[...]]"""
with open('/proc/mounts') as f:
# [['/mount/point','/dev/path'],[...]]
system_mounts = [m[1::-1] for m in [l.strip().split()
@ -222,7 +184,7 @@ def mounts():
def file_hash(path):
''' Generate a md5 hash of the contents of 'path' or None if not found '''
"""Generate a md5 hash of the contents of 'path' or None if not found """
if os.path.exists(path):
h = hashlib.md5()
with open(path, 'r') as source:
@ -233,7 +195,7 @@ def file_hash(path):
def restart_on_change(restart_map):
''' Restart services based on configuration files changing
"""Restart services based on configuration files changing
This function is used a decorator, for example
@ -246,7 +208,7 @@ def restart_on_change(restart_map):
In this example, the cinder-api and cinder-volume services
would be restarted if /etc/ceph/ceph.conf is changed by the
ceph_client_changed function.
'''
"""
def wrap(f):
def wrapped_f(*args):
checksums = {}
@ -264,10 +226,22 @@ def restart_on_change(restart_map):
def lsb_release():
'''Return /etc/lsb-release in a dict'''
"""Return /etc/lsb-release in a dict"""
d = {}
with open('/etc/lsb-release', 'r') as lsb:
for l in lsb:
k, v = l.split('=')
d[k.strip()] = v.strip()
return d
def pwgen(length=None):
    """Generate a random password.

    :param length: desired password length; when None, a random length
        between 35 and 44 characters is chosen.
    :returns: the generated password string.
    """
    if length is None:
        length = random.choice(range(35, 45))
    # string.ascii_letters is locale-independent and exists on both
    # Python 2 and 3 (string.letters is Python-2-only and varies with
    # the locale).  Ambiguous characters (l/1, 0/O/Q/D) and vowels are
    # excluded.
    alphanumeric_chars = [
        l for l in (string.ascii_letters + string.digits)
        if l not in 'l0QD1vAEIOUaeiou']
    random_chars = [
        random.choice(alphanumeric_chars) for _ in range(length)]
    return ''.join(random_chars)

View File

@ -1,9 +1,6 @@
import importlib
from yaml import safe_load
from charmhelpers.core.host import (
apt_install,
apt_update,
filter_installed_packages,
lsb_release
)
from urlparse import (
@ -15,6 +12,8 @@ from charmhelpers.core.hookenv import (
config,
log,
)
import apt_pkg
import os
CLOUD_ARCHIVE = """# Ubuntu Cloud Archive
deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main
@ -22,18 +21,128 @@ deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main
PROPOSED_POCKET = """# Proposed
deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted
"""
# Mapping of user-facing "cloud:" pocket aliases to the canonical
# Ubuntu Cloud Archive pocket names used in the apt sources entry.
CLOUD_ARCHIVE_POCKETS = {
    # Folsom
    'folsom': 'precise-updates/folsom',
    'precise-folsom': 'precise-updates/folsom',
    'precise-folsom/updates': 'precise-updates/folsom',
    'precise-updates/folsom': 'precise-updates/folsom',
    'folsom/proposed': 'precise-proposed/folsom',
    'precise-folsom/proposed': 'precise-proposed/folsom',
    'precise-proposed/folsom': 'precise-proposed/folsom',
    # Grizzly
    'grizzly': 'precise-updates/grizzly',
    'precise-grizzly': 'precise-updates/grizzly',
    'precise-grizzly/updates': 'precise-updates/grizzly',
    'precise-updates/grizzly': 'precise-updates/grizzly',
    'grizzly/proposed': 'precise-proposed/grizzly',
    'precise-grizzly/proposed': 'precise-proposed/grizzly',
    'precise-proposed/grizzly': 'precise-proposed/grizzly',
    # Havana
    'havana': 'precise-updates/havana',
    'precise-havana': 'precise-updates/havana',
    'precise-havana/updates': 'precise-updates/havana',
    'precise-updates/havana': 'precise-updates/havana',
    'havana/proposed': 'precise-proposed/havana',
    'precise-havana/proposed': 'precise-proposed/havana',
    # Historical misspelling kept so existing configurations that used
    # it continue to resolve.
    'precies-havana/proposed': 'precise-proposed/havana',
    'precise-proposed/havana': 'precise-proposed/havana',
}
def filter_installed_packages(packages):
    """Return the subset of ``packages`` that still needs installing."""
    apt_pkg.init()
    cache = apt_pkg.Cache()
    missing = []
    for package in packages:
        try:
            entry = cache[package]
        except KeyError:
            # Unknown to apt: flag it and let apt-get report the error.
            log('Package {} has no installation candidate.'.format(package),
                level='WARNING')
            missing.append(package)
        else:
            if not entry.current_ver:
                missing.append(package)
    return missing
def apt_install(packages, options=None, fatal=False):
    """Install one or more packages with apt-get.

    :param packages: a package name or an iterable of package names.
    :param options: extra apt-get options; defaults to keeping old
        conffiles on upgrade.
    :param fatal: when True, raise on a non-zero apt-get exit status.
    """
    if options is None:
        options = ['--option=Dpkg::Options::=--force-confold']
    cmd = ['apt-get', '--assume-yes'] + list(options) + ['install']
    if isinstance(packages, basestring):
        cmd.append(packages)
    else:
        cmd.extend(packages)
    log("Installing {} with options: {}".format(packages,
                                                options))
    # Ensure debconf never prompts during unattended installs.
    env = os.environ.copy()
    env.setdefault('DEBIAN_FRONTEND', 'noninteractive')
    runner = subprocess.check_call if fatal else subprocess.call
    runner(cmd, env=env)
def apt_update(fatal=False):
    """Refresh the local apt package cache.

    :param fatal: when True, raise on a non-zero apt-get exit status.
    """
    runner = subprocess.check_call if fatal else subprocess.call
    runner(['apt-get', 'update'])
def apt_purge(packages, fatal=False):
    """Purge one or more packages, removing their configuration too.

    :param packages: a package name or an iterable of package names.
    :param fatal: when True, raise on a non-zero apt-get exit status.
    """
    cmd = ['apt-get', '--assume-yes', 'purge']
    if isinstance(packages, basestring):
        cmd.append(packages)
    else:
        cmd.extend(packages)
    log("Purging {}".format(packages))
    runner = subprocess.check_call if fatal else subprocess.call
    runner(cmd)
def apt_hold(packages, fatal=False):
    """Mark one or more packages as held back from upgrades.

    :param packages: a package name or an iterable of package names.
    :param fatal: when True, raise on a non-zero apt-mark exit status.
    """
    cmd = ['apt-mark', 'hold']
    if isinstance(packages, basestring):
        cmd.append(packages)
    else:
        cmd.extend(packages)
    log("Holding {}".format(packages))
    runner = subprocess.check_call if fatal else subprocess.call
    runner(cmd)
def add_source(source, key=None):
if ((source.startswith('ppa:') or
source.startswith('http:'))):
subprocess.check_call(['add-apt-repository', source])
if (source.startswith('ppa:') or
source.startswith('http:') or
source.startswith('deb ') or
source.startswith('cloud-archive:')):
subprocess.check_call(['add-apt-repository', '--yes', source])
elif source.startswith('cloud:'):
apt_install(filter_installed_packages(['ubuntu-cloud-keyring']),
fatal=True)
pocket = source.split(':')[-1]
if pocket not in CLOUD_ARCHIVE_POCKETS:
raise SourceConfigError(
'Unsupported cloud: source option %s' %
pocket)
actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket]
with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt:
apt.write(CLOUD_ARCHIVE.format(pocket))
apt.write(CLOUD_ARCHIVE.format(actual_pocket))
elif source == 'proposed':
release = lsb_release()['DISTRIB_CODENAME']
with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt:
@ -63,8 +172,11 @@ def configure_sources(update=False,
Note that 'null' (a.k.a. None) should not be quoted.
"""
sources = safe_load(config(sources_var))
keys = safe_load(config(keys_var))
if isinstance(sources, basestring) and isinstance(keys, basestring):
keys = config(keys_var)
if keys is not None:
keys = safe_load(keys)
if isinstance(sources, basestring) and (
keys is None or isinstance(keys, basestring)):
add_source(sources, keys)
else:
if not len(sources) == len(keys):
@ -79,6 +191,7 @@ def configure_sources(update=False,
# least- to most-specific URL matching.
FETCH_HANDLERS = (
'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler',
'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler',
)
@ -98,6 +211,7 @@ def install_remote(source):
# We ONLY check for True here because can_handle may return a string
# explaining why it can't handle a given source.
handlers = [h for h in plugins() if h.can_handle(source) is True]
installed_to = None
for handler in handlers:
try:
installed_to = handler.install(source)
@ -115,7 +229,9 @@ def install_from_config(config_var_name):
class BaseFetchHandler(object):
"""Base class for FetchHandler implementations in fetch plugins"""
def can_handle(self, source):
"""Returns True if the source can be handled. Otherwise returns
a string explaining why it cannot"""
@ -143,10 +259,13 @@ def plugins(fetch_handlers=None):
for handler_name in fetch_handlers:
package, classname = handler_name.rsplit('.', 1)
try:
handler_class = getattr(importlib.import_module(package), classname)
handler_class = getattr(
importlib.import_module(package),
classname)
plugin_list.append(handler_class())
except (ImportError, AttributeError):
# Skip missing plugins so that they can be omitted from
# installation if desired
log("FetchHandler {} not found, skipping plugin".format(handler_name))
log("FetchHandler {} not found, skipping plugin".format(
handler_name))
return plugin_list

View File

@ -8,6 +8,7 @@ from charmhelpers.payload.archive import (
get_archive_handler,
extract,
)
from charmhelpers.core.host import mkdir
class ArchiveUrlFetchHandler(BaseFetchHandler):
@ -24,20 +25,24 @@ class ArchiveUrlFetchHandler(BaseFetchHandler):
# propagate all exceptions
# URLError, OSError, etc
response = urllib2.urlopen(source)
with open(dest, 'w') as dest_file:
dest_file.write(response.read())
try:
with open(dest, 'w') as dest_file:
dest_file.write(response.read())
except Exception as e:
if os.path.isfile(dest):
os.unlink(dest)
raise e
def install(self, source):
url_parts = self.parse_url(source)
dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')
if not os.path.exists(dest_dir):
mkdir(dest_dir, perms=0755)
dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path))
try:
self.download(source, dld_file)
except urllib2.URLError as e:
return UnhandledSource(e.reason)
raise UnhandledSource(e.reason)
except OSError as e:
return UnhandledSource(e.strerror)
finally:
if os.path.isfile(dld_file):
os.unlink(dld_file)
raise UnhandledSource(e.strerror)
return extract(dld_file)

View File

@ -0,0 +1,49 @@
import os
from charmhelpers.fetch import (
BaseFetchHandler,
UnhandledSource
)
from charmhelpers.core.host import mkdir
try:
from bzrlib.branch import Branch
except ImportError:
from charmhelpers.fetch import apt_install
apt_install("python-bzrlib")
from bzrlib.branch import Branch
class BzrUrlFetchHandler(BaseFetchHandler):
    """Handler for bazaar branches via generic and lp URLs"""

    def can_handle(self, source):
        """Return True if ``source`` uses a supported URL scheme."""
        url_parts = self.parse_url(source)
        return url_parts.scheme in ('bzr+ssh', 'lp')

    def branch(self, source, dest):
        """Branch ``source`` into the ``dest`` directory.

        :raises UnhandledSource: if ``source`` has an unsupported scheme.
        """
        url_parts = self.parse_url(source)
        if not self.can_handle(source):
            raise UnhandledSource("Cannot handle {}".format(source))
        # The lp: scheme is resolved by a bzr plugin, so plugins must
        # be loaded before the branch can be opened.
        if url_parts.scheme == "lp":
            from bzrlib.plugin import load_plugins
            load_plugins()
        # NOTE: the previous ``except Exception as e: raise e`` wrapper
        # was a no-op and has been removed; exceptions propagate as-is.
        remote_branch = Branch.open(source)
        remote_branch.bzrdir.sprout(dest).open_branch()

    def install(self, source):
        """Fetch ``source`` under $CHARM_DIR/fetched; return the path."""
        url_parts = self.parse_url(source)
        branch_name = url_parts.path.strip("/").split("/")[-1]
        dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched",
                                branch_name)
        if not os.path.exists(dest_dir):
            # 0o755 (was the Python-2-only literal 0755; same value).
            mkdir(dest_dir, perms=0o755)
        try:
            self.branch(source, dest_dir)
        except OSError as e:
            raise UnhandledSource(e.strerror)
        return dest_dir

View File

@ -21,21 +21,27 @@ from charmhelpers.core.hookenv import (
related_units,
relation_get,
Hooks,
UnregisteredHookError
UnregisteredHookError,
service_name
)
from charmhelpers.core.host import (
umount,
mkdir
)
from charmhelpers.fetch import (
add_source,
apt_install,
apt_update,
filter_installed_packages,
umount
)
from charmhelpers.fetch import add_source
from utils import (
render_template,
get_host_ip,
)
from charmhelpers.contrib.openstack.alternatives import install_alternative
hooks = Hooks()
@ -66,9 +72,14 @@ def emit_cephconf():
'fsid': get_fsid(),
'version': ceph.get_ceph_version()
}
with open('/etc/ceph/ceph.conf', 'w') as cephconf:
# Install ceph.conf as an alternative to support
# co-existence with other charms that write this file
charm_ceph_conf = "/var/lib/charm/{}/ceph.conf".format(service_name())
mkdir(os.path.dirname(charm_ceph_conf))
with open(charm_ceph_conf, 'w') as cephconf:
cephconf.write(render_template('ceph.conf', cephcontext))
install_alternative('ceph.conf', '/etc/ceph/ceph.conf',
charm_ceph_conf, 90)
JOURNAL_ZAPPED = '/var/lib/ceph/journal_zapped'

View File

@ -13,7 +13,7 @@ from charmhelpers.core.hookenv import (
unit_get,
cached
)
from charmhelpers.core.host import (
from charmhelpers.fetch import (
apt_install,
filter_installed_packages
)

View File

@ -1 +1 @@
11
13