[niedbalski,r=hopem]

Adds a sysctl config option to allow a dict of
sysctl key/value pairs to be set on ceph nodes.
This commit is contained in:
Edward Hope-Morley 2014-11-25 18:29:07 +00:00
commit 0099ed84bf
17 changed files with 222 additions and 126 deletions

View File

@ -155,3 +155,9 @@ options:
order for this charm to function correctly, the privacy extension must be
disabled and a non-temporary address must be configured/available on
your network interface.
sysctl:
type: string
default: ''
description: |
YAML-formatted associative array of sysctl key/value pairs to be set
persistently, e.g. '{ kernel.pid_max : 4194303 }'.
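The option arrives in the charm as a YAML string rather than a native dict, so it has to be parsed before use. A minimal sketch of that parsing step, using illustrative keys that are not mandated by the charm:

import yaml

# Example value as it would be returned by config-get; the keys are
# purely illustrative.
raw = "{ kernel.pid_max: 4194303, vm.swappiness: 1 }"

settings = yaml.safe_load(raw) or {}
for key, value in settings.items():
    # Each pair maps onto one "key=value" line in a sysctl.d snippet.
    print("{}={}".format(key, value))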

View File

@ -1,7 +1,6 @@
import glob
import re
import subprocess
import sys
from functools import partial
@ -33,31 +32,28 @@ def _validate_cidr(network):
network)
def no_ip_found_error_out(network):
errmsg = ("No IP address found in network: %s" % network)
raise ValueError(errmsg)
def get_address_in_network(network, fallback=None, fatal=False):
"""
Get an IPv4 or IPv6 address within the network from the host.
"""Get an IPv4 or IPv6 address within the network from the host.
:param network (str): CIDR presentation format. For example,
'192.168.1.0/24'.
:param fallback (str): If no address is found, return fallback.
:param fatal (boolean): If no address is found, fallback is not
set and fatal is True then exit(1).
"""
def not_found_error_out():
log("No IP address found in network: %s" % network,
level=ERROR)
sys.exit(1)
if network is None:
if fallback is not None:
return fallback
if fatal:
no_ip_found_error_out(network)
else:
if fatal:
not_found_error_out()
else:
return None
return None
_validate_cidr(network)
network = netaddr.IPNetwork(network)
@ -69,6 +65,7 @@ def get_address_in_network(network, fallback=None, fatal=False):
cidr = netaddr.IPNetwork("%s/%s" % (addr, netmask))
if cidr in network:
return str(cidr.ip)
if network.version == 6 and netifaces.AF_INET6 in addresses:
for addr in addresses[netifaces.AF_INET6]:
if not addr['addr'].startswith('fe80'):
@ -81,20 +78,20 @@ def get_address_in_network(network, fallback=None, fatal=False):
return fallback
if fatal:
not_found_error_out()
no_ip_found_error_out(network)
return None
def is_ipv6(address):
'''Determine whether provided address is IPv6 or not'''
"""Determine whether provided address is IPv6 or not."""
try:
address = netaddr.IPAddress(address)
except netaddr.AddrFormatError:
# probably a hostname - so not an address at all!
return False
else:
return address.version == 6
return address.version == 6
def is_address_in_network(network, address):
@ -112,11 +109,13 @@ def is_address_in_network(network, address):
except (netaddr.core.AddrFormatError, ValueError):
raise ValueError("Network (%s) is not in CIDR presentation format" %
network)
try:
address = netaddr.IPAddress(address)
except (netaddr.core.AddrFormatError, ValueError):
raise ValueError("Address (%s) is not in correct presentation format" %
address)
if address in network:
return True
else:
@ -146,6 +145,7 @@ def _get_for_address(address, key):
return iface
else:
return addresses[netifaces.AF_INET][0][key]
if address.version == 6 and netifaces.AF_INET6 in addresses:
for addr in addresses[netifaces.AF_INET6]:
if not addr['addr'].startswith('fe80'):
@ -159,40 +159,48 @@ def _get_for_address(address, key):
return str(cidr).split('/')[1]
else:
return addr[key]
return None
get_iface_for_address = partial(_get_for_address, key='iface')
get_netmask_for_address = partial(_get_for_address, key='netmask')
def format_ipv6_addr(address):
"""
IPv6 needs to be wrapped with [] in url link to parse correctly.
"""If address is IPv6, wrap it in '[]' otherwise return None.
This is required by most configuration files when specifying IPv6
addresses.
"""
if is_ipv6(address):
<<<<<<< TREE
return "[%s]" % address
=======
address = "[%s]" % address
else:
address = None
>>>>>>> MERGE-SOURCE
return address
return None
def get_iface_addr(iface='eth0', inet_type='AF_INET', inc_aliases=False,
fatal=True, exc_list=None):
"""
Return the assigned IP address for a given interface, if any, or [].
"""
"""Return the assigned IP address for a given interface, if any."""
# Extract nic if passed /dev/ethX
if '/' in iface:
iface = iface.split('/')[-1]
if not exc_list:
exc_list = []
try:
inet_num = getattr(netifaces, inet_type)
except AttributeError:
raise Exception('Unknown inet type ' + str(inet_type))
raise Exception("Unknown inet type '%s'" % str(inet_type))
interfaces = netifaces.interfaces()
if inc_aliases:
@ -200,15 +208,18 @@ def get_iface_addr(iface='eth0', inet_type='AF_INET', inc_aliases=False,
for _iface in interfaces:
if iface == _iface or _iface.split(':')[0] == iface:
ifaces.append(_iface)
if fatal and not ifaces:
raise Exception("Invalid interface '%s'" % iface)
ifaces.sort()
else:
if iface not in interfaces:
if fatal:
raise Exception("%s not found " % (iface))
raise Exception("Interface '%s' not found " % (iface))
else:
return []
else:
ifaces = [iface]
@ -219,10 +230,13 @@ def get_iface_addr(iface='eth0', inet_type='AF_INET', inc_aliases=False,
for entry in net_info[inet_num]:
if 'addr' in entry and entry['addr'] not in exc_list:
addresses.append(entry['addr'])
if fatal and not addresses:
raise Exception("Interface '%s' doesn't have any %s addresses." %
(iface, inet_type))
return addresses
return sorted(addresses)
get_ipv4_addr = partial(get_iface_addr, inet_type='AF_INET')
@ -239,6 +253,7 @@ def get_iface_from_addr(addr):
raw = re.match(ll_key, _addr)
if raw:
_addr = raw.group(1)
if _addr == addr:
log("Address '%s' is configured on iface '%s'" %
(addr, iface))
@ -249,8 +264,9 @@ def get_iface_from_addr(addr):
def sniff_iface(f):
"""If no iface provided, inject net iface inferred from unit private
address.
"""Ensure decorated function is called with a value for iface.
If no iface provided, inject net iface inferred from unit private address.
"""
def iface_sniffer(*args, **kwargs):
if not kwargs.get('iface', None):
@ -293,7 +309,7 @@ def get_ipv6_addr(iface=None, inc_aliases=False, fatal=True, exc_list=None,
if global_addrs:
# Make sure any found global addresses are not temporary
cmd = ['ip', 'addr', 'show', iface]
out = subprocess.check_output(cmd)
out = subprocess.check_output(cmd).decode('UTF-8')
if dynamic_only:
key = re.compile("inet6 (.+)/[0-9]+ scope global dynamic.*")
else:
@ -315,33 +331,28 @@ def get_ipv6_addr(iface=None, inc_aliases=False, fatal=True, exc_list=None,
return addrs
if fatal:
raise Exception("Interface '%s' doesn't have a scope global "
raise Exception("Interface '%s' does not have a scope global "
"non-temporary ipv6 address." % iface)
return []
def get_bridges(vnic_dir='/sys/devices/virtual/net'):
"""
Return a list of bridges on the system or []
"""
b_rgex = vnic_dir + '/*/bridge'
return [x.replace(vnic_dir, '').split('/')[1] for x in glob.glob(b_rgex)]
"""Return a list of bridges on the system."""
b_regex = "%s/*/bridge" % vnic_dir
return [x.replace(vnic_dir, '').split('/')[1] for x in glob.glob(b_regex)]
def get_bridge_nics(bridge, vnic_dir='/sys/devices/virtual/net'):
"""
Return a list of nics comprising a given bridge on the system or []
"""
brif_rgex = "%s/%s/brif/*" % (vnic_dir, bridge)
return [x.split('/')[-1] for x in glob.glob(brif_rgex)]
"""Return a list of nics comprising a given bridge on the system."""
brif_regex = "%s/%s/brif/*" % (vnic_dir, bridge)
return [x.split('/')[-1] for x in glob.glob(brif_regex)]
def is_bridge_member(nic):
"""
Check if a given nic is a member of a bridge
"""
"""Check if a given nic is a member of a bridge."""
for bridge in get_bridges():
if nic in get_bridge_nics(bridge):
return True
return False

View File

@ -30,7 +30,8 @@ def zap_disk(block_device):
# sometimes sgdisk exits non-zero; this is OK, dd will clean up
call(['sgdisk', '--zap-all', '--mbrtogpt',
'--clear', block_device])
dev_end = check_output(['blockdev', '--getsz', block_device])
dev_end = check_output(['blockdev', '--getsz',
block_device]).decode('UTF-8')
gpt_end = int(dev_end.split()[0]) - 100
check_call(['dd', 'if=/dev/zero', 'of=%s' % (block_device),
'bs=1M', 'count=1'])
@ -47,7 +48,7 @@ def is_device_mounted(device):
it doesn't.
'''
is_partition = bool(re.search(r".*[0-9]+\b", device))
out = check_output(['mount'])
out = check_output(['mount']).decode('UTF-8')
if is_partition:
return bool(re.search(device + r"\b", out))
return bool(re.search(device + r"[0-9]+\b", out))

View File

@ -3,10 +3,11 @@
__author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>'
import io
import os
class Fstab(file):
class Fstab(io.FileIO):
"""This class extends file in order to implement a file reader/writer
for file `/etc/fstab`
"""
@ -24,8 +25,8 @@ class Fstab(file):
options = "defaults"
self.options = options
self.d = d
self.p = p
self.d = int(d)
self.p = int(p)
def __eq__(self, o):
return str(self) == str(o)
@ -45,7 +46,7 @@ class Fstab(file):
self._path = path
else:
self._path = self.DEFAULT_PATH
file.__init__(self, self._path, 'r+')
super(Fstab, self).__init__(self._path, 'rb+')
def _hydrate_entry(self, line):
# NOTE: use split with no arguments to split on any
@ -58,8 +59,9 @@ class Fstab(file):
def entries(self):
self.seek(0)
for line in self.readlines():
line = line.decode('us-ascii')
try:
if not line.startswith("#"):
if line.strip() and not line.startswith("#"):
yield self._hydrate_entry(line)
except ValueError:
pass
@ -75,14 +77,14 @@ class Fstab(file):
if self.get_entry_by_attr('device', entry.device):
return False
self.write(str(entry) + '\n')
self.write((str(entry) + '\n').encode('us-ascii'))
self.truncate()
return entry
def remove_entry(self, entry):
self.seek(0)
lines = self.readlines()
lines = [l.decode('us-ascii') for l in self.readlines()]
found = False
for index, line in enumerate(lines):
@ -97,7 +99,7 @@ class Fstab(file):
lines.remove(line)
self.seek(0)
self.write(''.join(lines))
self.write(''.join(lines).encode('us-ascii'))
self.truncate()
return True

View File

@ -9,9 +9,14 @@ import json
import yaml
import subprocess
import sys
import UserDict
from subprocess import CalledProcessError
import six
if not six.PY3:
from UserDict import UserDict
else:
from collections import UserDict
CRITICAL = "CRITICAL"
ERROR = "ERROR"
WARNING = "WARNING"
@ -67,12 +72,12 @@ def log(message, level=None):
subprocess.call(command)
class Serializable(UserDict.IterableUserDict):
class Serializable(UserDict):
"""Wrapper, an object that can be serialized to yaml or json"""
def __init__(self, obj):
# wrap the object
UserDict.IterableUserDict.__init__(self)
UserDict.__init__(self)
self.data = obj
def __getattr__(self, attr):
@ -269,7 +274,7 @@ class Config(dict):
"""
if self._prev_dict:
for k, v in self._prev_dict.iteritems():
for k, v in six.iteritems(self._prev_dict):
if k not in self:
self[k] = v
with open(self.path, 'w') as f:
@ -284,7 +289,8 @@ def config(scope=None):
config_cmd_line.append(scope)
config_cmd_line.append('--format=json')
try:
config_data = json.loads(subprocess.check_output(config_cmd_line))
config_data = json.loads(
subprocess.check_output(config_cmd_line).decode('UTF-8'))
if scope is not None:
return config_data
return Config(config_data)
@ -303,10 +309,10 @@ def relation_get(attribute=None, unit=None, rid=None):
if unit:
_args.append(unit)
try:
return json.loads(subprocess.check_output(_args))
return json.loads(subprocess.check_output(_args).decode('UTF-8'))
except ValueError:
return None
except CalledProcessError, e:
except CalledProcessError as e:
if e.returncode == 2:
return None
raise
@ -318,7 +324,7 @@ def relation_set(relation_id=None, relation_settings=None, **kwargs):
relation_cmd_line = ['relation-set']
if relation_id is not None:
relation_cmd_line.extend(('-r', relation_id))
for k, v in (relation_settings.items() + kwargs.items()):
for k, v in (list(relation_settings.items()) + list(kwargs.items())):
if v is None:
relation_cmd_line.append('{}='.format(k))
else:
@ -335,7 +341,8 @@ def relation_ids(reltype=None):
relid_cmd_line = ['relation-ids', '--format=json']
if reltype is not None:
relid_cmd_line.append(reltype)
return json.loads(subprocess.check_output(relid_cmd_line)) or []
return json.loads(
subprocess.check_output(relid_cmd_line).decode('UTF-8')) or []
return []
@ -346,7 +353,8 @@ def related_units(relid=None):
units_cmd_line = ['relation-list', '--format=json']
if relid is not None:
units_cmd_line.extend(('-r', relid))
return json.loads(subprocess.check_output(units_cmd_line)) or []
return json.loads(
subprocess.check_output(units_cmd_line).decode('UTF-8')) or []
@cached
@ -455,7 +463,7 @@ def unit_get(attribute):
"""Get the unit ID for the remote unit"""
_args = ['unit-get', '--format=json', attribute]
try:
return json.loads(subprocess.check_output(_args))
return json.loads(subprocess.check_output(_args).decode('UTF-8'))
except ValueError:
return None

View File

@ -14,11 +14,12 @@ import string
import subprocess
import hashlib
from contextlib import contextmanager
from collections import OrderedDict
from hookenv import log
from fstab import Fstab
import six
from .hookenv import log
from .fstab import Fstab
def service_start(service_name):
@ -54,7 +55,9 @@ def service(action, service_name):
def service_running(service):
"""Determine whether a system service is running"""
try:
output = subprocess.check_output(['service', service, 'status'], stderr=subprocess.STDOUT)
output = subprocess.check_output(
['service', service, 'status'],
stderr=subprocess.STDOUT).decode('UTF-8')
except subprocess.CalledProcessError:
return False
else:
@ -67,7 +70,9 @@ def service_running(service):
def service_available(service_name):
"""Determine whether a system service is available"""
try:
subprocess.check_output(['service', service_name, 'status'], stderr=subprocess.STDOUT)
subprocess.check_output(
['service', service_name, 'status'],
stderr=subprocess.STDOUT).decode('UTF-8')
except subprocess.CalledProcessError as e:
return 'unrecognized service' not in e.output
else:
@ -115,7 +120,7 @@ def rsync(from_path, to_path, flags='-r', options=None):
cmd.append(from_path)
cmd.append(to_path)
log(" ".join(cmd))
return subprocess.check_output(cmd).strip()
return subprocess.check_output(cmd).decode('UTF-8').strip()
def symlink(source, destination):
@ -130,7 +135,7 @@ def symlink(source, destination):
subprocess.check_call(cmd)
def mkdir(path, owner='root', group='root', perms=0555, force=False):
def mkdir(path, owner='root', group='root', perms=0o555, force=False):
"""Create a directory"""
log("Making dir {} {}:{} {:o}".format(path, owner, group,
perms))
@ -146,7 +151,7 @@ def mkdir(path, owner='root', group='root', perms=0555, force=False):
os.chown(realpath, uid, gid)
def write_file(path, content, owner='root', group='root', perms=0444):
def write_file(path, content, owner='root', group='root', perms=0o444):
"""Create or overwrite a file with the contents of a string"""
log("Writing file {} {}:{} {:o}".format(path, owner, group, perms))
uid = pwd.getpwnam(owner).pw_uid
@ -177,7 +182,7 @@ def mount(device, mountpoint, options=None, persist=False, filesystem="ext3"):
cmd_args.extend([device, mountpoint])
try:
subprocess.check_output(cmd_args)
except subprocess.CalledProcessError, e:
except subprocess.CalledProcessError as e:
log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output))
return False
@ -191,7 +196,7 @@ def umount(mountpoint, persist=False):
cmd_args = ['umount', mountpoint]
try:
subprocess.check_output(cmd_args)
except subprocess.CalledProcessError, e:
except subprocess.CalledProcessError as e:
log('Error unmounting {}\n{}'.format(mountpoint, e.output))
return False
@ -218,8 +223,8 @@ def file_hash(path, hash_type='md5'):
"""
if os.path.exists(path):
h = getattr(hashlib, hash_type)()
with open(path, 'r') as source:
h.update(source.read()) # IGNORE:E1101 - it does have update
with open(path, 'rb') as source:
h.update(source.read())
return h.hexdigest()
else:
return None
@ -297,7 +302,7 @@ def pwgen(length=None):
if length is None:
length = random.choice(range(35, 45))
alphanumeric_chars = [
l for l in (string.letters + string.digits)
l for l in (string.ascii_letters + string.digits)
if l not in 'l0QD1vAEIOUaeiou']
random_chars = [
random.choice(alphanumeric_chars) for _ in range(length)]
@ -306,14 +311,14 @@ def pwgen(length=None):
def list_nics(nic_type):
'''Return a list of nics of given type(s)'''
if isinstance(nic_type, basestring):
if isinstance(nic_type, six.string_types):
int_types = [nic_type]
else:
int_types = nic_type
interfaces = []
for int_type in int_types:
cmd = ['ip', 'addr', 'show', 'label', int_type + '*']
ip_output = subprocess.check_output(cmd).split('\n')
ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')
ip_output = (line for line in ip_output if line)
for line in ip_output:
if line.split()[1].startswith(int_type):
@ -335,7 +340,7 @@ def set_nic_mtu(nic, mtu):
def get_nic_mtu(nic):
cmd = ['ip', 'addr', 'show', nic]
ip_output = subprocess.check_output(cmd).split('\n')
ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')
mtu = ""
for line in ip_output:
words = line.split()
@ -346,7 +351,7 @@ def get_nic_mtu(nic):
def get_nic_hwaddr(nic):
cmd = ['ip', '-o', '-0', 'addr', 'show', nic]
ip_output = subprocess.check_output(cmd)
ip_output = subprocess.check_output(cmd).decode('UTF-8')
hwaddr = ""
words = ip_output.split()
if 'link/ether' in words:

View File

@ -196,7 +196,7 @@ class StoredContext(dict):
if not os.path.isabs(file_name):
file_name = os.path.join(hookenv.charm_dir(), file_name)
with open(file_name, 'w') as file_stream:
os.fchmod(file_stream.fileno(), 0600)
os.fchmod(file_stream.fileno(), 0o600)
yaml.dump(config_data, file_stream)
def read_context(self, file_name):
@ -211,15 +211,19 @@ class StoredContext(dict):
class TemplateCallback(ManagerCallback):
"""
Callback class that will render a Jinja2 template, for use as a ready action.
Callback class that will render a Jinja2 template, for use as a ready
action.
:param str source: The template source file, relative to
`$CHARM_DIR/templates`
:param str source: The template source file, relative to `$CHARM_DIR/templates`
:param str target: The target to write the rendered template to
:param str owner: The owner of the rendered file
:param str group: The group of the rendered file
:param int perms: The permissions of the rendered file
"""
def __init__(self, source, target, owner='root', group='root', perms=0444):
def __init__(self, source, target,
owner='root', group='root', perms=0o444):
self.source = source
self.target = target
self.owner = owner

View File

@ -4,7 +4,8 @@ from charmhelpers.core import host
from charmhelpers.core import hookenv
def render(source, target, context, owner='root', group='root', perms=0444, templates_dir=None):
def render(source, target, context, owner='root', group='root',
perms=0o444, templates_dir=None):
"""
Render a template.

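As a usage note, a hedged sketch of calling the updated render() signature from a hook; the template name, target path and context values below are assumptions, not taken from this charm:

from charmhelpers.core.templating import render

# Renders $CHARM_DIR/templates/ceph.conf to /etc/ceph/ceph.conf using the
# new-style octal permissions. All concrete values are illustrative.
render(source='ceph.conf',
       target='/etc/ceph/ceph.conf',
       context={'mon_hosts': '10.0.0.1 10.0.0.2'},
       owner='root', group='root', perms=0o644)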
View File

@ -5,10 +5,6 @@ from yaml import safe_load
from charmhelpers.core.host import (
lsb_release
)
from urlparse import (
urlparse,
urlunparse,
)
import subprocess
from charmhelpers.core.hookenv import (
config,
@ -16,6 +12,12 @@ from charmhelpers.core.hookenv import (
)
import os
import six
if six.PY3:
from urllib.parse import urlparse, urlunparse
else:
from urlparse import urlparse, urlunparse
CLOUD_ARCHIVE = """# Ubuntu Cloud Archive
deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main
@ -149,7 +151,7 @@ def apt_install(packages, options=None, fatal=False):
cmd = ['apt-get', '--assume-yes']
cmd.extend(options)
cmd.append('install')
if isinstance(packages, basestring):
if isinstance(packages, six.string_types):
cmd.append(packages)
else:
cmd.extend(packages)
@ -182,7 +184,7 @@ def apt_update(fatal=False):
def apt_purge(packages, fatal=False):
"""Purge one or more packages"""
cmd = ['apt-get', '--assume-yes', 'purge']
if isinstance(packages, basestring):
if isinstance(packages, six.string_types):
cmd.append(packages)
else:
cmd.extend(packages)
@ -193,7 +195,7 @@ def apt_purge(packages, fatal=False):
def apt_hold(packages, fatal=False):
"""Hold one or more packages"""
cmd = ['apt-mark', 'hold']
if isinstance(packages, basestring):
if isinstance(packages, six.string_types):
cmd.append(packages)
else:
cmd.extend(packages)
@ -260,7 +262,7 @@ def add_source(source, key=None):
if key:
if '-----BEGIN PGP PUBLIC KEY BLOCK-----' in key:
with NamedTemporaryFile() as key_file:
with NamedTemporaryFile('w+') as key_file:
key_file.write(key)
key_file.flush()
key_file.seek(0)
@ -297,14 +299,14 @@ def configure_sources(update=False,
sources = safe_load((config(sources_var) or '').strip()) or []
keys = safe_load((config(keys_var) or '').strip()) or None
if isinstance(sources, basestring):
if isinstance(sources, six.string_types):
sources = [sources]
if keys is None:
for source in sources:
add_source(source, None)
else:
if isinstance(keys, basestring):
if isinstance(keys, six.string_types):
keys = [keys]
if len(sources) != len(keys):
@ -401,7 +403,7 @@ def _run_apt_command(cmd, fatal=False):
while result is None or result == APT_NO_LOCK:
try:
result = subprocess.check_call(cmd, env=env)
except subprocess.CalledProcessError, e:
except subprocess.CalledProcessError as e:
retry_count = retry_count + 1
if retry_count > APT_NO_LOCK_RETRY_COUNT:
raise

View File

@ -1,8 +1,23 @@
import os
import urllib2
from urllib import urlretrieve
import urlparse
import hashlib
import re
import six
if six.PY3:
from urllib.request import (
build_opener, install_opener, urlopen, urlretrieve,
HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler,
)
from urllib.parse import urlparse, urlunparse, parse_qs
from urllib.error import URLError
else:
from urllib import urlretrieve
from urllib2 import (
build_opener, install_opener, urlopen,
HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler,
URLError
)
from urlparse import urlparse, urlunparse, parse_qs
from charmhelpers.fetch import (
BaseFetchHandler,
@ -15,6 +30,24 @@ from charmhelpers.payload.archive import (
from charmhelpers.core.host import mkdir, check_hash
def splituser(host):
'''urllib.splituser(), but six's support of this seems broken'''
_userprog = re.compile('^(.*)@(.*)$')
match = _userprog.match(host)
if match:
return match.group(1, 2)
return None, host
def splitpasswd(user):
'''urllib.splitpasswd(), but six's support of this is missing'''
_passwdprog = re.compile('^([^:]*):(.*)$', re.S)
match = _passwdprog.match(user)
if match:
return match.group(1, 2)
return user, None
class ArchiveUrlFetchHandler(BaseFetchHandler):
"""
Handler to download archive files from arbitrary URLs.
@ -42,20 +75,20 @@ class ArchiveUrlFetchHandler(BaseFetchHandler):
"""
# propagate all exceptions
# URLError, OSError, etc
proto, netloc, path, params, query, fragment = urlparse.urlparse(source)
proto, netloc, path, params, query, fragment = urlparse(source)
if proto in ('http', 'https'):
auth, barehost = urllib2.splituser(netloc)
auth, barehost = splituser(netloc)
if auth is not None:
source = urlparse.urlunparse((proto, barehost, path, params, query, fragment))
username, password = urllib2.splitpasswd(auth)
passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
source = urlunparse((proto, barehost, path, params, query, fragment))
username, password = splitpasswd(auth)
passman = HTTPPasswordMgrWithDefaultRealm()
# Realm is set to None in add_password to force the username and password
# to be used whatever the realm
passman.add_password(None, source, username, password)
authhandler = urllib2.HTTPBasicAuthHandler(passman)
opener = urllib2.build_opener(authhandler)
urllib2.install_opener(opener)
response = urllib2.urlopen(source)
authhandler = HTTPBasicAuthHandler(passman)
opener = build_opener(authhandler)
install_opener(opener)
response = urlopen(source)
try:
with open(dest, 'w') as dest_file:
dest_file.write(response.read())
@ -91,17 +124,21 @@ class ArchiveUrlFetchHandler(BaseFetchHandler):
url_parts = self.parse_url(source)
dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')
if not os.path.exists(dest_dir):
mkdir(dest_dir, perms=0755)
mkdir(dest_dir, perms=0o755)
dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path))
try:
self.download(source, dld_file)
except urllib2.URLError as e:
except URLError as e:
raise UnhandledSource(e.reason)
except OSError as e:
raise UnhandledSource(e.strerror)
options = urlparse.parse_qs(url_parts.fragment)
options = parse_qs(url_parts.fragment)
for key, value in options.items():
if key in hashlib.algorithms:
if not six.PY3:
algorithms = hashlib.algorithms
else:
algorithms = hashlib.algorithms_available
if key in algorithms:
check_hash(dld_file, value, key)
if checksum:
check_hash(dld_file, checksum, hash_type)

View File

@ -5,6 +5,10 @@ from charmhelpers.fetch import (
)
from charmhelpers.core.host import mkdir
import six
if six.PY3:
raise ImportError('bzrlib does not support Python3')
try:
from bzrlib.branch import Branch
except ImportError:
@ -42,7 +46,7 @@ class BzrUrlFetchHandler(BaseFetchHandler):
dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched",
branch_name)
if not os.path.exists(dest_dir):
mkdir(dest_dir, perms=0755)
mkdir(dest_dir, perms=0o755)
try:
self.branch(source, dest_dir)
except OSError as e:

View File

@ -5,6 +5,10 @@ from charmhelpers.fetch import (
)
from charmhelpers.core.host import mkdir
import six
if six.PY3:
raise ImportError('GitPython does not support Python 3')
try:
from git import Repo
except ImportError:
@ -17,7 +21,7 @@ class GitUrlFetchHandler(BaseFetchHandler):
"""Handler for git branches via generic and github URLs"""
def can_handle(self, source):
url_parts = self.parse_url(source)
#TODO (mattyw) no support for ssh git@ yet
# TODO (mattyw) no support for ssh git@ yet
if url_parts.scheme not in ('http', 'https', 'git'):
return False
else:
@ -36,7 +40,7 @@ class GitUrlFetchHandler(BaseFetchHandler):
dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched",
branch_name)
if not os.path.exists(dest_dir):
mkdir(dest_dir, perms=0755)
mkdir(dest_dir, perms=0o755)
try:
self.clone(source, dest_dir, branch)
except OSError as e:

View File

@ -45,6 +45,8 @@ from charmhelpers.contrib.network.ip import (
get_ipv6_addr,
format_ipv6_addr
)
from charmhelpers.core.sysctl import create as create_sysctl
from utils import (
render_template,
get_public_addr,
@ -122,6 +124,10 @@ def config_changed():
log('Invalid OSD disk format configuration specified', level=ERROR)
sys.exit(1)
sysctl_dict = config('sysctl')
if sysctl_dict:
create_sysctl(sysctl_dict, '/etc/sysctl.d/50-ceph-charm.conf')
emit_cephconf()
e_mountpoint = config('ephemeral-unmount')
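The create_sysctl helper imported above is not included in this diff. A minimal sketch of the behaviour the hook relies on, assuming the helper parses the YAML string, writes a sysctl.d snippet and applies it (the function name here is hypothetical):

import yaml
from subprocess import check_call

def create_sysctl_sketch(sysctl_yaml, sysctl_file):
    # Assumed behaviour: turn the YAML mapping from the charm config into
    # key=value lines and apply them immediately with sysctl -p.
    settings = yaml.safe_load(sysctl_yaml) or {}
    with open(sysctl_file, 'w') as fd:
        for key, value in settings.items():
            fd.write("{}={}\n".format(key, value))
    check_call(['sysctl', '-p', sysctl_file])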

View File

@ -1,6 +1,6 @@
import amulet
import os
import six
class AmuletDeployment(object):
@ -52,12 +52,12 @@ class AmuletDeployment(object):
def _add_relations(self, relations):
"""Add all of the relations for the services."""
for k, v in relations.iteritems():
for k, v in six.iteritems(relations):
self.d.relate(k, v)
def _configure_services(self, configs):
"""Configure all of the services."""
for service, config in configs.iteritems():
for service, config in six.iteritems(configs):
self.d.configure(service, config)
def _deploy(self):

View File

@ -5,6 +5,8 @@ import re
import sys
import time
import six
class AmuletUtils(object):
"""Amulet utilities.
@ -58,7 +60,7 @@ class AmuletUtils(object):
Verify the specified services are running on the corresponding
service units.
"""
for k, v in commands.iteritems():
for k, v in six.iteritems(commands):
for cmd in v:
output, code = k.run(cmd)
if code != 0:
@ -100,11 +102,11 @@ class AmuletUtils(object):
longs, or can be a function that evaluates a variable and returns a
bool.
"""
for k, v in expected.iteritems():
for k, v in six.iteritems(expected):
if k in actual:
if (isinstance(v, basestring) or
if (isinstance(v, six.string_types) or
isinstance(v, bool) or
isinstance(v, (int, long))):
isinstance(v, six.integer_types)):
if v != actual[k]:
return "{}:{}".format(k, actual[k])
elif not v(actual[k]):

View File

@ -1,3 +1,4 @@
import six
from charmhelpers.contrib.amulet.deployment import (
AmuletDeployment
)
@ -69,7 +70,7 @@ class OpenStackAmuletDeployment(AmuletDeployment):
def _configure_services(self, configs):
"""Configure all of the services."""
for service, config in configs.iteritems():
for service, config in six.iteritems(configs):
self.d.configure(service, config)
def _get_openstack_release(self):

View File

@ -7,6 +7,8 @@ import glanceclient.v1.client as glance_client
import keystoneclient.v2_0 as keystone_client
import novaclient.v1_1.client as nova_client
import six
from charmhelpers.contrib.amulet.utils import (
AmuletUtils
)
@ -60,7 +62,7 @@ class OpenStackAmuletUtils(AmuletUtils):
expected service catalog endpoints.
"""
self.log.debug('actual: {}'.format(repr(actual)))
for k, v in expected.iteritems():
for k, v in six.iteritems(expected):
if k in actual:
ret = self._validate_dict_data(expected[k][0], actual[k][0])
if ret: