Helpful cleanups.

1. Remove the custom Paths.join method now that all
   file access should be going through the util file
   methods (and they can be mocked out as needed).
2. Adjust all occurrences of the above join method to
   either drop it or use the standard os.path.join
   (which can also be mocked out as needed); see the
   sketch below.
3. Stop pylint from complaining about the tests
   folder's 'helpers.py' not being found.
4. Add a pylintrc file that is used instead of the
   options previously hidden in the 'run-pylint' tool.
harlowja 2012-10-27 19:25:48 -07:00
parent 47c95ed210
commit 4d1d7a9bb5
35 changed files with 170 additions and 251 deletions
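
The bulk of the diff below is one mechanical pattern: handlers and distro code used to route every target path through cloud.paths.join(...) so a fake read/write root could be injected, and they now hand the plain path straight to the util helpers while tests intercept those helpers instead. A minimal, self-contained sketch of the resulting shape (illustrative only: the stand-in util class, the template text, and the use of unittest.mock are assumptions; the real handler lives in cloudinit/config/cc_apt_pipelining.py and the project's unit tests use the mocker library):

# Illustrative sketch, not project code: the post-refactor shape of
# write_apt_snippet() plus the way a test can now stub the single
# util entry point instead of faking a filesystem root.
from unittest import mock  # the repo's own tests use 'mocker' instead


class util(object):  # stand-in for cloudinit.util
    @staticmethod
    def write_file(path, contents):
        with open(path, "w") as fh:
            fh.write(contents)


def write_apt_snippet(setting, f_name):
    # The caller now passes the real path directly; no cloud/paths argument.
    util.write_file(f_name, 'Acquire::http::Pipeline-Depth "%s";\n' % setting)


# In a test, patch the util helper and assert on what it was handed.
with mock.patch.object(util, "write_file") as write_file:
    write_apt_snippet("0", "/etc/apt/apt.conf.d/90cloud-init-pipelining")
    write_file.assert_called_once_with(
        "/etc/apt/apt.conf.d/90cloud-init-pipelining",
        'Acquire::http::Pipeline-Depth "0";\n')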

View File

@ -1,20 +1,20 @@
CWD=$(shell pwd)
PY_FILES=$(shell find cloudinit bin tests tools -name "*.py")
PY_FILES=$(shell find cloudinit bin tests tools -type f -name "*.py")
PY_FILES+="bin/cloud-init"
all: test
pep8:
$(CWD)/tools/run-pep8 $(PY_FILES)
@$(CWD)/tools/run-pep8 $(PY_FILES)
pylint:
$(CWD)/tools/run-pylint $(PY_FILES)
@$(CWD)/tools/run-pylint $(PY_FILES)
pyflakes:
pyflakes $(PY_FILES)
test:
nosetests $(noseopts) tests/unittests/
@nosetests $(noseopts) tests/
2to3:
2to3 $(PY_FILES)

View File

@ -34,26 +34,24 @@ APT_PIPE_TPL = ("//Written by cloud-init per 'apt_pipelining'\n"
# on TCP connections - otherwise data corruption will occur.
def handle(_name, cfg, cloud, log, _args):
def handle(_name, cfg, _cloud, log, _args):
apt_pipe_value = util.get_cfg_option_str(cfg, "apt_pipelining", False)
apt_pipe_value_s = str(apt_pipe_value).lower().strip()
if apt_pipe_value_s == "false":
write_apt_snippet(cloud, "0", log, DEFAULT_FILE)
write_apt_snippet("0", log, DEFAULT_FILE)
elif apt_pipe_value_s in ("none", "unchanged", "os"):
return
elif apt_pipe_value_s in [str(b) for b in xrange(0, 6)]:
write_apt_snippet(cloud, apt_pipe_value_s, log, DEFAULT_FILE)
write_apt_snippet(apt_pipe_value_s, log, DEFAULT_FILE)
else:
log.warn("Invalid option for apt_pipeling: %s", apt_pipe_value)
def write_apt_snippet(cloud, setting, log, f_name):
def write_apt_snippet(setting, log, f_name):
"""Writes f_name with apt pipeline depth 'setting'."""
file_contents = APT_PIPE_TPL % (setting)
util.write_file(cloud.paths.join(False, f_name), file_contents)
util.write_file(f_name, file_contents)
log.debug("Wrote %s with apt pipeline depth setting %s", f_name, setting)

View File

@ -78,8 +78,7 @@ def handle(name, cfg, cloud, log, _args):
try:
# See man 'apt.conf'
contents = PROXY_TPL % (proxy)
util.write_file(cloud.paths.join(False, proxy_filename),
contents)
util.write_file(proxy_filename, contents)
except Exception as e:
util.logexc(log, "Failed to write proxy to %s", proxy_filename)
elif os.path.isfile(proxy_filename):
@ -90,7 +89,7 @@ def handle(name, cfg, cloud, log, _args):
params = mirrors
params['RELEASE'] = release
params['MIRROR'] = mirror
errors = add_sources(cloud, cfg['apt_sources'], params)
errors = add_sources(cfg['apt_sources'], params)
for e in errors:
log.warn("Source Error: %s", ':'.join(e))
@ -196,11 +195,10 @@ def generate_sources_list(codename, mirrors, cloud, log):
params = {'codename': codename}
for k in mirrors:
params[k] = mirrors[k]
out_fn = cloud.paths.join(False, '/etc/apt/sources.list')
templater.render_to_file(template_fn, out_fn, params)
templater.render_to_file(template_fn, '/etc/apt/sources.list', params)
def add_sources(cloud, srclist, template_params=None):
def add_sources(srclist, template_params=None):
"""
add entries in /etc/apt/sources.list.d for each abbreviated
sources.list entry in 'srclist'. When rendering template, also
@ -250,8 +248,7 @@ def add_sources(cloud, srclist, template_params=None):
try:
contents = "%s\n" % (source)
util.write_file(cloud.paths.join(False, ent['filename']),
contents, omode="ab")
util.write_file(ent['filename'], contents, omode="ab")
except:
errorlist.append([source,
"failed write to file %s" % ent['filename']])

View File

@ -22,6 +22,7 @@ CA_CERT_PATH = "/usr/share/ca-certificates/"
CA_CERT_FILENAME = "cloud-init-ca-certs.crt"
CA_CERT_CONFIG = "/etc/ca-certificates.conf"
CA_CERT_SYSTEM_PATH = "/etc/ssl/certs/"
CA_CERT_FULL_PATH = os.path.join(CA_CERT_PATH, CA_CERT_FILENAME)
distros = ['ubuntu', 'debian']
@ -33,7 +34,7 @@ def update_ca_certs():
util.subp(["update-ca-certificates"], capture=False)
def add_ca_certs(paths, certs):
def add_ca_certs(certs):
"""
Adds certificates to the system. To actually apply the new certificates
you must also call L{update_ca_certs}.
@ -43,27 +44,24 @@ def add_ca_certs(paths, certs):
if certs:
# First ensure they are strings...
cert_file_contents = "\n".join([str(c) for c in certs])
cert_file_fullpath = os.path.join(CA_CERT_PATH, CA_CERT_FILENAME)
cert_file_fullpath = paths.join(False, cert_file_fullpath)
util.write_file(cert_file_fullpath, cert_file_contents, mode=0644)
util.write_file(CA_CERT_FULL_PATH, cert_file_contents, mode=0644)
# Append cert filename to CA_CERT_CONFIG file.
util.write_file(paths.join(False, CA_CERT_CONFIG),
"\n%s" % CA_CERT_FILENAME, omode="ab")
util.write_file(CA_CERT_CONFIG, "\n%s" % CA_CERT_FILENAME, omode="ab")
def remove_default_ca_certs(paths):
def remove_default_ca_certs():
"""
Removes all default trusted CA certificates from the system. To actually
apply the change you must also call L{update_ca_certs}.
"""
util.delete_dir_contents(paths.join(False, CA_CERT_PATH))
util.delete_dir_contents(paths.join(False, CA_CERT_SYSTEM_PATH))
util.write_file(paths.join(False, CA_CERT_CONFIG), "", mode=0644)
util.delete_dir_contents(CA_CERT_PATH)
util.delete_dir_contents(CA_CERT_SYSTEM_PATH)
util.write_file(CA_CERT_CONFIG, "", mode=0644)
debconf_sel = "ca-certificates ca-certificates/trust_new_crts select no"
util.subp(('debconf-set-selections', '-'), debconf_sel)
def handle(name, cfg, cloud, log, _args):
def handle(name, cfg, _cloud, log, _args):
"""
Call to handle ca-cert sections in cloud-config file.
@ -85,14 +83,14 @@ def handle(name, cfg, cloud, log, _args):
# default trusted CA certs first.
if ca_cert_cfg.get("remove-defaults", False):
log.debug("Removing default certificates")
remove_default_ca_certs(cloud.paths)
remove_default_ca_certs()
# If we are given any new trusted CA certs to add, add them.
if "trusted" in ca_cert_cfg:
trusted_certs = util.get_cfg_option_list(ca_cert_cfg, "trusted")
if trusted_certs:
log.debug("Adding %d certificates" % len(trusted_certs))
add_ca_certs(cloud.paths, trusted_certs)
add_ca_certs(trusted_certs)
# Update the system with the new cert configuration.
log.debug("Updating certificates")

View File

@ -26,6 +26,15 @@ from cloudinit import util
RUBY_VERSION_DEFAULT = "1.8"
CHEF_DIRS = [
'/etc/chef',
'/var/log/chef',
'/var/lib/chef',
'/var/cache/chef',
'/var/backups/chef',
'/var/run/chef',
]
def handle(name, cfg, cloud, log, _args):
@ -37,24 +46,15 @@ def handle(name, cfg, cloud, log, _args):
chef_cfg = cfg['chef']
# Ensure the chef directories we use exist
c_dirs = [
'/etc/chef',
'/var/log/chef',
'/var/lib/chef',
'/var/cache/chef',
'/var/backups/chef',
'/var/run/chef',
]
for d in c_dirs:
util.ensure_dir(cloud.paths.join(False, d))
for d in CHEF_DIRS:
util.ensure_dir(d)
# Set the validation key based on the presence of either 'validation_key'
# or 'validation_cert'. In the case where both exist, 'validation_key'
# takes precedence
for key in ('validation_key', 'validation_cert'):
if key in chef_cfg and chef_cfg[key]:
v_fn = cloud.paths.join(False, '/etc/chef/validation.pem')
util.write_file(v_fn, chef_cfg[key])
util.write_file('/etc/chef/validation.pem', chef_cfg[key])
break
# Create the chef config from template
@ -68,8 +68,7 @@ def handle(name, cfg, cloud, log, _args):
'_default'),
'validation_name': chef_cfg['validation_name']
}
out_fn = cloud.paths.join(False, '/etc/chef/client.rb')
templater.render_to_file(template_fn, out_fn, params)
templater.render_to_file(template_fn, '/etc/chef/client.rb', params)
else:
log.warn("No template found, not rendering to /etc/chef/client.rb")
@ -81,8 +80,7 @@ def handle(name, cfg, cloud, log, _args):
initial_attributes = chef_cfg['initial_attributes']
for k in list(initial_attributes.keys()):
initial_json[k] = initial_attributes[k]
firstboot_fn = cloud.paths.join(False, '/etc/chef/firstboot.json')
util.write_file(firstboot_fn, json.dumps(initial_json))
util.write_file('/etc/chef/firstboot.json', json.dumps(initial_json))
# If chef is not installed, we install chef based on 'install_type'
if not os.path.isfile('/usr/bin/chef-client'):

View File

@ -66,22 +66,16 @@ def handle(_name, cfg, cloud, log, _args):
merge_data = [
LSC_BUILTIN_CFG,
cloud.paths.join(True, LSC_CLIENT_CFG_FILE),
LSC_CLIENT_CFG_FILE,
ls_cloudcfg,
]
merged = merge_together(merge_data)
lsc_client_fn = cloud.paths.join(False, LSC_CLIENT_CFG_FILE)
lsc_dir = cloud.paths.join(False, os.path.dirname(lsc_client_fn))
if not os.path.isdir(lsc_dir):
util.ensure_dir(lsc_dir)
contents = StringIO()
merged.write(contents)
contents.flush()
util.write_file(lsc_client_fn, contents.getvalue())
log.debug("Wrote landscape config file to %s", lsc_client_fn)
util.ensure_dir(os.path.dirname(LSC_CLIENT_CFG_FILE))
util.write_file(LSC_CLIENT_CFG_FILE, contents.getvalue())
log.debug("Wrote landscape config file to %s", LSC_CLIENT_CFG_FILE)
util.write_file(LS_DEFAULT_FILE, "RUN=1\n")
util.subp(["service", "landscape-client", "restart"])

View File

@ -29,6 +29,7 @@ from cloudinit import util
PUBCERT_FILE = "/etc/mcollective/ssl/server-public.pem"
PRICERT_FILE = "/etc/mcollective/ssl/server-private.pem"
SERVER_CFG = '/etc/mcollective/server.cfg'
def handle(name, cfg, cloud, log, _args):
@ -48,26 +49,23 @@ def handle(name, cfg, cloud, log, _args):
if 'conf' in mcollective_cfg:
# Read server.cfg values from the
# original file in order to be able to mix the rest up
server_cfg_fn = cloud.paths.join(True, '/etc/mcollective/server.cfg')
mcollective_config = ConfigObj(server_cfg_fn)
mcollective_config = ConfigObj(SERVER_CFG)
# See: http://tiny.cc/jh9agw
for (cfg_name, cfg) in mcollective_cfg['conf'].iteritems():
if cfg_name == 'public-cert':
pubcert_fn = cloud.paths.join(True, PUBCERT_FILE)
util.write_file(pubcert_fn, cfg, mode=0644)
mcollective_config['plugin.ssl_server_public'] = pubcert_fn
util.write_file(PUBCERT_FILE, cfg, mode=0644)
mcollective_config['plugin.ssl_server_public'] = PUBCERT_FILE
mcollective_config['securityprovider'] = 'ssl'
elif cfg_name == 'private-cert':
pricert_fn = cloud.paths.join(True, PRICERT_FILE)
util.write_file(pricert_fn, cfg, mode=0600)
mcollective_config['plugin.ssl_server_private'] = pricert_fn
util.write_file(PRICERT_FILE, cfg, mode=0600)
mcollective_config['plugin.ssl_server_private'] = PRICERT_FILE
mcollective_config['securityprovider'] = 'ssl'
else:
if isinstance(cfg, (basestring, str)):
# Just set it in the 'main' section
mcollective_config[cfg_name] = cfg
elif isinstance(cfg, (dict)):
# Iterate throug the config items, create a section
# Iterate through the config items, create a section
# if it is needed and then add/or create items as needed
if cfg_name not in mcollective_config.sections:
mcollective_config[cfg_name] = {}
@ -78,14 +76,12 @@ def handle(name, cfg, cloud, log, _args):
mcollective_config[cfg_name] = str(cfg)
# We got all our config as wanted we'll rename
# the previous server.cfg and create our new one
old_fn = cloud.paths.join(False, '/etc/mcollective/server.cfg.old')
util.rename(server_cfg_fn, old_fn)
util.rename(SERVER_CFG, "%s.old" % (SERVER_CFG))
# Now we got the whole file, write to disk...
contents = StringIO()
mcollective_config.write(contents)
contents = contents.getvalue()
server_cfg_rw = cloud.paths.join(False, '/etc/mcollective/server.cfg')
util.write_file(server_cfg_rw, contents, mode=0644)
util.write_file(SERVER_CFG, contents, mode=0644)
# Start mcollective
util.subp(['service', 'mcollective', 'start'], capture=False)

View File

@ -28,6 +28,7 @@ from cloudinit import util
SHORTNAME_FILTER = r"^[x]{0,1}[shv]d[a-z][0-9]*$"
SHORTNAME = re.compile(SHORTNAME_FILTER)
WS = re.compile("[%s]+" % (whitespace))
FSTAB_PATH = "/etc/fstab"
def is_mdname(name):
@ -167,8 +168,7 @@ def handle(_name, cfg, cloud, log, _args):
cc_lines.append('\t'.join(line))
fstab_lines = []
fstab = util.load_file(cloud.paths.join(True, "/etc/fstab"))
for line in fstab.splitlines():
for line in util.load_file(FSTAB_PATH).splitlines():
try:
toks = WS.split(line)
if toks[3].find(comment) != -1:
@ -179,7 +179,7 @@ def handle(_name, cfg, cloud, log, _args):
fstab_lines.extend(cc_lines)
contents = "%s\n" % ('\n'.join(fstab_lines))
util.write_file(cloud.paths.join(False, "/etc/fstab"), contents)
util.write_file(FSTAB_PATH, contents)
if needswap:
try:
@ -188,9 +188,8 @@ def handle(_name, cfg, cloud, log, _args):
util.logexc(log, "Activating swap via 'swapon -a' failed")
for d in dirs:
real_dir = cloud.paths.join(False, d)
try:
util.ensure_dir(real_dir)
util.ensure_dir(d)
except:
util.logexc(log, "Failed to make '%s' config-mount", d)

View File

@ -84,10 +84,10 @@ def handle(name, cfg, cloud, log, args):
for (n, path) in pubkeys.iteritems():
try:
all_keys[n] = util.load_file(cloud.paths.join(True, path))
all_keys[n] = util.load_file(path)
except:
util.logexc(log, ("%s: failed to open, can not"
" phone home that data"), path)
" phone home that data!"), path)
submit_keys = {}
for k in post_list:

View File

@ -21,12 +21,32 @@
from StringIO import StringIO
import os
import pwd
import socket
from cloudinit import helpers
from cloudinit import util
PUPPET_CONF_PATH = '/etc/puppet/puppet.conf'
PUPPET_SSL_CERT_DIR = '/var/lib/puppet/ssl/certs/'
PUPPET_SSL_DIR = '/var/lib/puppet/ssl'
PUPPET_SSL_CERT_PATH = '/var/lib/puppet/ssl/certs/ca.pem'
def _autostart_puppet(log):
# Set puppet to automatically start
if os.path.exists('/etc/default/puppet'):
util.subp(['sed', '-i',
'-e', 's/^START=.*/START=yes/',
'/etc/default/puppet'], capture=False)
elif os.path.exists('/bin/systemctl'):
util.subp(['/bin/systemctl', 'enable', 'puppet.service'],
capture=False)
elif os.path.exists('/sbin/chkconfig'):
util.subp(['/sbin/chkconfig', 'puppet', 'on'], capture=False)
else:
log.warn(("Sorry we do not know how to enable"
" puppet services on this system"))
def handle(name, cfg, cloud, log, _args):
# If there isn't a puppet key in the configuration don't do anything
@ -43,8 +63,7 @@ def handle(name, cfg, cloud, log, _args):
# ... and then update the puppet configuration
if 'conf' in puppet_cfg:
# Add all sections from the conf object to puppet.conf
puppet_conf_fn = cloud.paths.join(True, '/etc/puppet/puppet.conf')
contents = util.load_file(puppet_conf_fn)
contents = util.load_file(PUPPET_CONF_PATH)
# Create object for reading puppet.conf values
puppet_config = helpers.DefaultingConfigParser()
# Read puppet.conf values from original file in order to be able to
@ -53,28 +72,19 @@ def handle(name, cfg, cloud, log, _args):
cleaned_lines = [i.lstrip() for i in contents.splitlines()]
cleaned_contents = '\n'.join(cleaned_lines)
puppet_config.readfp(StringIO(cleaned_contents),
filename=puppet_conf_fn)
filename=PUPPET_CONF_PATH)
for (cfg_name, cfg) in puppet_cfg['conf'].iteritems():
# Cert configuration is a special case
# Dump the puppet master ca certificate in the correct place
if cfg_name == 'ca_cert':
# Puppet ssl sub-directory isn't created yet
# Create it with the proper permissions and ownership
pp_ssl_dir = cloud.paths.join(False, '/var/lib/puppet/ssl')
util.ensure_dir(pp_ssl_dir, 0771)
util.chownbyid(pp_ssl_dir,
pwd.getpwnam('puppet').pw_uid, 0)
pp_ssl_certs = cloud.paths.join(False,
'/var/lib/puppet/ssl/certs/')
util.ensure_dir(pp_ssl_certs)
util.chownbyid(pp_ssl_certs,
pwd.getpwnam('puppet').pw_uid, 0)
pp_ssl_ca_certs = cloud.paths.join(False,
('/var/lib/puppet/'
'ssl/certs/ca.pem'))
util.write_file(pp_ssl_ca_certs, cfg)
util.chownbyid(pp_ssl_ca_certs,
pwd.getpwnam('puppet').pw_uid, 0)
util.ensure_dir(PUPPET_SSL_DIR, 0771)
util.chownbyname(PUPPET_SSL_DIR, 'puppet', 'root')
util.ensure_dir(PUPPET_SSL_CERT_DIR)
util.chownbyname(PUPPET_SSL_CERT_DIR, 'puppet', 'root')
util.write_file(PUPPET_SSL_CERT_PATH, str(cfg))
util.chownbyname(PUPPET_SSL_CERT_PATH, 'puppet', 'root')
else:
# Iterate throug the config items, we'll use ConfigParser.set
# to overwrite or create new items as needed
@ -90,25 +100,11 @@ def handle(name, cfg, cloud, log, _args):
puppet_config.set(cfg_name, o, v)
# We got all our config as wanted we'll rename
# the previous puppet.conf and create our new one
conf_old_fn = cloud.paths.join(False,
'/etc/puppet/puppet.conf.old')
util.rename(puppet_conf_fn, conf_old_fn)
puppet_conf_rw = cloud.paths.join(False, '/etc/puppet/puppet.conf')
util.write_file(puppet_conf_rw, puppet_config.stringify())
util.rename(PUPPET_CONF_PATH, "%s.old" % (PUPPET_CONF_PATH))
util.write_file(PUPPET_CONF_PATH, puppet_config.stringify())
# Set puppet to automatically start
if os.path.exists('/etc/default/puppet'):
util.subp(['sed', '-i',
'-e', 's/^START=.*/START=yes/',
'/etc/default/puppet'], capture=False)
elif os.path.exists('/bin/systemctl'):
util.subp(['/bin/systemctl', 'enable', 'puppet.service'],
capture=False)
elif os.path.exists('/sbin/chkconfig'):
util.subp(['/sbin/chkconfig', 'puppet', 'on'], capture=False)
else:
log.warn(("Sorry we do not know how to enable"
" puppet services on this system"))
# Set it up so it autostarts
_autostart_puppet(log)
# Start puppetd
util.subp(['service', 'puppet', 'start'], capture=False)

View File

@ -62,7 +62,7 @@ def get_fs_type(st_dev, path, log):
raise
def handle(name, cfg, cloud, log, args):
def handle(name, cfg, _cloud, log, args):
if len(args) != 0:
resize_root = args[0]
else:
@ -74,11 +74,10 @@ def handle(name, cfg, cloud, log, args):
# TODO(harlowja) is the directory ok to be used??
resize_root_d = util.get_cfg_option_str(cfg, "resize_rootfs_tmp", "/run")
resize_root_d = cloud.paths.join(False, resize_root_d)
util.ensure_dir(resize_root_d)
# TODO(harlowja): allow what is to be resized to be configurable??
resize_what = cloud.paths.join(False, "/")
resize_what = "/"
with util.ExtendedTemporaryFile(prefix="cloudinit.resizefs.",
dir=resize_root_d, delete=True) as tfh:
devpth = tfh.name

View File

@ -71,8 +71,7 @@ def handle(name, cfg, cloud, log, _args):
try:
contents = "%s\n" % (content)
util.write_file(cloud.paths.join(False, filename),
contents, omode=omode)
util.write_file(filename, contents, omode=omode)
except Exception:
util.logexc(log, "Failed to write to %s", filename)

View File

@ -33,6 +33,6 @@ def handle(name, cfg, cloud, log, _args):
cmd = cfg["runcmd"]
try:
content = util.shellify(cmd)
util.write_file(cloud.paths.join(False, out_fn), content, 0700)
util.write_file(out_fn, content, 0700)
except:
util.logexc(log, "Failed to shellify %s into file %s", cmd, out_fn)

View File

@ -34,8 +34,7 @@ def handle(name, cfg, cloud, log, _args):
cloud.distro.install_packages(["salt-minion"])
# Ensure we can configure files at the right dir
config_dir = cloud.paths.join(False, salt_cfg.get("config_dir",
'/etc/salt'))
config_dir = salt_cfg.get("config_dir", '/etc/salt')
util.ensure_dir(config_dir)
# ... and then update the salt configuration
@ -47,8 +46,7 @@ def handle(name, cfg, cloud, log, _args):
# ... copy the key pair if specified
if 'public_key' in salt_cfg and 'private_key' in salt_cfg:
pki_dir = cloud.paths.join(False, salt_cfg.get('pki_dir',
'/etc/salt/pki'))
pki_dir = salt_cfg.get('pki_dir', '/etc/salt/pki')
with util.umask(077):
util.ensure_dir(pki_dir)
pub_name = os.path.join(pki_dir, 'minion.pub')

View File

@ -114,8 +114,7 @@ def handle(_name, cfg, cloud, log, args):
replaced_auth = False
# See: man sshd_config
conf_fn = cloud.paths.join(True, ssh_util.DEF_SSHD_CFG)
old_lines = ssh_util.parse_ssh_config(conf_fn)
old_lines = ssh_util.parse_ssh_config(ssh_util.DEF_SSHD_CFG)
new_lines = []
i = 0
for (i, line) in enumerate(old_lines):
@ -134,8 +133,7 @@ def handle(_name, cfg, cloud, log, args):
pw_auth))
lines = [str(e) for e in new_lines]
ssh_rw_fn = cloud.paths.join(False, ssh_util.DEF_SSHD_CFG)
util.write_file(ssh_rw_fn, "\n".join(lines))
util.write_file(ssh_util.DEF_SSHD_CFG, "\n".join(lines))
try:
cmd = ['service']

View File

@ -59,7 +59,7 @@ def handle(_name, cfg, cloud, log, _args):
# remove the static keys from the pristine image
if cfg.get("ssh_deletekeys", True):
key_pth = cloud.paths.join(False, "/etc/ssh/", "ssh_host_*key*")
key_pth = os.path.join("/etc/ssh/", "ssh_host_*key*")
for f in glob.glob(key_pth):
try:
util.del_file(f)
@ -72,8 +72,7 @@ def handle(_name, cfg, cloud, log, _args):
if key in KEY_2_FILE:
tgt_fn = KEY_2_FILE[key][0]
tgt_perms = KEY_2_FILE[key][1]
util.write_file(cloud.paths.join(False, tgt_fn),
val, tgt_perms)
util.write_file(tgt_fn, val, tgt_perms)
for (priv, pub) in PRIV_2_PUB.iteritems():
if pub in cfg['ssh_keys'] or not priv in cfg['ssh_keys']:
@ -94,7 +93,7 @@ def handle(_name, cfg, cloud, log, _args):
'ssh_genkeytypes',
GENERATE_KEY_NAMES)
for keytype in genkeys:
keyfile = cloud.paths.join(False, KEY_FILE_TPL % (keytype))
keyfile = KEY_FILE_TPL % (keytype)
util.ensure_dir(os.path.dirname(keyfile))
if not os.path.exists(keyfile):
cmd = ['ssh-keygen', '-t', keytype, '-N', '', '-f', keyfile]
@ -118,17 +117,16 @@ def handle(_name, cfg, cloud, log, _args):
cfgkeys = cfg["ssh_authorized_keys"]
keys.extend(cfgkeys)
apply_credentials(keys, user, cloud.paths,
disable_root, disable_root_opts)
apply_credentials(keys, user, disable_root, disable_root_opts)
except:
util.logexc(log, "Applying ssh credentials failed!")
def apply_credentials(keys, user, paths, disable_root, disable_root_opts):
def apply_credentials(keys, user, disable_root, disable_root_opts):
keys = set(keys)
if user:
ssh_util.setup_user_keys(keys, user, '', paths)
ssh_util.setup_user_keys(keys, user, '')
if disable_root:
if not user:
@ -137,4 +135,4 @@ def apply_credentials(keys, user, paths, disable_root, disable_root_opts):
else:
key_prefix = ''
ssh_util.setup_user_keys(keys, 'root', key_prefix, paths)
ssh_util.setup_user_keys(keys, 'root', key_prefix)

View File

@ -97,9 +97,8 @@ def handle(name, cfg, cloud, log, _args):
"logging of ssh fingerprints disabled"), name)
hash_meth = util.get_cfg_option_str(cfg, "authkey_hash", "md5")
extract_func = ssh_util.extract_authorized_keys
(users, _groups) = ds.normalize_users_groups(cfg, cloud.distro)
for (user_name, _cfg) in users.items():
(auth_key_fn, auth_key_entries) = extract_func(user_name, cloud.paths)
_pprint_key_entries(user_name, auth_key_fn,
auth_key_entries, hash_meth)
(key_fn, key_entries) = ssh_util.extract_authorized_keys(user_name)
_pprint_key_entries(user_name, key_fn,
key_entries, hash_meth)

View File

@ -42,8 +42,7 @@ def handle(name, cfg, cloud, log, _args):
raise RuntimeError(("No hosts template could be"
" found for distro %s") % (cloud.distro.name))
out_fn = cloud.paths.join(False, '/etc/hosts')
templater.render_to_file(tpl_fn_name, out_fn,
templater.render_to_file(tpl_fn_name, '/etc/hosts',
{'hostname': hostname, 'fqdn': fqdn})
elif manage_hosts == "localhost":

View File

@ -122,8 +122,7 @@ class Distro(object):
new_etchosts = StringIO()
need_write = False
need_change = True
hosts_ro_fn = self._paths.join(True, "/etc/hosts")
for line in util.load_file(hosts_ro_fn).splitlines():
for line in util.load_file("/etc/hosts").splitlines():
if line.strip().startswith(header):
continue
if not line.strip() or line.strip().startswith("#"):
@ -147,8 +146,7 @@ class Distro(object):
need_write = True
if need_write:
contents = new_etchosts.getvalue()
util.write_file(self._paths.join(False, "/etc/hosts"),
contents, mode=0644)
util.write_file("/etc/hosts", contents, mode=0644)
def _bring_up_interface(self, device_name):
cmd = ['ifup', device_name]
@ -262,7 +260,7 @@ class Distro(object):
# Import SSH keys
if 'ssh_authorized_keys' in kwargs:
keys = set(kwargs['ssh_authorized_keys']) or []
ssh_util.setup_user_keys(keys, name, None, self._paths)
ssh_util.setup_user_keys(keys, name, key_prefix=None)
return True

View File

@ -43,7 +43,7 @@ class Distro(distros.Distro):
def apply_locale(self, locale, out_fn=None):
if not out_fn:
out_fn = self._paths.join(False, '/etc/default/locale')
out_fn = '/etc/default/locale'
util.subp(['locale-gen', locale], capture=False)
util.subp(['update-locale', locale], capture=False)
lines = ["# Created by cloud-init", 'LANG="%s"' % (locale), ""]
@ -54,8 +54,7 @@ class Distro(distros.Distro):
self.package_command('install', pkglist)
def _write_network(self, settings):
net_fn = self._paths.join(False, "/etc/network/interfaces")
util.write_file(net_fn, settings)
util.write_file("/etc/network/interfaces", settings)
return ['all']
def _bring_up_interfaces(self, device_names):
@ -69,12 +68,9 @@ class Distro(distros.Distro):
return distros.Distro._bring_up_interfaces(self, device_names)
def set_hostname(self, hostname):
out_fn = self._paths.join(False, "/etc/hostname")
self._write_hostname(hostname, out_fn)
if out_fn == '/etc/hostname':
# Only do this if we are running in non-adjusted root mode
LOG.debug("Setting hostname to %s", hostname)
util.subp(['hostname', hostname])
self._write_hostname(hostname, "/etc/hostname")
LOG.debug("Setting hostname to %s", hostname)
util.subp(['hostname', hostname])
def _write_hostname(self, hostname, out_fn):
# "" gives trailing newline.
@ -82,16 +78,14 @@ class Distro(distros.Distro):
def update_hostname(self, hostname, prev_fn):
hostname_prev = self._read_hostname(prev_fn)
read_fn = self._paths.join(True, "/etc/hostname")
hostname_in_etc = self._read_hostname(read_fn)
hostname_in_etc = self._read_hostname("/etc/hostname")
update_files = []
if not hostname_prev or hostname_prev != hostname:
update_files.append(prev_fn)
if (not hostname_in_etc or
(hostname_in_etc == hostname_prev and
hostname_in_etc != hostname)):
write_fn = self._paths.join(False, "/etc/hostname")
update_files.append(write_fn)
update_files.append("/etc/hostname")
for fn in update_files:
try:
self._write_hostname(hostname, fn)
@ -103,7 +97,6 @@ class Distro(distros.Distro):
LOG.debug(("%s differs from /etc/hostname."
" Assuming user maintained hostname."), prev_fn)
if "/etc/hostname" in update_files:
# Only do this if we are running in non-adjusted root mode
LOG.debug("Setting hostname to %s", hostname)
util.subp(['hostname', hostname])
@ -130,9 +123,8 @@ class Distro(distros.Distro):
" no file found at %s") % (tz, tz_file))
# "" provides trailing newline during join
tz_lines = ["# Created by cloud-init", str(tz), ""]
tz_fn = self._paths.join(False, "/etc/timezone")
util.write_file(tz_fn, "\n".join(tz_lines))
util.copy(tz_file, self._paths.join(False, "/etc/localtime"))
util.write_file("/etc/timezone", "\n".join(tz_lines))
util.copy(tz_file, "/etc/localtime")
def package_command(self, command, args=None):
e = os.environ.copy()

View File

@ -302,14 +302,10 @@ class Paths(object):
def __init__(self, path_cfgs, ds=None):
self.cfgs = path_cfgs
# Populate all the initial paths
self.cloud_dir = self.join(False,
path_cfgs.get('cloud_dir',
'/var/lib/cloud'))
self.cloud_dir = path_cfgs.get('cloud_dir', '/var/lib/cloud')
self.instance_link = os.path.join(self.cloud_dir, 'instance')
self.boot_finished = os.path.join(self.instance_link, "boot-finished")
self.upstart_conf_d = path_cfgs.get('upstart_dir')
if self.upstart_conf_d:
self.upstart_conf_d = self.join(False, self.upstart_conf_d)
self.seed_dir = os.path.join(self.cloud_dir, 'seed')
# This one isn't joined, since it should just be read-only
template_dir = path_cfgs.get('templates_dir', '/etc/cloud/templates/')
@ -328,29 +324,6 @@ class Paths(object):
# Set when a datasource becomes active
self.datasource = ds
# joins the paths but also appends a read
# or write root if available
def join(self, read_only, *paths):
if read_only:
root = self.cfgs.get('read_root')
else:
root = self.cfgs.get('write_root')
if not paths:
return root
if len(paths) > 1:
joined = os.path.join(*paths)
else:
joined = paths[0]
if root:
pre_joined = joined
# Need to remove any starting '/' since this
# will confuse os.path.join
joined = joined.lstrip("/")
joined = os.path.join(root, joined)
LOG.debug("Translated %s to adjusted path %s (read-only=%s)",
pre_joined, joined, read_only)
return joined
# get_ipath_cur: get the current instance path for an item
def get_ipath_cur(self, name=None):
ipath = self.instance_link
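
With join() removed, Paths only assembles its standard locations with os.path.join, so a test that wants a sandboxed filesystem can get one through the path_cfgs dictionary alone rather than through a translated read/write root. A minimal sketch of that idea, reusing the attribute names visible in the hunk above (the trimmed class body and the temp-dir usage are illustrative, not the full implementation):

# Stand-in for cloudinit.helpers.Paths after this commit; only the
# attributes shown in the hunk above are reproduced here.
import os
import tempfile


class Paths(object):
    def __init__(self, path_cfgs, ds=None):
        self.cfgs = path_cfgs
        self.cloud_dir = path_cfgs.get('cloud_dir', '/var/lib/cloud')
        self.instance_link = os.path.join(self.cloud_dir, 'instance')
        self.boot_finished = os.path.join(self.instance_link, 'boot-finished')
        self.seed_dir = os.path.join(self.cloud_dir, 'seed')
        self.datasource = ds


# A unit test can sandbox everything by configuration alone:
tmp_dir = tempfile.mkdtemp()
paths = Paths({'cloud_dir': tmp_dir})
assert paths.instance_link == os.path.join(tmp_dir, 'instance')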

View File

@ -20,8 +20,6 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from email.mime.multipart import MIMEMultipart
import abc
import os

View File

@ -212,17 +212,15 @@ def update_authorized_keys(old_entries, keys):
return '\n'.join(lines)
def users_ssh_info(username, paths):
def users_ssh_info(username):
pw_ent = pwd.getpwnam(username)
if not pw_ent:
if not pw_ent or not pw_ent.pw_dir:
raise RuntimeError("Unable to get ssh info for user %r" % (username))
ssh_dir = paths.join(False, os.path.join(pw_ent.pw_dir, '.ssh'))
return (ssh_dir, pw_ent)
return (os.path.join(pw_ent.pw_dir, '.ssh'), pw_ent)
def extract_authorized_keys(username, paths):
(ssh_dir, pw_ent) = users_ssh_info(username, paths)
sshd_conf_fn = paths.join(True, DEF_SSHD_CFG)
def extract_authorized_keys(username):
(ssh_dir, pw_ent) = users_ssh_info(username)
auth_key_fn = None
with util.SeLinuxGuard(ssh_dir, recursive=True):
try:
@ -231,7 +229,7 @@ def extract_authorized_keys(username, paths):
# The following tokens are defined: %% is replaced by a literal
# '%', %h is replaced by the home directory of the user being
# authenticated and %u is replaced by the username of that user.
ssh_cfg = parse_ssh_config_map(sshd_conf_fn)
ssh_cfg = parse_ssh_config_map(DEF_SSHD_CFG)
auth_key_fn = ssh_cfg.get("authorizedkeysfile", '').strip()
if not auth_key_fn:
auth_key_fn = "%h/.ssh/authorized_keys"
@ -240,7 +238,6 @@ def extract_authorized_keys(username, paths):
auth_key_fn = auth_key_fn.replace("%%", '%')
if not auth_key_fn.startswith('/'):
auth_key_fn = os.path.join(pw_ent.pw_dir, auth_key_fn)
auth_key_fn = paths.join(False, auth_key_fn)
except (IOError, OSError):
# Give up and use a default key filename
auth_key_fn = os.path.join(ssh_dir, 'authorized_keys')
@ -248,14 +245,13 @@ def extract_authorized_keys(username, paths):
" in ssh config"
" from %r, using 'AuthorizedKeysFile' file"
" %r instead"),
sshd_conf_fn, auth_key_fn)
auth_key_entries = parse_authorized_keys(auth_key_fn)
return (auth_key_fn, auth_key_entries)
DEF_SSHD_CFG, auth_key_fn)
return (auth_key_fn, parse_authorized_keys(auth_key_fn))
def setup_user_keys(keys, username, key_prefix, paths):
def setup_user_keys(keys, username, key_prefix):
# Make sure the users .ssh dir is setup accordingly
(ssh_dir, pwent) = users_ssh_info(username, paths)
(ssh_dir, pwent) = users_ssh_info(username)
if not os.path.isdir(ssh_dir):
util.ensure_dir(ssh_dir, mode=0700)
util.chownbyid(ssh_dir, pwent.pw_uid, pwent.pw_gid)
@ -267,7 +263,7 @@ def setup_user_keys(keys, username, key_prefix, paths):
key_entries.append(parser.parse(str(k), def_opt=key_prefix))
# Extract the old and make the new
(auth_key_fn, auth_key_entries) = extract_authorized_keys(username, paths)
(auth_key_fn, auth_key_entries) = extract_authorized_keys(username)
with util.SeLinuxGuard(ssh_dir, recursive=True):
content = update_authorized_keys(auth_key_entries, key_entries)
util.ensure_dir(os.path.dirname(auth_key_fn), mode=0700)

pylintrc (new file, 19 lines)
View File

@ -0,0 +1,19 @@
[General]
init-hook='import sys; sys.path.append("tests/")'
[MESSAGES CONTROL]
# See: http://pylint-messages.wikidot.com/all-codes
# W0142: *args and **kwargs are fine.
# W0511: TODOs in code comments are fine.
# W0702: No exception type(s) specified
# W0703: Catch "Exception"
# C0103: Invalid name
# C0111: Missing docstring
disable=W0142,W0511,W0702,W0703,C0103,C0111
[REPORTS]
reports=no
include-ids=yes
[FORMAT]
max-line-length=79
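
For context, the disabled checks cover patterns the codebase relies on deliberately; a tiny fragment (not from the tree, purely illustrative) that would trip several of them without this rcfile:

# Illustrative only: triggers W0142 (* or ** magic), W0702 (bare except)
# and C0111 (missing docstrings), all suppressed by the new pylintrc.
def call_quietly(func, *args, **kwargs):
    try:
        return func(*args, **kwargs)
    except:
        return None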

tests/__init__.py (new file, empty)
View File

View File

@ -1,14 +1,6 @@
import copy
import os
import sys
top_dir = os.path.join(os.path.dirname(__file__), os.pardir, "helpers.py")
top_dir = os.path.abspath(top_dir)
if os.path.exists(top_dir):
sys.path.insert(0, os.path.dirname(top_dir))
import helpers
from tests.unittests import helpers
import itertools

View File

@ -77,7 +77,7 @@ class TestConfig(MockerTestCase):
"""Test that a single cert gets passed to add_ca_certs."""
config = {"ca-certs": {"trusted": ["CERT1"]}}
self.mock_add(self.paths, ["CERT1"])
self.mock_add(["CERT1"])
self.mock_update()
self.mocker.replay()
@ -87,7 +87,7 @@ class TestConfig(MockerTestCase):
"""Test that multiple certs get passed to add_ca_certs."""
config = {"ca-certs": {"trusted": ["CERT1", "CERT2"]}}
self.mock_add(self.paths, ["CERT1", "CERT2"])
self.mock_add(["CERT1", "CERT2"])
self.mock_update()
self.mocker.replay()
@ -97,7 +97,7 @@ class TestConfig(MockerTestCase):
"""Test remove_defaults works as expected."""
config = {"ca-certs": {"remove-defaults": True}}
self.mock_remove(self.paths)
self.mock_remove()
self.mock_update()
self.mocker.replay()
@ -116,8 +116,8 @@ class TestConfig(MockerTestCase):
"""Test remove_defaults is not called when config value is False."""
config = {"ca-certs": {"remove-defaults": True, "trusted": ["CERT1"]}}
self.mock_remove(self.paths)
self.mock_add(self.paths, ["CERT1"])
self.mock_remove()
self.mock_add(["CERT1"])
self.mock_update()
self.mocker.replay()
@ -136,7 +136,7 @@ class TestAddCaCerts(MockerTestCase):
"""Test that no certificate are written if not provided."""
self.mocker.replace(util.write_file, passthrough=False)
self.mocker.replay()
cc_ca_certs.add_ca_certs(self.paths, [])
cc_ca_certs.add_ca_certs([])
def test_single_cert(self):
"""Test adding a single certificate to the trusted CAs."""
@ -149,7 +149,7 @@ class TestAddCaCerts(MockerTestCase):
"\ncloud-init-ca-certs.crt", omode="ab")
self.mocker.replay()
cc_ca_certs.add_ca_certs(self.paths, [cert])
cc_ca_certs.add_ca_certs([cert])
def test_multiple_certs(self):
"""Test adding multiple certificates to the trusted CAs."""
@ -163,7 +163,7 @@ class TestAddCaCerts(MockerTestCase):
"\ncloud-init-ca-certs.crt", omode="ab")
self.mocker.replay()
cc_ca_certs.add_ca_certs(self.paths, certs)
cc_ca_certs.add_ca_certs(certs)
class TestUpdateCaCerts(MockerTestCase):
@ -198,4 +198,4 @@ class TestRemoveDefaultCaCerts(MockerTestCase):
"ca-certificates ca-certificates/trust_new_crts select no")
self.mocker.replay()
cc_ca_certs.remove_default_ca_certs(self.paths)
cc_ca_certs.remove_default_ca_certs()

View File

@ -1,14 +1,6 @@
import os
import sys
# Allow running this test individually
top_dir = os.path.join(os.path.dirname(__file__), os.pardir, "helpers.py")
top_dir = os.path.abspath(top_dir)
if os.path.exists(top_dir):
sys.path.insert(0, os.path.dirname(top_dir))
import helpers
from tests.unittests import helpers
from cloudinit.settings import (PER_INSTANCE)
from cloudinit import stages

View File

@ -6,23 +6,16 @@ else
files=( "$@" );
fi
RC_FILE="pylintrc"
if [ ! -f $RC_FILE ]; then
RC_FILE="../pylintrc"
fi
cmd=(
pylint
--reports=n
--include-ids=y
--max-line-length=79
--rcfile=$RC_FILE
--disable=R
--disable=I
--disable=W0142 # Used * or ** magic
--disable=W0511 # TODO/FIXME note
--disable=W0702 # No exception type(s) specified
--disable=W0703 # Catch "Exception"
--disable=C0103 # Invalid name
--disable=C0111 # Missing docstring
"${files[@]}"
)