Add initial tests and repo review configs

- Initialize repo for gerrit review.
- Resync charm-helpers.
- Add missing install.real hook.
- Add missing status update hook.
- Add very basic unit tests and amulet tests.
- Set cpu mode default value.

Closes-Bug: #1639020
Closes-Bug: #1638930
Partial-Bug: #1638773
Partial-Bug: #1639943

Change-Id: Ifbf627329ddb25a29d6c0af4b9a34a488bc55629
Ryan Beisner 2016-11-03 02:18:38 +00:00
parent 64fdf10165
commit 3f7a8fad0c
84 changed files with 4806 additions and 27 deletions

2
.gitignore vendored
View File

@ -14,3 +14,5 @@ tests/cirros-*-disk.img
.project
.pydevproject
revision
func-results*
files/id*

4
.gitreview Normal file
View File

@ -0,0 +1,4 @@
[gerrit]
host=review.openstack.org
port=29418
project=openstack/charm-nova-compute-proxy.git

View File

@ -1,9 +1,8 @@
#!/usr/bin/make
PYTHON := /usr/bin/env python
lint:
@flake8 --exclude hooks/charmhelpers hooks
@charm proof
clean:
@rm -fv files/*
bin/charm_helpers_sync.py:
@mkdir -p bin
@ -11,5 +10,5 @@ bin/charm_helpers_sync.py:
> bin/charm_helpers_sync.py
sync: bin/charm_helpers_sync.py
@$(PYTHON) bin/charm_helpers_sync.py -c charm-helpers.yaml
@$(PYTHON) bin/charm_helpers_sync.py -c charm-helpers-hooks.yaml

2
bindep.txt Normal file
View File

@ -0,0 +1,2 @@
bzr [platform:dpkg] # for bzr+lp: python requirements format

View File

@ -14,3 +14,5 @@ include:
- contrib.python.packages
- payload
- contrib.charmsupport
- contrib.hardening|inc=*

View File

@ -81,7 +81,7 @@ options:
of Open vSwitch.
cpu-mode:
type: string
default:
default: none
description: |
Set to 'host-model' to clone the host CPU feature flags; to
'host-passthrough' to use the host CPU model exactly; to 'custom' to
@ -101,4 +101,3 @@ options:
default: 512
description: |
Amount of memory in MB to reserve for the host. Defaults to 512MB.

View File

@ -0,0 +1,38 @@
# Juju charm-helpers hardening library
## Description
This library provides multiple implementations of system and application
hardening that conform to the standards of http://hardening.io/.
Current implementations include:
* OS
* SSH
* MySQL
* Apache
## Requirements
* Juju Charms
## Usage
1. Synchronise this library into your charm and add the harden() decorator
(from contrib.hardening.harden) to any functions or methods you want to use
to trigger hardening of your application/system.
2. Add a config option called 'harden' to your charm config.yaml and set it to
a space-delimited list of hardening modules you want to run e.g. "os ssh"
3. Override any config defaults (contrib.hardening.defaults) by adding a file
called hardening.yaml to your charm root containing the name(s) of the
modules whose settings you want to override at root level and then any settings
with overrides e.g.
os:
general:
desktop_enable: True
4. Now just run your charm as usual and hardening will be applied each time the
hook runs.
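For example (following steps 1 and 2 above), a minimal sketch of a decorated hook might look like the following; the hook name and file layout are illustrative only and not part of this library:

    # hooks/config-changed (illustrative): the enabled hardening modules
    # (e.g. config 'harden' set to "os ssh") run before the hook body.
    from charmhelpers.contrib.hardening.harden import harden

    @harden()
    def config_changed():
        pass  # normal hook logic goes here

    if __name__ == '__main__':
        config_changed()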

View File

@ -0,0 +1,13 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@ -0,0 +1,17 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os import path
TEMPLATES_DIR = path.join(path.dirname(__file__), 'templates')

View File

@ -0,0 +1,29 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from charmhelpers.core.hookenv import (
log,
DEBUG,
)
from charmhelpers.contrib.hardening.apache.checks import config
def run_apache_checks():
log("Starting Apache hardening checks.", level=DEBUG)
checks = config.get_audits()
for check in checks:
log("Running '%s' check" % (check.__class__.__name__), level=DEBUG)
check.ensure_compliance()
log("Apache hardening checks complete.", level=DEBUG)

View File

@ -0,0 +1,98 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import subprocess
from charmhelpers.core.hookenv import (
log,
INFO,
)
from charmhelpers.contrib.hardening.audits.file import (
FilePermissionAudit,
DirectoryPermissionAudit,
NoReadWriteForOther,
TemplatedFile,
)
from charmhelpers.contrib.hardening.audits.apache import DisabledModuleAudit
from charmhelpers.contrib.hardening.apache import TEMPLATES_DIR
from charmhelpers.contrib.hardening import utils
def get_audits():
"""Get Apache hardening config audits.
:returns: list of audits
"""
if subprocess.call(['which', 'apache2'], stdout=subprocess.PIPE) != 0:
log("Apache server does not appear to be installed on this node - "
"skipping apache hardening", level=INFO)
return []
context = ApacheConfContext()
settings = utils.get_settings('apache')
audits = [
FilePermissionAudit(paths='/etc/apache2/apache2.conf', user='root',
group='root', mode=0o0640),
TemplatedFile(os.path.join(settings['common']['apache_dir'],
'mods-available/alias.conf'),
context,
TEMPLATES_DIR,
mode=0o0755,
user='root',
service_actions=[{'service': 'apache2',
'actions': ['restart']}]),
TemplatedFile(os.path.join(settings['common']['apache_dir'],
'conf-enabled/hardening.conf'),
context,
TEMPLATES_DIR,
mode=0o0640,
user='root',
service_actions=[{'service': 'apache2',
'actions': ['restart']}]),
DirectoryPermissionAudit(settings['common']['apache_dir'],
user='root',
group='root',
mode=0o640),
DisabledModuleAudit(settings['hardening']['modules_to_disable']),
NoReadWriteForOther(settings['common']['apache_dir']),
]
return audits
class ApacheConfContext(object):
"""Defines the set of key/value pairs to set in a apache config file.
This context, when called, will return a dictionary containing the
key/value pairs of setting to specify in the
/etc/apache/conf-enabled/hardening.conf file.
"""
def __call__(self):
settings = utils.get_settings('apache')
ctxt = settings['hardening']
out = subprocess.check_output(['apache2', '-v'])
ctxt['apache_version'] = re.search(r'.+version: Apache/(.+?)\s.+',
out).group(1)
ctxt['apache_icondir'] = '/usr/share/apache2/icons/'
ctxt['traceenable'] = settings['hardening']['traceenable']
return ctxt

View File

@ -0,0 +1,31 @@
###############################################################################
# WARNING: This configuration file is maintained by Juju. Local changes may
# be overwritten.
###############################################################################
<IfModule alias_module>
#
# Aliases: Add here as many aliases as you need (with no limit). The format is
# Alias fakename realname
#
# Note that if you include a trailing / on fakename then the server will
# require it to be present in the URL. So "/icons" isn't aliased in this
# example, only "/icons/". If the fakename is slash-terminated, then the
# realname must also be slash terminated, and if the fakename omits the
# trailing slash, the realname must also omit it.
#
# We include the /icons/ alias for FancyIndexed directory listings. If
# you do not use FancyIndexing, you may comment this out.
#
Alias /icons/ "{{ apache_icondir }}/"
<Directory "{{ apache_icondir }}">
Options -Indexes -MultiViews -FollowSymLinks
AllowOverride None
{% if apache_version == '2.4' -%}
Require all granted
{% else -%}
Order allow,deny
Allow from all
{% endif %}
</Directory>
</IfModule>

View File

@ -0,0 +1,18 @@
###############################################################################
# WARNING: This configuration file is maintained by Juju. Local changes may
# be overwritten.
###############################################################################
<Location / >
<LimitExcept {{ allowed_http_methods }} >
# http://httpd.apache.org/docs/2.4/upgrading.html
{% if apache_version > '2.2' -%}
Require all granted
{% else -%}
Order Allow,Deny
Deny from all
{% endif %}
</LimitExcept>
</Location>
TraceEnable {{ traceenable }}

View File

@ -0,0 +1,61 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class BaseAudit(object): # NO-QA
"""Base class for hardening checks.
The lifecycle of a hardening check is to first check to see if the system
is in compliance for the specified check. If it is not in compliance, the
check is expected to take whatever action is required to bring the system
into compliance.
"""
def __init__(self, *args, **kwargs):
self.unless = kwargs.get('unless', None)
super(BaseAudit, self).__init__()
def ensure_compliance(self):
"""Checks to see if the current hardening check is in compliance or
not.
If the check that is performed is not in compliance, then an exception
should be raised.
"""
pass
def _take_action(self):
"""Determines whether to perform the action or not.
Checks whether or not an action should be taken. This is determined by
the truthy value for the unless parameter. If unless is a callback
method, it will be invoked with no parameters in order to determine
whether or not the action should be taken. Otherwise, the truthy value
of the unless attribute will determine if the action should be
performed.
"""
# Do the action if there isn't an unless override.
if self.unless is None:
return True
# Invoke the callback if there is one.
if hasattr(self.unless, '__call__'):
results = self.unless()
if results:
return False
else:
return True
if self.unless:
return False
else:
return True
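For illustration only (the subclass and callback below are hypothetical and not part of charm-helpers), the 'unless' keyword argument lets a check skip its action when a callback or value is truthy:

    from charmhelpers.contrib.hardening.audits import BaseAudit

    def already_hardened():
        # Invoked with no arguments; a truthy result skips the action.
        return False

    class ExampleAudit(BaseAudit):
        def ensure_compliance(self):
            if self._take_action():
                print("applying compliance action")

    # Runs the action because the callback returns False.
    ExampleAudit(unless=already_hardened).ensure_compliance()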

View File

@ -0,0 +1,98 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import subprocess
from six import string_types
from charmhelpers.core.hookenv import (
log,
INFO,
ERROR,
)
from charmhelpers.contrib.hardening.audits import BaseAudit
class DisabledModuleAudit(BaseAudit):
"""Audits Apache2 modules.
Determines if the apache2 modules are enabled. If the modules are enabled
then they are disabled in ensure_compliance().
"""
def __init__(self, modules):
if modules is None:
self.modules = []
elif isinstance(modules, string_types):
self.modules = [modules]
else:
self.modules = modules
def ensure_compliance(self):
"""Ensures that the modules are not loaded."""
if not self.modules:
return
try:
loaded_modules = self._get_loaded_modules()
non_compliant_modules = []
for module in self.modules:
if module in loaded_modules:
log("Module '%s' is enabled but should not be." %
(module), level=INFO)
non_compliant_modules.append(module)
if len(non_compliant_modules) == 0:
return
for module in non_compliant_modules:
self._disable_module(module)
self._restart_apache()
except subprocess.CalledProcessError as e:
log('Error occurred auditing apache module compliance. '
'This may have been already reported. '
'Output is: %s' % e.output, level=ERROR)
@staticmethod
def _get_loaded_modules():
"""Returns the modules which are enabled in Apache."""
output = subprocess.check_output(['apache2ctl', '-M'])
modules = []
for line in output.strip().split():
# Each line of the enabled module output looks like:
# module_name (static|shared)
# Plus a header line at the top of the output which is stripped
# out by the regex.
matcher = re.search(r'^ (\S*)', line)
if matcher:
modules.append(matcher.group(1))
return modules
@staticmethod
def _disable_module(module):
"""Disables the specified module in Apache."""
try:
subprocess.check_call(['a2dismod', module])
except subprocess.CalledProcessError as e:
# Note: catch error here to allow the attempt of disabling
# multiple modules in one go rather than failing after the
# first module fails.
log('Error occurred disabling module %s. '
'Output is: %s' % (module, e.output), level=ERROR)
@staticmethod
def _restart_apache():
"""Restarts the apache process"""
subprocess.check_output(['service', 'apache2', 'restart'])

View File

@ -0,0 +1,103 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import # required for external apt import
from apt import apt_pkg
from six import string_types
from charmhelpers.fetch import (
apt_cache,
apt_purge
)
from charmhelpers.core.hookenv import (
log,
DEBUG,
WARNING,
)
from charmhelpers.contrib.hardening.audits import BaseAudit
class AptConfig(BaseAudit):
def __init__(self, config, **kwargs):
self.config = config
def verify_config(self):
apt_pkg.init()
for cfg in self.config:
value = apt_pkg.config.get(cfg['key'], cfg.get('default', ''))
if value and value != cfg['expected']:
log("APT config '%s' has unexpected value '%s' "
"(expected='%s')" %
(cfg['key'], value, cfg['expected']), level=WARNING)
def ensure_compliance(self):
self.verify_config()
class RestrictedPackages(BaseAudit):
"""Class used to audit restricted packages on the system."""
def __init__(self, pkgs, **kwargs):
super(RestrictedPackages, self).__init__(**kwargs)
if isinstance(pkgs, string_types) or not hasattr(pkgs, '__iter__'):
self.pkgs = [pkgs]
else:
self.pkgs = pkgs
def ensure_compliance(self):
cache = apt_cache()
for p in self.pkgs:
if p not in cache:
continue
pkg = cache[p]
if not self.is_virtual_package(pkg):
if not pkg.current_ver:
log("Package '%s' is not installed." % pkg.name,
level=DEBUG)
continue
else:
log("Restricted package '%s' is installed" % pkg.name,
level=WARNING)
self.delete_package(cache, pkg)
else:
log("Checking restricted virtual package '%s' provides" %
pkg.name, level=DEBUG)
self.delete_package(cache, pkg)
def delete_package(self, cache, pkg):
"""Deletes the package from the system.
Deletes the package from the system, properly handling virtual
packages.
:param cache: the apt cache
:param pkg: the package to remove
"""
if self.is_virtual_package(pkg):
log("Package '%s' appears to be virtual - purging provides" %
pkg.name, level=DEBUG)
for _p in pkg.provides_list:
self.delete_package(cache, _p[2].parent_pkg)
elif not pkg.current_ver:
log("Package '%s' not installed" % pkg.name, level=DEBUG)
return
else:
log("Purging package '%s'" % pkg.name, level=DEBUG)
apt_purge(pkg.name)
def is_virtual_package(self, pkg):
return pkg.has_provides and not pkg.has_versions

View File

@ -0,0 +1,550 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import grp
import os
import pwd
import re
from subprocess import (
CalledProcessError,
check_output,
check_call,
)
from traceback import format_exc
from six import string_types
from stat import (
S_ISGID,
S_ISUID
)
from charmhelpers.core.hookenv import (
log,
DEBUG,
INFO,
WARNING,
ERROR,
)
from charmhelpers.core import unitdata
from charmhelpers.core.host import file_hash
from charmhelpers.contrib.hardening.audits import BaseAudit
from charmhelpers.contrib.hardening.templating import (
get_template_path,
render_and_write,
)
from charmhelpers.contrib.hardening import utils
class BaseFileAudit(BaseAudit):
"""Base class for file audits.
Provides API stubs for the compliance check flow that must be implemented
by any class that inherits from this one.
"""
def __init__(self, paths, always_comply=False, *args, **kwargs):
"""
:param paths: string path or list of paths of files to which we want to
apply compliance checks or criteria.
:param always_comply: if True, compliance criteria are always applied;
otherwise compliance is skipped for non-existent
paths.
"""
super(BaseFileAudit, self).__init__(*args, **kwargs)
self.always_comply = always_comply
if isinstance(paths, string_types) or not hasattr(paths, '__iter__'):
self.paths = [paths]
else:
self.paths = paths
def ensure_compliance(self):
"""Ensure that the all registered files comply to registered criteria.
"""
for p in self.paths:
if os.path.exists(p):
if self.is_compliant(p):
continue
log('File %s is not in compliance.' % p, level=INFO)
else:
if not self.always_comply:
log("Non-existent path '%s' - skipping compliance check"
% (p), level=INFO)
continue
if self._take_action():
log("Applying compliance criteria to '%s'" % (p), level=INFO)
self.comply(p)
def is_compliant(self, path):
"""Audits the path to see if it is compliance.
:param path: the path to the file that should be checked.
"""
raise NotImplementedError
def comply(self, path):
"""Enforces the compliance of a path.
:param path: the path to the file that should be enforced.
"""
raise NotImplementedError
@classmethod
def _get_stat(cls, path):
"""Returns the Posix st_stat information for the specified file path.
:param path: the path to get the st_stat information for.
:returns: an st_stat object for the path or None if the path doesn't
exist.
"""
return os.stat(path)
class FilePermissionAudit(BaseFileAudit):
"""Implements an audit for file permissions and ownership for a user.
This class implements functionality that ensures that a specific user/group
will own the file(s) specified and that the permissions specified are
applied properly to the file.
"""
def __init__(self, paths, user, group=None, mode=0o600, **kwargs):
self.user = user
self.group = group
self.mode = mode
super(FilePermissionAudit, self).__init__(paths, user, group, mode,
**kwargs)
@property
def user(self):
return self._user
@user.setter
def user(self, name):
try:
user = pwd.getpwnam(name)
except KeyError:
log('Unknown user %s' % name, level=ERROR)
user = None
self._user = user
@property
def group(self):
return self._group
@group.setter
def group(self, name):
try:
group = None
if name:
group = grp.getgrnam(name)
else:
group = grp.getgrgid(self.user.pw_gid)
except KeyError:
log('Unknown group %s' % name, level=ERROR)
self._group = group
def is_compliant(self, path):
"""Checks if the path is in compliance.
Used to determine if the path specified meets the necessary
requirements to be in compliance with the check itself.
:param path: the file path to check
:returns: True if the path is compliant, False otherwise.
"""
stat = self._get_stat(path)
user = self.user
group = self.group
compliant = True
if stat.st_uid != user.pw_uid or stat.st_gid != group.gr_gid:
log('File %s is not owned by %s:%s.' % (path, user.pw_name,
group.gr_name),
level=INFO)
compliant = False
# POSIX refers to the st_mode bits as corresponding to both the
# file type and file permission bits, where the least significant 12
# bits (o7777) are the suid (11), sgid (10), sticky bits (9), and the
# file permission bits (8-0)
perms = stat.st_mode & 0o7777
if perms != self.mode:
log('File %s has incorrect permissions, currently set to %s' %
(path, oct(stat.st_mode & 0o7777)), level=INFO)
compliant = False
return compliant
def comply(self, path):
"""Issues a chown and chmod to the file paths specified."""
utils.ensure_permissions(path, self.user.pw_name, self.group.gr_name,
self.mode)
class DirectoryPermissionAudit(FilePermissionAudit):
"""Performs a permission check for the specified directory path."""
def __init__(self, paths, user, group=None, mode=0o600,
recursive=True, **kwargs):
super(DirectoryPermissionAudit, self).__init__(paths, user, group,
mode, **kwargs)
self.recursive = recursive
def is_compliant(self, path):
"""Checks if the directory is compliant.
Used to determine if the path specified and all of its children
directories are in compliance with the check itself.
:param path: the directory path to check
:returns: True if the directory tree is compliant, otherwise False.
"""
if not os.path.isdir(path):
log('Path specified %s is not a directory.' % path, level=ERROR)
raise ValueError("%s is not a directory." % path)
if not self.recursive:
return super(DirectoryPermissionAudit, self).is_compliant(path)
compliant = True
for root, dirs, _ in os.walk(path):
if len(dirs) > 0:
continue
if not super(DirectoryPermissionAudit, self).is_compliant(root):
compliant = False
continue
return compliant
def comply(self, path):
for root, dirs, _ in os.walk(path):
if len(dirs) > 0:
super(DirectoryPermissionAudit, self).comply(root)
class ReadOnly(BaseFileAudit):
"""Audits that files and folders are read only."""
def __init__(self, paths, *args, **kwargs):
super(ReadOnly, self).__init__(paths=paths, *args, **kwargs)
def is_compliant(self, path):
try:
output = check_output(['find', path, '-perm', '-go+w',
'-type', 'f']).strip()
# The find above will find any files which have permission sets
# which allow too broad of write access. As such, the path is
# compliant if there is no output.
if output:
return False
return True
except CalledProcessError as e:
log('Error occurred while finding writable files for %s. '
'Error information is: command %s failed with returncode '
'%d and output %s.\n%s' % (path, e.cmd, e.returncode, e.output,
format_exc(e)), level=ERROR)
return False
def comply(self, path):
try:
check_output(['chmod', 'go-w', '-R', path])
except CalledProcessError as e:
log('Error occurred removing writeable permissions for %s. '
'Error information is: command %s failed with returncode '
'%d and output %s.\n%s' % (path, e.cmd, e.returncode, e.output,
format_exc(e)), level=ERROR)
class NoReadWriteForOther(BaseFileAudit):
"""Ensures that the files found under the base path are readable or
writable by anyone other than the owner or the group.
"""
def __init__(self, paths):
super(NoReadWriteForOther, self).__init__(paths)
def is_compliant(self, path):
try:
cmd = ['find', path, '-perm', '-o+r', '-type', 'f', '-o',
'-perm', '-o+w', '-type', 'f']
output = check_output(cmd).strip()
# The find above here will find any files which have read or
# write permissions for other, meaning there is too broad of access
# to read/write the file. As such, the path is compliant if there's
# no output.
if output:
return False
return True
except CalledProcessError as e:
log('Error occurred while finding files which are readable or '
'writable to the world in %s. '
'Command output is: %s.' % (path, e.output), level=ERROR)
def comply(self, path):
try:
check_output(['chmod', '-R', 'o-rw', path])
except CalledProcessError as e:
log('Error occurred attempting to change modes of files under '
'path %s. Output of command is: %s' % (path, e.output))
class NoSUIDSGIDAudit(BaseFileAudit):
"""Audits that specified files do not have SUID/SGID bits set."""
def __init__(self, paths, *args, **kwargs):
super(NoSUIDSGIDAudit, self).__init__(paths=paths, *args, **kwargs)
def is_compliant(self, path):
stat = self._get_stat(path)
if (stat.st_mode & (S_ISGID | S_ISUID)) != 0:
return False
return True
def comply(self, path):
try:
log('Removing suid/sgid from %s.' % path, level=DEBUG)
check_output(['chmod', '-s', path])
except CalledProcessError as e:
log('Error occurred removing suid/sgid from %s.'
'Error information is: command %s failed with returncode '
'%d and output %s.\n%s' % (path, e.cmd, e.returncode, e.output,
format_exc(e)), level=ERROR)
class TemplatedFile(BaseFileAudit):
"""The TemplatedFileAudit audits the contents of a templated file.
This audit renders a file from a template, sets the appropriate file
permissions, then generates a hashsum with which to check the content
changed.
"""
def __init__(self, path, context, template_dir, mode, user='root',
group='root', service_actions=None, **kwargs):
self.context = context
self.user = user
self.group = group
self.mode = mode
self.template_dir = template_dir
self.service_actions = service_actions
super(TemplatedFile, self).__init__(paths=path, always_comply=True,
**kwargs)
def is_compliant(self, path):
"""Determines if the templated file is compliant.
A templated file is only compliant if it has not changed (as
determined by its sha256 hashsum) AND its file permissions are set
appropriately.
:param path: the path to check compliance.
"""
same_templates = self.templates_match(path)
same_content = self.contents_match(path)
same_permissions = self.permissions_match(path)
if same_content and same_permissions and same_templates:
return True
return False
def run_service_actions(self):
"""Run any actions on services requested."""
if not self.service_actions:
return
for svc_action in self.service_actions:
name = svc_action['service']
actions = svc_action['actions']
log("Running service '%s' actions '%s'" % (name, actions),
level=DEBUG)
for action in actions:
cmd = ['service', name, action]
try:
check_call(cmd)
except CalledProcessError as exc:
log("Service name='%s' action='%s' failed - %s" %
(name, action, exc), level=WARNING)
def comply(self, path):
"""Ensures the contents and the permissions of the file.
:param path: the path to correct
"""
dirname = os.path.dirname(path)
if not os.path.exists(dirname):
os.makedirs(dirname)
self.pre_write()
render_and_write(self.template_dir, path, self.context())
utils.ensure_permissions(path, self.user, self.group, self.mode)
self.run_service_actions()
self.save_checksum(path)
self.post_write()
def pre_write(self):
"""Invoked prior to writing the template."""
pass
def post_write(self):
"""Invoked after writing the template."""
pass
def templates_match(self, path):
"""Determines if the template files are the same.
The template file equality is determined by the hashsum of the
template files themselves. If there is no stored hashsum, the content
cannot be guaranteed to be unchanged, so treat it as if it has changed.
Otherwise, return whether or not the hashsums are the same.
:param path: the path to check
:returns: boolean
"""
template_path = get_template_path(self.template_dir, path)
key = 'hardening:template:%s' % template_path
template_checksum = file_hash(template_path)
kv = unitdata.kv()
stored_tmplt_checksum = kv.get(key)
if not stored_tmplt_checksum:
kv.set(key, template_checksum)
kv.flush()
log('Saved template checksum for %s.' % template_path,
level=DEBUG)
# Since we don't have a template checksum, assume it doesn't
# match and return that the template is different.
return False
elif stored_tmplt_checksum != template_checksum:
kv.set(key, template_checksum)
kv.flush()
log('Updated template checksum for %s.' % template_path,
level=DEBUG)
return False
# Here the template hasn't changed based upon the calculated
# checksum of the template and what was previously stored.
return True
def contents_match(self, path):
"""Determines if the file content is the same.
This is determined by comparing the hashsum of the file contents with
the saved hashsum. If there is no saved hashsum, the content cannot be
guaranteed to be the same, so treat it as if it has changed.
Otherwise, return True if the hashsums are the same, False if they
are not the same.
:param path: the file to check.
"""
checksum = file_hash(path)
kv = unitdata.kv()
stored_checksum = kv.get('hardening:%s' % path)
if not stored_checksum:
# If the checksum hasn't been generated, return False to ensure
# the file is written and the checksum stored.
log('Checksum for %s has not been calculated.' % path, level=DEBUG)
return False
elif stored_checksum != checksum:
log('Checksum mismatch for %s.' % path, level=DEBUG)
return False
return True
def permissions_match(self, path):
"""Determines if the file owner and permissions match.
:param path: the path to check.
"""
audit = FilePermissionAudit(path, self.user, self.group, self.mode)
return audit.is_compliant(path)
def save_checksum(self, path):
"""Calculates and saves the checksum for the path specified.
:param path: the path of the file to save the checksum.
"""
checksum = file_hash(path)
kv = unitdata.kv()
kv.set('hardening:%s' % path, checksum)
kv.flush()
class DeletedFile(BaseFileAudit):
"""Audit to ensure that a file is deleted."""
def __init__(self, paths):
super(DeletedFile, self).__init__(paths)
def is_compliant(self, path):
return not os.path.exists(path)
def comply(self, path):
os.remove(path)
class FileContentAudit(BaseFileAudit):
"""Audit the contents of a file."""
def __init__(self, paths, cases, **kwargs):
# Cases we expect to pass
self.pass_cases = cases.get('pass', [])
# Cases we expect to fail
self.fail_cases = cases.get('fail', [])
super(FileContentAudit, self).__init__(paths, **kwargs)
def is_compliant(self, path):
"""
Given a set of content matching cases, i.e. tuple(regex, bool), where the
bool value denotes whether or not the regex is expected to match, check that
all cases match as expected against the contents of the file. Cases can be
expected to pass or fail.
:param path: Path of file to check.
:returns: Boolean value representing whether or not all cases are
found to be compliant.
"""
log("Auditing contents of file '%s'" % (path), level=DEBUG)
with open(path, 'r') as fd:
contents = fd.read()
matches = 0
for pattern in self.pass_cases:
key = re.compile(pattern, flags=re.MULTILINE)
results = re.search(key, contents)
if results:
matches += 1
else:
log("Pattern '%s' was expected to pass but instead it failed"
% (pattern), level=WARNING)
for pattern in self.fail_cases:
key = re.compile(pattern, flags=re.MULTILINE)
results = re.search(key, contents)
if not results:
matches += 1
else:
log("Pattern '%s' was expected to fail but instead it passed"
% (pattern), level=WARNING)
total = len(self.pass_cases) + len(self.fail_cases)
log("Checked %s cases and %s passed" % (total, matches), level=DEBUG)
return matches == total
def comply(self, *args, **kwargs):
"""NOOP since we just issue warnings. This is to avoid the
NotImplementedError.
"""
log("Not applying any compliance criteria, only checks.", level=INFO)

View File

@ -0,0 +1,13 @@
# NOTE: this file contains the default configuration for the 'apache' hardening
# code. If you want to override any settings you must add them to a file
# called hardening.yaml in the root directory of your charm using the
# name 'apache' as the root key followed by any of the following with new
# values.
common:
apache_dir: '/etc/apache2'
hardening:
traceenable: 'off'
allowed_http_methods: "GET POST"
modules_to_disable: [ cgi, cgid ]

View File

@ -0,0 +1,9 @@
# NOTE: this schema must contain all valid keys from its associated defaults
# file. It is used to validate user-provided overrides.
common:
apache_dir:
traceenable:
hardening:
allowed_http_methods:
modules_to_disable:

View File

@ -0,0 +1,38 @@
# NOTE: this file contains the default configuration for the 'mysql' hardening
# code. If you want to override any settings you must add them to a file
# called hardening.yaml in the root directory of your charm using the
# name 'mysql' as the root key followed by any of the following with new
# values.
hardening:
mysql-conf: /etc/mysql/my.cnf
hardening-conf: /etc/mysql/conf.d/hardening.cnf
security:
# @see http://www.symantec.com/connect/articles/securing-mysql-step-step
# @see http://dev.mysql.com/doc/refman/5.7/en/server-options.html#option_mysqld_chroot
chroot: None
# @see http://dev.mysql.com/doc/refman/5.7/en/server-options.html#option_mysqld_safe-user-create
safe-user-create: 1
# @see http://dev.mysql.com/doc/refman/5.7/en/server-options.html#option_mysqld_secure-auth
secure-auth: 1
# @see http://dev.mysql.com/doc/refman/5.7/en/server-options.html#option_mysqld_symbolic-links
skip-symbolic-links: 1
# @see http://dev.mysql.com/doc/refman/5.7/en/server-options.html#option_mysqld_skip-show-database
skip-show-database: True
# @see http://dev.mysql.com/doc/refman/5.7/en/server-system-variables.html#sysvar_local_infile
local-infile: 0
# @see https://dev.mysql.com/doc/refman/5.7/en/server-options.html#option_mysqld_allow-suspicious-udfs
allow-suspicious-udfs: 0
# @see https://dev.mysql.com/doc/refman/5.7/en/server-system-variables.html#sysvar_automatic_sp_privileges
automatic-sp-privileges: 0
# @see https://dev.mysql.com/doc/refman/5.7/en/server-options.html#option_mysqld_secure-file-priv
secure-file-priv: /tmp

View File

@ -0,0 +1,15 @@
# NOTE: this schema must contain all valid keys from its associated defaults
# file. It is used to validate user-provided overrides.
hardening:
mysql-conf:
hardening-conf:
security:
chroot:
safe-user-create:
secure-auth:
skip-symbolic-links:
skip-show-database:
local-infile:
allow-suspicious-udfs:
automatic-sp-privileges:
secure-file-priv:

View File

@ -0,0 +1,67 @@
# NOTE: this file contains the default configuration for the 'os' hardening
# code. If you want to override any settings you must add them to a file
# called hardening.yaml in the root directory of your charm using the
# name 'os' as the root key followed by any of the following with new
# values.
general:
desktop_enable: False # (type:boolean)
environment:
extra_user_paths: []
umask: 027
root_path: /
auth:
pw_max_age: 60
# discourage password cycling
pw_min_age: 7
retries: 5
lockout_time: 600
timeout: 60
allow_homeless: False # (type:boolean)
pam_passwdqc_enable: True # (type:boolean)
pam_passwdqc_options: 'min=disabled,disabled,16,12,8'
root_ttys:
console
tty1
tty2
tty3
tty4
tty5
tty6
uid_min: 1000
gid_min: 1000
sys_uid_min: 100
sys_uid_max: 999
sys_gid_min: 100
sys_gid_max: 999
chfn_restrict:
security:
users_allow: []
suid_sgid_enforce: True # (type:boolean)
# user-defined blacklist and whitelist
suid_sgid_blacklist: []
suid_sgid_whitelist: []
# if this is True, remove any suid/sgid bits from files that were not in the whitelist
suid_sgid_dry_run_on_unknown: False # (type:boolean)
suid_sgid_remove_from_unknown: False # (type:boolean)
# remove packages with known issues
packages_clean: True # (type:boolean)
packages_list:
xinetd
inetd
ypserv
telnet-server
rsh-server
rsync
kernel_enable_module_loading: True # (type:boolean)
kernel_enable_core_dump: False # (type:boolean)
sysctl:
kernel_secure_sysrq: 244 # 4 + 16 + 32 + 64 + 128
kernel_enable_sysrq: False # (type:boolean)
forwarding: False # (type:boolean)
ipv6_enable: False # (type:boolean)
arp_restricted: True # (type:boolean)

View File

@ -0,0 +1,42 @@
# NOTE: this schema must contain all valid keys from its associated defaults
# file. It is used to validate user-provided overrides.
general:
desktop_enable:
environment:
extra_user_paths:
umask:
root_path:
auth:
pw_max_age:
pw_min_age:
retries:
lockout_time:
timeout:
allow_homeless:
pam_passwdqc_enable:
pam_passwdqc_options:
root_ttys:
uid_min:
gid_min:
sys_uid_min:
sys_uid_max:
sys_gid_min:
sys_gid_max:
chfn_restrict:
security:
users_allow:
suid_sgid_enforce:
suid_sgid_blacklist:
suid_sgid_whitelist:
suid_sgid_dry_run_on_unknown:
suid_sgid_remove_from_unknown:
packages_clean:
packages_list:
kernel_enable_module_loading:
kernel_enable_core_dump:
sysctl:
kernel_secure_sysrq:
kernel_enable_sysrq:
forwarding:
ipv6_enable:
arp_restricted:

View File

@ -0,0 +1,49 @@
# NOTE: this file contains the default configuration for the 'ssh' hardening
# code. If you want to override any settings you must add them to a file
# called hardening.yaml in the root directory of your charm using the
# name 'ssh' as the root key followed by any of the following with new
# values.
common:
service_name: 'ssh'
network_ipv6_enable: False # (type:boolean)
ports: [22]
remote_hosts: []
client:
package: 'openssh-client'
cbc_required: False # (type:boolean)
weak_hmac: False # (type:boolean)
weak_kex: False # (type:boolean)
roaming: False
password_authentication: 'no'
server:
host_key_files: ['/etc/ssh/ssh_host_rsa_key', '/etc/ssh/ssh_host_dsa_key',
'/etc/ssh/ssh_host_ecdsa_key']
cbc_required: False # (type:boolean)
weak_hmac: False # (type:boolean)
weak_kex: False # (type:boolean)
allow_root_with_key: False # (type:boolean)
allow_tcp_forwarding: 'no'
allow_agent_forwarding: 'no'
allow_x11_forwarding: 'no'
use_privilege_separation: 'sandbox'
listen_to: ['0.0.0.0']
use_pam: 'no'
package: 'openssh-server'
password_authentication: 'no'
alive_interval: '600'
alive_count: '3'
sftp_enable: False # (type:boolean)
sftp_group: 'sftponly'
sftp_chroot: '/home/%u'
deny_users: []
allow_users: []
deny_groups: []
allow_groups: []
print_motd: 'no'
print_last_log: 'no'
use_dns: 'no'
max_auth_tries: 2
max_sessions: 10

View File

@ -0,0 +1,42 @@
# NOTE: this schema must contain all valid keys from its associated defaults
# file. It is used to validate user-provided overrides.
common:
service_name:
network_ipv6_enable:
ports:
remote_hosts:
client:
package:
cbc_required:
weak_hmac:
weak_kex:
roaming:
password_authentication:
server:
host_key_files:
cbc_required:
weak_hmac:
weak_kex:
allow_root_with_key:
allow_tcp_forwarding:
allow_agent_forwarding:
allow_x11_forwarding:
use_privilege_separation:
listen_to:
use_pam:
package:
password_authentication:
alive_interval:
alive_count:
sftp_enable:
sftp_group:
sftp_chroot:
deny_users:
allow_users:
deny_groups:
allow_groups:
print_motd:
print_last_log:
use_dns:
max_auth_tries:
max_sessions:

View File

@ -0,0 +1,82 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
from collections import OrderedDict
from charmhelpers.core.hookenv import (
config,
log,
DEBUG,
WARNING,
)
from charmhelpers.contrib.hardening.host.checks import run_os_checks
from charmhelpers.contrib.hardening.ssh.checks import run_ssh_checks
from charmhelpers.contrib.hardening.mysql.checks import run_mysql_checks
from charmhelpers.contrib.hardening.apache.checks import run_apache_checks
def harden(overrides=None):
"""Hardening decorator.
This is the main entry point for running the hardening stack. In order to
run modules of the stack you must add this decorator to charm hook(s) and
ensure that your charm config.yaml contains the 'harden' option set to
one or more of the supported modules. Setting these will cause the
corresponding hardening code to be run when the hook fires.
This decorator can and should be applied to more than one hook or function
such that hardening modules are called multiple times. This is because
subsequent calls will perform auditing checks that will report any changes
to resources hardened by the first run (and possibly perform compliance
actions as a result of any detected infractions).
:param overrides: Optional list of stack modules used to override those
provided with 'harden' config.
:returns: Returns value returned by decorated function once executed.
"""
def _harden_inner1(f):
log("Hardening function '%s'" % (f.__name__), level=DEBUG)
def _harden_inner2(*args, **kwargs):
RUN_CATALOG = OrderedDict([('os', run_os_checks),
('ssh', run_ssh_checks),
('mysql', run_mysql_checks),
('apache', run_apache_checks)])
enabled = overrides or (config("harden") or "").split()
if enabled:
modules_to_run = []
# modules will always be performed in the following order
for module, func in six.iteritems(RUN_CATALOG):
if module in enabled:
enabled.remove(module)
modules_to_run.append(func)
if enabled:
log("Unknown hardening modules '%s' - ignoring" %
(', '.join(enabled)), level=WARNING)
for hardener in modules_to_run:
log("Executing hardening module '%s'" %
(hardener.__name__), level=DEBUG)
hardener()
else:
log("No hardening applied to '%s'" % (f.__name__), level=DEBUG)
return f(*args, **kwargs)
return _harden_inner2
return _harden_inner1
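A brief, hypothetical illustration (the hook name is not from this change): passing overrides forces specific modules to run regardless of the charm's 'harden' config value:

    from charmhelpers.contrib.hardening.harden import harden

    # Run only the 'ssh' and 'os' hardening modules for this hook,
    # ignoring whatever the 'harden' config option contains.
    @harden(overrides=['ssh', 'os'])
    def upgrade_charm():
        pass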

View File

@ -0,0 +1,17 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os import path
TEMPLATES_DIR = path.join(path.dirname(__file__), 'templates')

View File

@ -0,0 +1,48 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from charmhelpers.core.hookenv import (
log,
DEBUG,
)
from charmhelpers.contrib.hardening.host.checks import (
apt,
limits,
login,
minimize_access,
pam,
profile,
securetty,
suid_sgid,
sysctl
)
def run_os_checks():
log("Starting OS hardening checks.", level=DEBUG)
checks = apt.get_audits()
checks.extend(limits.get_audits())
checks.extend(login.get_audits())
checks.extend(minimize_access.get_audits())
checks.extend(pam.get_audits())
checks.extend(profile.get_audits())
checks.extend(securetty.get_audits())
checks.extend(suid_sgid.get_audits())
checks.extend(sysctl.get_audits())
for check in checks:
log("Running '%s' check" % (check.__class__.__name__), level=DEBUG)
check.ensure_compliance()
log("OS hardening checks complete.", level=DEBUG)

View File

@ -0,0 +1,37 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from charmhelpers.contrib.hardening.utils import get_settings
from charmhelpers.contrib.hardening.audits.apt import (
AptConfig,
RestrictedPackages,
)
def get_audits():
"""Get OS hardening apt audits.
:returns: list of audits
"""
audits = [AptConfig([{'key': 'APT::Get::AllowUnauthenticated',
'expected': 'false'}])]
settings = get_settings('os')
clean_packages = settings['security']['packages_clean']
if clean_packages:
security_packages = settings['security']['packages_list']
if security_packages:
audits.append(RestrictedPackages(security_packages))
return audits

View File

@ -0,0 +1,53 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from charmhelpers.contrib.hardening.audits.file import (
DirectoryPermissionAudit,
TemplatedFile,
)
from charmhelpers.contrib.hardening.host import TEMPLATES_DIR
from charmhelpers.contrib.hardening import utils
def get_audits():
"""Get OS hardening security limits audits.
:returns: list of audits
"""
audits = []
settings = utils.get_settings('os')
# Ensure that the /etc/security/limits.d directory is only writable
# by the root user, but others can execute and read.
audits.append(DirectoryPermissionAudit('/etc/security/limits.d',
user='root', group='root',
mode=0o755))
# If core dumps are not enabled, then don't allow core dumps to be
# created as they may contain sensitive information.
if not settings['security']['kernel_enable_core_dump']:
audits.append(TemplatedFile('/etc/security/limits.d/10.hardcore.conf',
SecurityLimitsContext(),
template_dir=TEMPLATES_DIR,
user='root', group='root', mode=0o0440))
return audits
class SecurityLimitsContext(object):
def __call__(self):
settings = utils.get_settings('os')
ctxt = {'disable_core_dump':
not settings['security']['kernel_enable_core_dump']}
return ctxt

View File

@ -0,0 +1,65 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from six import string_types
from charmhelpers.contrib.hardening.audits.file import TemplatedFile
from charmhelpers.contrib.hardening.host import TEMPLATES_DIR
from charmhelpers.contrib.hardening import utils
def get_audits():
"""Get OS hardening login.defs audits.
:returns: list of audits
"""
audits = [TemplatedFile('/etc/login.defs', LoginContext(),
template_dir=TEMPLATES_DIR,
user='root', group='root', mode=0o0444)]
return audits
class LoginContext(object):
def __call__(self):
settings = utils.get_settings('os')
# Octal numbers in yaml end up being turned into decimal,
# so check if the umask is entered as a string (e.g. '027')
# or as an octal umask as we know it (e.g. 002). If it's not
# a string assume it to be octal and turn it into an octal
# string.
umask = settings['environment']['umask']
if not isinstance(umask, string_types):
umask = '%s' % oct(umask)
ctxt = {
'additional_user_paths':
settings['environment']['extra_user_paths'],
'umask': umask,
'pwd_max_age': settings['auth']['pw_max_age'],
'pwd_min_age': settings['auth']['pw_min_age'],
'uid_min': settings['auth']['uid_min'],
'sys_uid_min': settings['auth']['sys_uid_min'],
'sys_uid_max': settings['auth']['sys_uid_max'],
'gid_min': settings['auth']['gid_min'],
'sys_gid_min': settings['auth']['sys_gid_min'],
'sys_gid_max': settings['auth']['sys_gid_max'],
'login_retries': settings['auth']['retries'],
'login_timeout': settings['auth']['timeout'],
'chfn_restrict': settings['auth']['chfn_restrict'],
'allow_login_without_home': settings['auth']['allow_homeless']
}
return ctxt

View File

@ -0,0 +1,50 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from charmhelpers.contrib.hardening.audits.file import (
FilePermissionAudit,
ReadOnly,
)
from charmhelpers.contrib.hardening import utils
def get_audits():
"""Get OS hardening access audits.
:returns: list of audits
"""
audits = []
settings = utils.get_settings('os')
# Remove write permissions from $PATH folders for all regular users.
# This prevents normal users from modifying system-wide commands.
path_folders = {'/usr/local/sbin',
'/usr/local/bin',
'/usr/sbin',
'/usr/bin',
'/bin'}
extra_user_paths = settings['environment']['extra_user_paths']
path_folders.update(extra_user_paths)
audits.append(ReadOnly(path_folders))
# Only allow the root user to have access to the shadow file.
audits.append(FilePermissionAudit('/etc/shadow', 'root', 'root', 0o0600))
if 'change_user' not in settings['security']['users_allow']:
# su should only be accessible to user and group root, unless it is
# expressly defined to allow users to change to root via the
# security_users_allow config option.
audits.append(FilePermissionAudit('/bin/su', 'root', 'root', 0o750))
return audits

View File

@ -0,0 +1,132 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import (
check_output,
CalledProcessError,
)
from charmhelpers.core.hookenv import (
log,
DEBUG,
ERROR,
)
from charmhelpers.fetch import (
apt_install,
apt_purge,
apt_update,
)
from charmhelpers.contrib.hardening.audits.file import (
TemplatedFile,
DeletedFile,
)
from charmhelpers.contrib.hardening import utils
from charmhelpers.contrib.hardening.host import TEMPLATES_DIR
def get_audits():
"""Get OS hardening PAM authentication audits.
:returns: list of audits
"""
audits = []
settings = utils.get_settings('os')
if settings['auth']['pam_passwdqc_enable']:
audits.append(PasswdqcPAM('/etc/passwdqc.conf'))
if settings['auth']['retries']:
audits.append(Tally2PAM('/usr/share/pam-configs/tally2'))
else:
audits.append(DeletedFile('/usr/share/pam-configs/tally2'))
return audits
class PasswdqcPAMContext(object):
def __call__(self):
ctxt = {}
settings = utils.get_settings('os')
ctxt['auth_pam_passwdqc_options'] = \
settings['auth']['pam_passwdqc_options']
return ctxt
class PasswdqcPAM(TemplatedFile):
"""The PAM Audit verifies the linux PAM settings."""
def __init__(self, path):
super(PasswdqcPAM, self).__init__(path=path,
template_dir=TEMPLATES_DIR,
context=PasswdqcPAMContext(),
user='root',
group='root',
mode=0o0640)
def pre_write(self):
# Always remove?
for pkg in ['libpam-ccreds', 'libpam-cracklib']:
log("Purging package '%s'" % pkg, level=DEBUG),
apt_purge(pkg)
apt_update(fatal=True)
for pkg in ['libpam-passwdqc']:
log("Installing package '%s'" % pkg, level=DEBUG),
apt_install(pkg)
def post_write(self):
"""Updates the PAM configuration after the file has been written"""
try:
check_output(['pam-auth-update', '--package'])
except CalledProcessError as e:
log('Error calling pam-auth-update: %s' % e, level=ERROR)
class Tally2PAMContext(object):
def __call__(self):
ctxt = {}
settings = utils.get_settings('os')
ctxt['auth_lockout_time'] = settings['auth']['lockout_time']
ctxt['auth_retries'] = settings['auth']['retries']
return ctxt
class Tally2PAM(TemplatedFile):
"""The PAM Audit verifies the linux PAM settings."""
def __init__(self, path):
super(Tally2PAM, self).__init__(path=path,
template_dir=TEMPLATES_DIR,
context=Tally2PAMContext(),
user='root',
group='root',
mode=0o0640)
def pre_write(self):
# Always remove?
apt_purge('libpam-ccreds')
apt_update(fatal=True)
apt_install('libpam-modules')
def post_write(self):
"""Updates the PAM configuration after the file has been written"""
try:
check_output(['pam-auth-update', '--package'])
except CalledProcessError as e:
log('Error calling pam-auth-update: %s' % e, level=ERROR)

View File

@ -0,0 +1,43 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from charmhelpers.contrib.hardening.audits.file import TemplatedFile
from charmhelpers.contrib.hardening.host import TEMPLATES_DIR
from charmhelpers.contrib.hardening import utils
def get_audits():
"""Get OS hardening profile audits.
:returns: list of audits
"""
audits = []
settings = utils.get_settings('os')
# If core dumps are not enabled, then don't allow core dumps to be
# created as they may contain sensitive information.
if not settings['security']['kernel_enable_core_dump']:
audits.append(TemplatedFile('/etc/profile.d/pinerolo_profile.sh',
ProfileContext(),
template_dir=TEMPLATES_DIR,
mode=0o0755, user='root', group='root'))
return audits
class ProfileContext(object):
def __call__(self):
ctxt = {}
return ctxt

View File

@ -0,0 +1,37 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from charmhelpers.contrib.hardening.audits.file import TemplatedFile
from charmhelpers.contrib.hardening.host import TEMPLATES_DIR
from charmhelpers.contrib.hardening import utils
def get_audits():
"""Get OS hardening Secure TTY audits.
:returns: list of audits
"""
audits = []
audits.append(TemplatedFile('/etc/securetty', SecureTTYContext(),
template_dir=TEMPLATES_DIR,
mode=0o0400, user='root', group='root'))
return audits
class SecureTTYContext(object):
def __call__(self):
settings = utils.get_settings('os')
ctxt = {'ttys': settings['auth']['root_ttys']}
return ctxt

View File

@ -0,0 +1,129 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess
from charmhelpers.core.hookenv import (
log,
INFO,
)
from charmhelpers.contrib.hardening.audits.file import NoSUIDSGIDAudit
from charmhelpers.contrib.hardening import utils
BLACKLIST = ['/usr/bin/rcp', '/usr/bin/rlogin', '/usr/bin/rsh',
'/usr/libexec/openssh/ssh-keysign',
'/usr/lib/openssh/ssh-keysign',
'/sbin/netreport',
'/usr/sbin/usernetctl',
'/usr/sbin/userisdnctl',
'/usr/sbin/pppd',
'/usr/bin/lockfile',
'/usr/bin/mail-lock',
'/usr/bin/mail-unlock',
'/usr/bin/mail-touchlock',
'/usr/bin/dotlockfile',
'/usr/bin/arping',
'/usr/sbin/uuidd',
'/usr/bin/mtr',
'/usr/lib/evolution/camel-lock-helper-1.2',
'/usr/lib/pt_chown',
'/usr/lib/eject/dmcrypt-get-device',
'/usr/lib/mc/cons.saver']
WHITELIST = ['/bin/mount', '/bin/ping', '/bin/su', '/bin/umount',
'/sbin/pam_timestamp_check', '/sbin/unix_chkpwd', '/usr/bin/at',
'/usr/bin/gpasswd', '/usr/bin/locate', '/usr/bin/newgrp',
'/usr/bin/passwd', '/usr/bin/ssh-agent',
'/usr/libexec/utempter/utempter', '/usr/sbin/lockdev',
'/usr/sbin/sendmail.sendmail', '/usr/bin/expiry',
'/bin/ping6', '/usr/bin/traceroute6.iputils',
'/sbin/mount.nfs', '/sbin/umount.nfs',
'/sbin/mount.nfs4', '/sbin/umount.nfs4',
'/usr/bin/crontab',
'/usr/bin/wall', '/usr/bin/write',
'/usr/bin/screen',
'/usr/bin/mlocate',
'/usr/bin/chage', '/usr/bin/chfn', '/usr/bin/chsh',
'/bin/fusermount',
'/usr/bin/pkexec',
'/usr/bin/sudo', '/usr/bin/sudoedit',
'/usr/sbin/postdrop', '/usr/sbin/postqueue',
'/usr/sbin/suexec',
'/usr/lib/squid/ncsa_auth', '/usr/lib/squid/pam_auth',
'/usr/kerberos/bin/ksu',
'/usr/sbin/ccreds_validate',
'/usr/bin/Xorg',
'/usr/bin/X',
'/usr/lib/dbus-1.0/dbus-daemon-launch-helper',
'/usr/lib/vte/gnome-pty-helper',
'/usr/lib/libvte9/gnome-pty-helper',
'/usr/lib/libvte-2.90-9/gnome-pty-helper']
def get_audits():
"""Get OS hardening suid/sgid audits.
:returns: list of audits
"""
checks = []
settings = utils.get_settings('os')
if not settings['security']['suid_sgid_enforce']:
log("Skipping suid/sgid hardening", level=INFO)
return checks
# Build the blacklist and whitelist of files for suid/sgid checks.
# There are a total of 4 lists:
# 1. the system blacklist
# 2. the system whitelist
# 3. the user blacklist
# 4. the user whitelist
#
# The blacklist is the set of paths which should NOT have the suid/sgid bit
# set and the whitelist is the set of paths which MAY have the suid/sgid
# bit setl. The user whitelist/blacklist effectively override the system
# whitelist/blacklist.
u_b = settings['security']['suid_sgid_blacklist']
u_w = settings['security']['suid_sgid_whitelist']
blacklist = set(BLACKLIST) - set(u_w + u_b)
whitelist = set(WHITELIST) - set(u_b + u_w)
checks.append(NoSUIDSGIDAudit(blacklist))
dry_run = settings['security']['suid_sgid_dry_run_on_unknown']
if settings['security']['suid_sgid_remove_from_unknown'] or dry_run:
# If the policy is a dry_run (e.g. complain only) or remove unknown
# suid/sgid bits then find all of the paths which have the suid/sgid
# bit set and then remove the whitelisted paths.
root_path = settings['environment']['root_path']
unknown_paths = find_paths_with_suid_sgid(root_path) - set(whitelist)
checks.append(NoSUIDSGIDAudit(unknown_paths, unless=dry_run))
return checks
def find_paths_with_suid_sgid(root_path):
"""Finds all paths/files which have an suid/sgid bit enabled.
Starting with the root_path, this will recursively find all paths which
have an suid or sgid bit set.
"""
cmd = ['find', root_path, '-perm', '-4000', '-o', '-perm', '-2000',
'-type', 'f', '!', '-path', '/proc/*', '-print']
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, _ = p.communicate()
return set(out.split('\n'))

View File

@ -0,0 +1,209 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import platform
import re
import six
import subprocess
from charmhelpers.core.hookenv import (
log,
INFO,
WARNING,
)
from charmhelpers.contrib.hardening import utils
from charmhelpers.contrib.hardening.audits.file import (
FilePermissionAudit,
TemplatedFile,
)
from charmhelpers.contrib.hardening.host import TEMPLATES_DIR
SYSCTL_DEFAULTS = """net.ipv4.ip_forward=%(net_ipv4_ip_forward)s
net.ipv6.conf.all.forwarding=%(net_ipv6_conf_all_forwarding)s
net.ipv4.conf.all.rp_filter=1
net.ipv4.conf.default.rp_filter=1
net.ipv4.icmp_echo_ignore_broadcasts=1
net.ipv4.icmp_ignore_bogus_error_responses=1
net.ipv4.icmp_ratelimit=100
net.ipv4.icmp_ratemask=88089
net.ipv6.conf.all.disable_ipv6=%(net_ipv6_conf_all_disable_ipv6)s
net.ipv4.tcp_timestamps=%(net_ipv4_tcp_timestamps)s
net.ipv4.conf.all.arp_ignore=%(net_ipv4_conf_all_arp_ignore)s
net.ipv4.conf.all.arp_announce=%(net_ipv4_conf_all_arp_announce)s
net.ipv4.tcp_rfc1337=1
net.ipv4.tcp_syncookies=1
net.ipv4.conf.all.shared_media=1
net.ipv4.conf.default.shared_media=1
net.ipv4.conf.all.accept_source_route=0
net.ipv4.conf.default.accept_source_route=0
net.ipv4.conf.all.accept_redirects=0
net.ipv4.conf.default.accept_redirects=0
net.ipv6.conf.all.accept_redirects=0
net.ipv6.conf.default.accept_redirects=0
net.ipv4.conf.all.secure_redirects=0
net.ipv4.conf.default.secure_redirects=0
net.ipv4.conf.all.send_redirects=0
net.ipv4.conf.default.send_redirects=0
net.ipv4.conf.all.log_martians=0
net.ipv6.conf.default.router_solicitations=0
net.ipv6.conf.default.accept_ra_rtr_pref=0
net.ipv6.conf.default.accept_ra_pinfo=0
net.ipv6.conf.default.accept_ra_defrtr=0
net.ipv6.conf.default.autoconf=0
net.ipv6.conf.default.dad_transmits=0
net.ipv6.conf.default.max_addresses=1
net.ipv6.conf.all.accept_ra=0
net.ipv6.conf.default.accept_ra=0
kernel.modules_disabled=%(kernel_modules_disabled)s
kernel.sysrq=%(kernel_sysrq)s
fs.suid_dumpable=%(fs_suid_dumpable)s
kernel.randomize_va_space=2
"""
def get_audits():
"""Get OS hardening sysctl audits.
:returns: list of audits
"""
audits = []
settings = utils.get_settings('os')
# Apply the sysctl settings which are configured to be applied.
audits.append(SysctlConf())
# Make sure that only root has access to the sysctl.conf file, and
# that it is read-only.
audits.append(FilePermissionAudit('/etc/sysctl.conf',
user='root',
group='root', mode=0o0440))
# If module loading is not enabled, then ensure that the modules
# file has the appropriate permissions and rebuild the initramfs
if not settings['security']['kernel_enable_module_loading']:
audits.append(ModulesTemplate())
return audits
class ModulesContext(object):
def __call__(self):
settings = utils.get_settings('os')
with open('/proc/cpuinfo', 'r') as fd:
cpuinfo = fd.readlines()
vendor = ''
for line in cpuinfo:
match = re.search(r"^vendor_id\s+:\s+(.+)", line)
if match:
vendor = match.group(1)
if vendor == "GenuineIntel":
vendor = "intel"
elif vendor == "AuthenticAMD":
vendor = "amd"
ctxt = {'arch': platform.processor(),
'cpuVendor': vendor,
'desktop_enable': settings['general']['desktop_enable']}
return ctxt
class ModulesTemplate(TemplatedFile):
    def __init__(self):
        super(ModulesTemplate, self).__init__('/etc/initramfs-tools/modules',
                                              ModulesContext(),
                                              template_dir=TEMPLATES_DIR,
                                              user='root', group='root',
                                              mode=0o0440)
def post_write(self):
subprocess.check_call(['update-initramfs', '-u'])
class SysCtlHardeningContext(object):
def __call__(self):
settings = utils.get_settings('os')
ctxt = {'sysctl': {}}
log("Applying sysctl settings", level=INFO)
extras = {'net_ipv4_ip_forward': 0,
'net_ipv6_conf_all_forwarding': 0,
'net_ipv6_conf_all_disable_ipv6': 1,
'net_ipv4_tcp_timestamps': 0,
'net_ipv4_conf_all_arp_ignore': 0,
'net_ipv4_conf_all_arp_announce': 0,
'kernel_sysrq': 0,
'fs_suid_dumpable': 0,
'kernel_modules_disabled': 1}
if settings['sysctl']['ipv6_enable']:
extras['net_ipv6_conf_all_disable_ipv6'] = 0
if settings['sysctl']['forwarding']:
extras['net_ipv4_ip_forward'] = 1
extras['net_ipv6_conf_all_forwarding'] = 1
if settings['sysctl']['arp_restricted']:
extras['net_ipv4_conf_all_arp_ignore'] = 1
extras['net_ipv4_conf_all_arp_announce'] = 2
if settings['security']['kernel_enable_module_loading']:
extras['kernel_modules_disabled'] = 0
if settings['sysctl']['kernel_enable_sysrq']:
sysrq_val = settings['sysctl']['kernel_secure_sysrq']
extras['kernel_sysrq'] = sysrq_val
if settings['security']['kernel_enable_core_dump']:
extras['fs_suid_dumpable'] = 1
settings.update(extras)
for d in (SYSCTL_DEFAULTS % settings).split():
d = d.strip().partition('=')
key = d[0].strip()
path = os.path.join('/proc/sys', key.replace('.', '/'))
if not os.path.exists(path):
log("Skipping '%s' since '%s' does not exist" % (key, path),
level=WARNING)
continue
ctxt['sysctl'][key] = d[2] or None
# Translate for python3
return {'sysctl_settings':
[(k, v) for k, v in six.iteritems(ctxt['sysctl'])]}
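# Illustrative example (hypothetical values): the context above produces
# something like
#   {'sysctl_settings': [('net.ipv4.ip_forward', '0'),
#                        ('kernel.sysrq', '0'), ...]}
# which the 99-juju-hardening.conf template below iterates over to emit one
# 'key=value' line per tuple.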
class SysctlConf(TemplatedFile):
"""An audit check for sysctl settings."""
def __init__(self):
self.conffile = '/etc/sysctl.d/99-juju-hardening.conf'
super(SysctlConf, self).__init__(self.conffile,
SysCtlHardeningContext(),
template_dir=TEMPLATES_DIR,
user='root', group='root',
mode=0o0440)
def post_write(self):
try:
subprocess.check_call(['sysctl', '-p', self.conffile])
except subprocess.CalledProcessError as e:
# NOTE: on some systems if sysctl cannot apply all settings it
# will return non-zero as well.
log("sysctl command returned an error (maybe some "
"keys could not be set) - %s" % (e),
level=WARNING)

View File

@ -0,0 +1,8 @@
###############################################################################
# WARNING: This configuration file is maintained by Juju. Local changes may
# be overwritten.
###############################################################################
{% if disable_core_dump -%}
# Prevent core dumps for all users. These are usually only needed by developers and may contain sensitive information.
* hard core 0
{% endif %}

View File

@ -0,0 +1,7 @@
###############################################################################
# WARNING: This configuration file is maintained by Juju. Local changes may
# be overwritten.
###############################################################################
{% for key, value in sysctl_settings -%}
{{ key }}={{ value }}
{% endfor -%}
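# Example rendered output (illustrative values only):
#   net.ipv4.ip_forward=0
#   net.ipv4.conf.all.rp_filter=1
#   kernel.randomize_va_space=2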

View File

@ -0,0 +1,349 @@
###############################################################################
# WARNING: This configuration file is maintained by Juju. Local changes may
# be overwritten.
###############################################################################
#
# /etc/login.defs - Configuration control definitions for the login package.
#
# Three items must be defined: MAIL_DIR, ENV_SUPATH, and ENV_PATH.
# If unspecified, some arbitrary (and possibly incorrect) value will
# be assumed. All other items are optional - if not specified then
# the described action or option will be inhibited.
#
# Comment lines (lines beginning with "#") and blank lines are ignored.
#
# Modified for Linux. --marekm
# REQUIRED for useradd/userdel/usermod
# Directory where mailboxes reside, _or_ name of file, relative to the
# home directory. If you _do_ define MAIL_DIR and MAIL_FILE,
# MAIL_DIR takes precedence.
#
# Essentially:
# - MAIL_DIR defines the location of users' mail spool files
# (for mbox use) by appending the username to MAIL_DIR as defined
# below.
# - MAIL_FILE defines the location of the user's mail spool files as the
# fully-qualified filename obtained by prepending the user home
# directory before $MAIL_FILE
#
# NOTE: This is no longer used for setting up the user's MAIL environment
# variable which is, starting from shadow 4.0.12-1 in Debian, entirely the
# job of the pam_mail PAM module
# See default PAM configuration files provided for
# login, su, etc.
#
# This is a temporary situation: setting these variables will soon
# move to /etc/default/useradd and the variables will then be
# no longer supported
MAIL_DIR /var/mail
#MAIL_FILE .mail
#
# Enable logging and display of /var/log/faillog login failure info.
# This option conflicts with the pam_tally PAM module.
#
FAILLOG_ENAB yes
#
# Enable display of unknown usernames when login failures are recorded.
#
# WARNING: Unknown usernames may become world readable.
# See #290803 and #298773 for details about how this could become a security
# concern
LOG_UNKFAIL_ENAB no
#
# Enable logging of successful logins
#
LOG_OK_LOGINS yes
#
# Enable "syslog" logging of su activity - in addition to sulog file logging.
# SYSLOG_SG_ENAB does the same for newgrp and sg.
#
SYSLOG_SU_ENAB yes
SYSLOG_SG_ENAB yes
#
# If defined, all su activity is logged to this file.
#
#SULOG_FILE /var/log/sulog
#
# If defined, file which maps tty line to TERM environment parameter.
# Each line of the file is in a format something like "vt100 tty01".
#
#TTYTYPE_FILE /etc/ttytype
#
# If defined, login failures will be logged here in a utmp format
# last, when invoked as lastb, will read /var/log/btmp, so...
#
FTMP_FILE /var/log/btmp
#
# If defined, the command name to display when running "su -". For
# example, if this is defined as "su" then a "ps" will display the
# command is "-su". If not defined, then "ps" would display the
# name of the shell actually being run, e.g. something like "-sh".
#
SU_NAME su
#
# If defined, file which inhibits all the usual chatter during the login
# sequence. If a full pathname, then hushed mode will be enabled if the
# user's name or shell are found in the file. If not a full pathname, then
# hushed mode will be enabled if the file exists in the user's home directory.
#
HUSHLOGIN_FILE .hushlogin
#HUSHLOGIN_FILE /etc/hushlogins
#
# *REQUIRED* The default PATH settings, for superuser and normal users.
#
# (they are minimal, add the rest in the shell startup files)
ENV_SUPATH PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
ENV_PATH PATH=/usr/local/bin:/usr/bin:/bin{% if additional_user_paths %}{{ additional_user_paths }}{% endif %}
#
# Terminal permissions
#
# TTYGROUP Login tty will be assigned this group ownership.
# TTYPERM Login tty will be set to this permission.
#
# If you have a "write" program which is "setgid" to a special group
# which owns the terminals, define TTYGROUP to the group number and
# TTYPERM to 0620. Otherwise leave TTYGROUP commented out and assign
# TTYPERM to either 622 or 600.
#
# In Debian /usr/bin/bsd-write or similar programs are setgid tty
# However, the default and recommended value for TTYPERM is still 0600
# to not allow anyone to write to anyone else's console or terminal
# Users can still allow other people to write them by issuing
# the "mesg y" command.
TTYGROUP tty
TTYPERM 0600
#
# Login configuration initializations:
#
# ERASECHAR Terminal ERASE character ('\010' = backspace).
# KILLCHAR Terminal KILL character ('\025' = CTRL/U).
# UMASK Default "umask" value.
#
# The ERASECHAR and KILLCHAR are used only on System V machines.
#
# UMASK is the default umask value for pam_umask and is used by
# useradd and newusers to set the mode of the new home directories.
# 022 is the "historical" value in Debian for UMASK
# 027, or even 077, could be considered better for privacy
# There is no One True Answer here : each sysadmin must make up his/her
# mind.
#
# If USERGROUPS_ENAB is set to "yes", that will modify this UMASK default value
# for private user groups, i. e. the uid is the same as gid, and username is
# the same as the primary group name: for these, the user permissions will be
# used as group permissions, e. g. 022 will become 002.
#
# Prefix these values with "0" to get octal, "0x" to get hexadecimal.
#
ERASECHAR 0177
KILLCHAR 025
UMASK {{ umask }}
# Enable setting of the umask group bits to be the same as owner bits (examples: `022` -> `002`, `077` -> `007`) for non-root users, if the uid is the same as gid, and username is the same as the primary group name.
# If set to yes, userdel will remove the user's group if it contains no more members, and useradd will create by default a group with the name of the user.
USERGROUPS_ENAB yes
#
# Password aging controls:
#
# PASS_MAX_DAYS Maximum number of days a password may be used.
# PASS_MIN_DAYS Minimum number of days allowed between password changes.
# PASS_WARN_AGE Number of days warning given before a password expires.
#
PASS_MAX_DAYS {{ pwd_max_age }}
PASS_MIN_DAYS {{ pwd_min_age }}
PASS_WARN_AGE 7
#
# Min/max values for automatic uid selection in useradd
#
UID_MIN {{ uid_min }}
UID_MAX 60000
# System accounts
SYS_UID_MIN {{ sys_uid_min }}
SYS_UID_MAX {{ sys_uid_max }}
# Min/max values for automatic gid selection in groupadd
GID_MIN {{ gid_min }}
GID_MAX 60000
# System accounts
SYS_GID_MIN {{ sys_gid_min }}
SYS_GID_MAX {{ sys_gid_max }}
#
# Max number of login retries if password is bad. This will most likely be
# overridden by PAM, since the default pam_unix module has its own built-in
# limit of 3 retries. However, this is a safe fallback in case you are using
# an authentication module that does not enforce PAM_MAXTRIES.
#
LOGIN_RETRIES {{ login_retries }}
#
# Max time in seconds for login
#
LOGIN_TIMEOUT {{ login_timeout }}
#
# Which fields may be changed by regular users using chfn - use
# any combination of letters "frwh" (full name, room number, work
# phone, home phone). If not defined, no changes are allowed.
# For backward compatibility, "yes" = "rwh" and "no" = "frwh".
#
{% if chfn_restrict %}
CHFN_RESTRICT {{ chfn_restrict }}
{% endif %}
#
# Should login be allowed if we can't cd to the home directory?
# Default is no.
#
DEFAULT_HOME {% if allow_login_without_home %} yes {% else %} no {% endif %}
#
# If defined, this command is run when removing a user.
# It should remove any at/cron/print jobs etc. owned by
# the user to be removed (passed as the first argument).
#
#USERDEL_CMD /usr/sbin/userdel_local
#
# Enable setting of the umask group bits to be the same as owner bits
# (examples: 022 -> 002, 077 -> 007) for non-root users, if the uid is
# the same as gid, and username is the same as the primary group name.
#
# If set to yes, userdel will remove the user's group if it contains no
# more members, and useradd will create by default a group with the name
# of the user.
#
USERGROUPS_ENAB yes
#
# Instead of the real user shell, the program specified by this parameter
# will be launched, although its visible name (argv[0]) will be the shell's.
# The program may do whatever it wants (logging, additional authentification,
# banner, ...) before running the actual shell.
#
# FAKE_SHELL /bin/fakeshell
#
# If defined, either full pathname of a file containing device names or
# a ":" delimited list of device names. Root logins will be allowed only
# upon these devices.
#
# This variable is used by login and su.
#
#CONSOLE /etc/consoles
#CONSOLE console:tty01:tty02:tty03:tty04
#
# List of groups to add to the user's supplementary group set
# when logging in on the console (as determined by the CONSOLE
# setting). Default is none.
#
# Use with caution - it is possible for users to gain permanent
# access to these groups, even when not logged in on the console.
# How to do it is left as an exercise for the reader...
#
# This variable is used by login and su.
#
#CONSOLE_GROUPS floppy:audio:cdrom
#
# If set to "yes", new passwords will be encrypted using the MD5-based
# algorithm compatible with the one used by recent releases of FreeBSD.
# It supports passwords of unlimited length and longer salt strings.
# Set to "no" if you need to copy encrypted passwords to other systems
# which don't understand the new algorithm. Default is "no".
#
# This variable is deprecated. You should use ENCRYPT_METHOD.
#
MD5_CRYPT_ENAB no
#
# If set to MD5 , MD5-based algorithm will be used for encrypting password
# If set to SHA256, SHA256-based algorithm will be used for encrypting password
# If set to SHA512, SHA512-based algorithm will be used for encrypting password
# If set to DES, DES-based algorithm will be used for encrypting password (default)
# Overrides the MD5_CRYPT_ENAB option
#
# Note: It is recommended to use a value consistent with
# the PAM modules configuration.
#
ENCRYPT_METHOD SHA512
#
# Only used if ENCRYPT_METHOD is set to SHA256 or SHA512.
#
# Define the number of SHA rounds.
# With a lot of rounds, it is more difficult to brute-force the password.
# But note also that more CPU resources will be needed to authenticate
# users.
#
# If not specified, the libc will choose the default number of rounds (5000).
# The values must be inside the 1000-999999999 range.
# If only one of the MIN or MAX values is set, then this value will be used.
# If MIN > MAX, the highest value will be used.
#
# SHA_CRYPT_MIN_ROUNDS 5000
# SHA_CRYPT_MAX_ROUNDS 5000
################# OBSOLETED BY PAM ##############
# #
# These options are now handled by PAM. Please #
# edit the appropriate file in /etc/pam.d/ to #
# enable the equivalents of them.
#
###############
#MOTD_FILE
#DIALUPS_CHECK_ENAB
#LASTLOG_ENAB
#MAIL_CHECK_ENAB
#OBSCURE_CHECKS_ENAB
#PORTTIME_CHECKS_ENAB
#SU_WHEEL_ONLY
#CRACKLIB_DICTPATH
#PASS_CHANGE_TRIES
#PASS_ALWAYS_WARN
#ENVIRON_FILE
#NOLOGINS_FILE
#ISSUE_FILE
#PASS_MIN_LEN
#PASS_MAX_LEN
#ULIMIT
#ENV_HZ
#CHFN_AUTH
#CHSH_AUTH
#FAIL_DELAY
################# OBSOLETED #######################
# #
# These options are no more handled by shadow. #
# #
# Shadow utilities will display a warning if they #
# still appear. #
# #
###################################################
# CLOSE_SESSIONS
# LOGIN_STRING
# NO_PASSWORD_CONSOLE
# QMAIL_DIR

View File

@ -0,0 +1,117 @@
###############################################################################
# WARNING: This configuration file is maintained by Juju. Local changes may
# be overwritten.
###############################################################################
# /etc/modules: kernel modules to load at boot time.
#
# This file contains the names of kernel modules that should be loaded
# at boot time, one per line. Lines beginning with "#" are ignored.
# Parameters can be specified after the module name.
# Arch
# ----
#
# Modules for certain builds; contains support modules and some CPU-specific optimizations.
{% if arch == "x86_64" -%}
# Optimize for x86_64 cryptographic features
twofish-x86_64-3way
twofish-x86_64
aes-x86_64
salsa20-x86_64
blowfish-x86_64
{% endif -%}
{% if cpuVendor == "intel" -%}
# Intel-specific optimizations
ghash-clmulni-intel
aesni-intel
kvm-intel
{% endif -%}
{% if cpuVendor == "amd" -%}
# AMD-specific optimizations
kvm-amd
{% endif -%}
kvm
# Crypto
# ------
# Some core modules which comprise strong cryptography.
blowfish_common
blowfish_generic
ctr
cts
lrw
lzo
rmd160
rmd256
rmd320
serpent
sha512_generic
twofish_common
twofish_generic
xts
zlib
# Drivers
# -------
# Basics
lp
rtc
loop
# Filesystems
ext2
btrfs
{% if desktop_enable -%}
# Desktop
psmouse
snd
snd_ac97_codec
snd_intel8x0
snd_page_alloc
snd_pcm
snd_timer
soundcore
usbhid
{% endif -%}
# Lib
# ---
xz
# Net
# ---
# All packets needed for netfilter rules (ie iptables, ebtables).
ip_tables
x_tables
iptable_filter
iptable_nat
# Targets
ipt_LOG
ipt_REJECT
# Modules
xt_connlimit
xt_tcpudp
xt_recent
xt_limit
xt_conntrack
nf_conntrack
nf_conntrack_ipv4
nf_defrag_ipv4
xt_state
nf_nat
# Addons
xt_pknock

View File

@ -0,0 +1,11 @@
###############################################################################
# WARNING: This configuration file is maintained by Juju. Local changes may
# be overwritten.
###############################################################################
Name: passwdqc password strength enforcement
Default: yes
Priority: 1024
Conflicts: cracklib
Password-Type: Primary
Password:
requisite pam_passwdqc.so {{ auth_pam_passwdqc_options }}

View File

@ -0,0 +1,8 @@
###############################################################################
# WARNING: This configuration file is maintained by Juju. Local changes may
# be overwritten.
###############################################################################
# Disable core dumps via soft limits for all users. Compliance to this setting
# is voluntary and can be modified by users up to a hard limit. This setting is
# a sane default.
ulimit -S -c 0 > /dev/null 2>&1

View File

@ -0,0 +1,11 @@
###############################################################################
# WARNING: This configuration file is maintained by Juju. Local changes may
# be overwritten.
###############################################################################
# A list of TTYs, from which root can log in
# see `man securetty` for reference
{% if ttys -%}
{% for tty in ttys -%}
{{ tty }}
{% endfor -%}
{% endif -%}

View File

@ -0,0 +1,14 @@
###############################################################################
# WARNING: This configuration file is maintained by Juju. Local changes may
# be overwritten.
###############################################################################
Name: tally2 lockout after failed attempts enforcement
Default: yes
Priority: 1024
Conflicts: cracklib
Auth-Type: Primary
Auth-Initial:
required pam_tally2.so deny={{ auth_retries }} onerr=fail unlock_time={{ auth_lockout_time }}
Account-Type: Primary
Account-Initial:
required pam_tally2.so

View File

@ -0,0 +1,17 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os import path
TEMPLATES_DIR = path.join(path.dirname(__file__), 'templates')

View File

@ -0,0 +1,29 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from charmhelpers.core.hookenv import (
log,
DEBUG,
)
from charmhelpers.contrib.hardening.mysql.checks import config
def run_mysql_checks():
log("Starting MySQL hardening checks.", level=DEBUG)
checks = config.get_audits()
for check in checks:
log("Running '%s' check" % (check.__class__.__name__), level=DEBUG)
check.ensure_compliance()
log("MySQL hardening checks complete.", level=DEBUG)

View File

@ -0,0 +1,87 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
import subprocess
from charmhelpers.core.hookenv import (
log,
WARNING,
)
from charmhelpers.contrib.hardening.audits.file import (
FilePermissionAudit,
DirectoryPermissionAudit,
TemplatedFile,
)
from charmhelpers.contrib.hardening.mysql import TEMPLATES_DIR
from charmhelpers.contrib.hardening import utils
def get_audits():
"""Get MySQL hardening config audits.
:returns: list of audits
"""
if subprocess.call(['which', 'mysql'], stdout=subprocess.PIPE) != 0:
log("MySQL does not appear to be installed on this node - "
"skipping mysql hardening", level=WARNING)
return []
settings = utils.get_settings('mysql')
hardening_settings = settings['hardening']
my_cnf = hardening_settings['mysql-conf']
audits = [
FilePermissionAudit(paths=[my_cnf], user='root',
group='root', mode=0o0600),
TemplatedFile(hardening_settings['hardening-conf'],
MySQLConfContext(),
TEMPLATES_DIR,
mode=0o0750,
user='mysql',
group='root',
service_actions=[{'service': 'mysql',
'actions': ['restart']}]),
# MySQL and Percona charms do not allow configuration of the
# data directory, so use the default.
DirectoryPermissionAudit('/var/lib/mysql',
user='mysql',
group='mysql',
recursive=False,
mode=0o755),
DirectoryPermissionAudit('/etc/mysql',
user='root',
group='root',
recursive=False,
mode=0o700),
]
return audits
class MySQLConfContext(object):
"""Defines the set of key/value pairs to set in a mysql config file.
This context, when called, will return a dictionary containing the
key/value pairs of settings to specify in the
/etc/mysql/conf.d/hardening.cnf file.
"""
def __call__(self):
settings = utils.get_settings('mysql')
# Translate for python3
return {'mysql_settings':
[(k, v) for k, v in six.iteritems(settings['security'])]}

View File

@ -0,0 +1,12 @@
###############################################################################
# WARNING: This configuration file is maintained by Juju. Local changes may
# be overwritten.
###############################################################################
[mysqld]
{% for setting, value in mysql_settings -%}
{% if value == 'True' -%}
{{ setting }}
{% elif value != 'None' and value != None -%}
{{ setting }} = {{ value }}
{% endif -%}
{% endfor -%}
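# Example rendered output (hypothetical setting names, for illustration):
#   [mysqld]
#   some-boolean-flag
#   some-option = some-value
# A value of 'True' renders the setting as a bare flag; any other non-None
# value renders as 'setting = value'.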

View File

@ -0,0 +1,17 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os import path
TEMPLATES_DIR = path.join(path.dirname(__file__), 'templates')

View File

@ -0,0 +1,29 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from charmhelpers.core.hookenv import (
log,
DEBUG,
)
from charmhelpers.contrib.hardening.ssh.checks import config
def run_ssh_checks():
log("Starting SSH hardening checks.", level=DEBUG)
checks = config.get_audits()
for check in checks:
log("Running '%s' check" % (check.__class__.__name__), level=DEBUG)
check.ensure_compliance()
log("SSH hardening checks complete.", level=DEBUG)

View File

@ -0,0 +1,427 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from charmhelpers.contrib.network.ip import (
get_address_in_network,
get_iface_addr,
is_ip,
)
from charmhelpers.core.hookenv import (
log,
DEBUG,
)
from charmhelpers.fetch import (
apt_install,
apt_update,
)
from charmhelpers.core.host import lsb_release
from charmhelpers.contrib.hardening.audits.file import (
TemplatedFile,
FileContentAudit,
)
from charmhelpers.contrib.hardening.ssh import TEMPLATES_DIR
from charmhelpers.contrib.hardening import utils
def get_audits():
"""Get SSH hardening config audits.
:returns: list of audits
"""
audits = [SSHConfig(), SSHDConfig(), SSHConfigFileContentAudit(),
SSHDConfigFileContentAudit()]
return audits
class SSHConfigContext(object):
type = 'client'
def get_macs(self, allow_weak_mac):
if allow_weak_mac:
weak_macs = 'weak'
else:
weak_macs = 'default'
default = 'hmac-sha2-512,hmac-sha2-256,hmac-ripemd160'
macs = {'default': default,
'weak': default + ',hmac-sha1'}
default = ('hmac-sha2-512-etm@openssh.com,'
'hmac-sha2-256-etm@openssh.com,'
'hmac-ripemd160-etm@openssh.com,umac-128-etm@openssh.com,'
'hmac-sha2-512,hmac-sha2-256,hmac-ripemd160')
macs_66 = {'default': default,
'weak': default + ',hmac-sha1'}
# Use newer MACs on Ubuntu Trusty and above
if lsb_release()['DISTRIB_CODENAME'].lower() >= 'trusty':
log("Detected Ubuntu 14.04 or newer, using new macs", level=DEBUG)
macs = macs_66
return macs[weak_macs]
def get_kexs(self, allow_weak_kex):
if allow_weak_kex:
weak_kex = 'weak'
else:
weak_kex = 'default'
default = 'diffie-hellman-group-exchange-sha256'
weak = (default + ',diffie-hellman-group14-sha1,'
'diffie-hellman-group-exchange-sha1,'
'diffie-hellman-group1-sha1')
kex = {'default': default,
'weak': weak}
default = ('curve25519-sha256@libssh.org,'
'diffie-hellman-group-exchange-sha256')
weak = (default + ',diffie-hellman-group14-sha1,'
'diffie-hellman-group-exchange-sha1,'
'diffie-hellman-group1-sha1')
kex_66 = {'default': default,
'weak': weak}
# Use newer kex on Ubuntu Trusty and above
if lsb_release()['DISTRIB_CODENAME'].lower() >= 'trusty':
log('Detected Ubuntu 14.04 or newer, using new key exchange '
'algorithms', level=DEBUG)
kex = kex_66
return kex[weak_kex]
def get_ciphers(self, cbc_required):
if cbc_required:
weak_ciphers = 'weak'
else:
weak_ciphers = 'default'
default = 'aes256-ctr,aes192-ctr,aes128-ctr'
cipher = {'default': default,
                  'weak': default + ',aes256-cbc,aes192-cbc,aes128-cbc'}
default = ('chacha20-poly1305@openssh.com,aes256-gcm@openssh.com,'
'aes128-gcm@openssh.com,aes256-ctr,aes192-ctr,aes128-ctr')
ciphers_66 = {'default': default,
'weak': default + ',aes256-cbc,aes192-cbc,aes128-cbc'}
# Use newer ciphers on Ubuntu Trusty and above
if lsb_release()['DISTRIB_CODENAME'].lower() >= 'trusty':
log('Detected Ubuntu 14.04 or newer, using new ciphers',
level=DEBUG)
cipher = ciphers_66
return cipher[weak_ciphers]
def get_listening(self, listen=['0.0.0.0']):
"""Returns a list of addresses SSH can list on
Turns input into a sensible list of IPs SSH can listen on. Input
must be a python list of interface names, IPs and/or CIDRs.
:param listen: list of IPs, CIDRs, interface names
:returns: list of IPs available on the host
"""
if listen == ['0.0.0.0']:
return listen
value = []
for network in listen:
try:
ip = get_address_in_network(network=network, fatal=True)
except ValueError:
if is_ip(network):
ip = network
else:
try:
ip = get_iface_addr(iface=network, fatal=False)[0]
except IndexError:
continue
value.append(ip)
if value == []:
return ['0.0.0.0']
return value
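# Illustrative example (hypothetical inputs): given
#   listen = ['10.0.0.0/24', 'eth1', '192.168.1.5']
# the CIDR is resolved to a local address in that network, the first
# address on eth1 is used, and the literal IP passes straight through,
# yielding something like ['10.0.0.7', '172.16.0.2', '192.168.1.5'].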
def __call__(self):
settings = utils.get_settings('ssh')
if settings['common']['network_ipv6_enable']:
addr_family = 'any'
else:
addr_family = 'inet'
ctxt = {
'addr_family': addr_family,
'remote_hosts': settings['common']['remote_hosts'],
'password_auth_allowed':
settings['client']['password_authentication'],
'ports': settings['common']['ports'],
'ciphers': self.get_ciphers(settings['client']['cbc_required']),
'macs': self.get_macs(settings['client']['weak_hmac']),
'kexs': self.get_kexs(settings['client']['weak_kex']),
'roaming': settings['client']['roaming'],
}
return ctxt
class SSHConfig(TemplatedFile):
def __init__(self):
path = '/etc/ssh/ssh_config'
super(SSHConfig, self).__init__(path=path,
template_dir=TEMPLATES_DIR,
context=SSHConfigContext(),
user='root',
group='root',
mode=0o0644)
def pre_write(self):
settings = utils.get_settings('ssh')
apt_update(fatal=True)
apt_install(settings['client']['package'])
if not os.path.exists('/etc/ssh'):
os.makedirs('/etc/ssh')
# NOTE: don't recurse
utils.ensure_permissions('/etc/ssh', 'root', 'root', 0o0755,
maxdepth=0)
def post_write(self):
# NOTE: don't recurse
utils.ensure_permissions('/etc/ssh', 'root', 'root', 0o0755,
maxdepth=0)
class SSHDConfigContext(SSHConfigContext):
type = 'server'
def __call__(self):
settings = utils.get_settings('ssh')
if settings['common']['network_ipv6_enable']:
addr_family = 'any'
else:
addr_family = 'inet'
ctxt = {
'ssh_ip': self.get_listening(settings['server']['listen_to']),
'password_auth_allowed':
settings['server']['password_authentication'],
'ports': settings['common']['ports'],
'addr_family': addr_family,
'ciphers': self.get_ciphers(settings['server']['cbc_required']),
'macs': self.get_macs(settings['server']['weak_hmac']),
'kexs': self.get_kexs(settings['server']['weak_kex']),
'host_key_files': settings['server']['host_key_files'],
'allow_root_with_key': settings['server']['allow_root_with_key'],
'password_authentication':
settings['server']['password_authentication'],
'use_priv_sep': settings['server']['use_privilege_separation'],
'use_pam': settings['server']['use_pam'],
'allow_x11_forwarding': settings['server']['allow_x11_forwarding'],
'print_motd': settings['server']['print_motd'],
'print_last_log': settings['server']['print_last_log'],
'client_alive_interval':
settings['server']['alive_interval'],
'client_alive_count': settings['server']['alive_count'],
'allow_tcp_forwarding': settings['server']['allow_tcp_forwarding'],
'allow_agent_forwarding':
settings['server']['allow_agent_forwarding'],
'deny_users': settings['server']['deny_users'],
'allow_users': settings['server']['allow_users'],
'deny_groups': settings['server']['deny_groups'],
'allow_groups': settings['server']['allow_groups'],
'use_dns': settings['server']['use_dns'],
'sftp_enable': settings['server']['sftp_enable'],
'sftp_group': settings['server']['sftp_group'],
'sftp_chroot': settings['server']['sftp_chroot'],
'max_auth_tries': settings['server']['max_auth_tries'],
'max_sessions': settings['server']['max_sessions'],
}
return ctxt
class SSHDConfig(TemplatedFile):
def __init__(self):
path = '/etc/ssh/sshd_config'
super(SSHDConfig, self).__init__(path=path,
template_dir=TEMPLATES_DIR,
context=SSHDConfigContext(),
user='root',
group='root',
mode=0o0600,
service_actions=[{'service': 'ssh',
'actions':
['restart']}])
def pre_write(self):
settings = utils.get_settings('ssh')
apt_update(fatal=True)
apt_install(settings['server']['package'])
if not os.path.exists('/etc/ssh'):
os.makedirs('/etc/ssh')
# NOTE: don't recurse
utils.ensure_permissions('/etc/ssh', 'root', 'root', 0o0755,
maxdepth=0)
def post_write(self):
# NOTE: don't recurse
utils.ensure_permissions('/etc/ssh', 'root', 'root', 0o0755,
maxdepth=0)
class SSHConfigFileContentAudit(FileContentAudit):
def __init__(self):
self.path = '/etc/ssh/ssh_config'
super(SSHConfigFileContentAudit, self).__init__(self.path, {})
def is_compliant(self, *args, **kwargs):
self.pass_cases = []
self.fail_cases = []
settings = utils.get_settings('ssh')
if lsb_release()['DISTRIB_CODENAME'].lower() >= 'trusty':
if not settings['server']['weak_hmac']:
self.pass_cases.append(r'^MACs.+,hmac-ripemd160$')
else:
self.pass_cases.append(r'^MACs.+,hmac-sha1$')
if settings['server']['weak_kex']:
self.fail_cases.append(r'^KexAlgorithms\sdiffie-hellman-group-exchange-sha256[,\s]?') # noqa
self.pass_cases.append(r'^KexAlgorithms\sdiffie-hellman-group14-sha1[,\s]?') # noqa
self.pass_cases.append(r'^KexAlgorithms\sdiffie-hellman-group-exchange-sha1[,\s]?') # noqa
self.pass_cases.append(r'^KexAlgorithms\sdiffie-hellman-group1-sha1[,\s]?') # noqa
else:
self.pass_cases.append(r'^KexAlgorithms.+,diffie-hellman-group-exchange-sha256$') # noqa
self.fail_cases.append(r'^KexAlgorithms.*diffie-hellman-group14-sha1[,\s]?') # noqa
if settings['server']['cbc_required']:
self.pass_cases.append(r'^Ciphers\s.*-cbc[,\s]?')
self.fail_cases.append(r'^Ciphers\s.*aes128-ctr[,\s]?')
self.fail_cases.append(r'^Ciphers\s.*aes192-ctr[,\s]?')
self.fail_cases.append(r'^Ciphers\s.*aes256-ctr[,\s]?')
else:
self.fail_cases.append(r'^Ciphers\s.*-cbc[,\s]?')
self.pass_cases.append(r'^Ciphers\schacha20-poly1305@openssh.com,.+') # noqa
self.pass_cases.append(r'^Ciphers\s.*aes128-ctr$')
self.pass_cases.append(r'^Ciphers\s.*aes192-ctr[,\s]?')
self.pass_cases.append(r'^Ciphers\s.*aes256-ctr[,\s]?')
else:
if not settings['client']['weak_hmac']:
self.fail_cases.append(r'^MACs.+,hmac-sha1$')
else:
self.pass_cases.append(r'^MACs.+,hmac-sha1$')
if settings['client']['weak_kex']:
self.fail_cases.append(r'^KexAlgorithms\sdiffie-hellman-group-exchange-sha256[,\s]?') # noqa
self.pass_cases.append(r'^KexAlgorithms\sdiffie-hellman-group14-sha1[,\s]?') # noqa
self.pass_cases.append(r'^KexAlgorithms\sdiffie-hellman-group-exchange-sha1[,\s]?') # noqa
self.pass_cases.append(r'^KexAlgorithms\sdiffie-hellman-group1-sha1[,\s]?') # noqa
else:
self.pass_cases.append(r'^KexAlgorithms\sdiffie-hellman-group-exchange-sha256$') # noqa
self.fail_cases.append(r'^KexAlgorithms\sdiffie-hellman-group14-sha1[,\s]?') # noqa
self.fail_cases.append(r'^KexAlgorithms\sdiffie-hellman-group-exchange-sha1[,\s]?') # noqa
self.fail_cases.append(r'^KexAlgorithms\sdiffie-hellman-group1-sha1[,\s]?') # noqa
if settings['client']['cbc_required']:
self.pass_cases.append(r'^Ciphers\s.*-cbc[,\s]?')
self.fail_cases.append(r'^Ciphers\s.*aes128-ctr[,\s]?')
self.fail_cases.append(r'^Ciphers\s.*aes192-ctr[,\s]?')
self.fail_cases.append(r'^Ciphers\s.*aes256-ctr[,\s]?')
else:
self.fail_cases.append(r'^Ciphers\s.*-cbc[,\s]?')
self.pass_cases.append(r'^Ciphers\s.*aes128-ctr[,\s]?')
self.pass_cases.append(r'^Ciphers\s.*aes192-ctr[,\s]?')
self.pass_cases.append(r'^Ciphers\s.*aes256-ctr[,\s]?')
if settings['client']['roaming']:
self.pass_cases.append(r'^UseRoaming yes$')
else:
self.fail_cases.append(r'^UseRoaming yes$')
return super(SSHConfigFileContentAudit, self).is_compliant(*args,
**kwargs)
class SSHDConfigFileContentAudit(FileContentAudit):
def __init__(self):
self.path = '/etc/ssh/sshd_config'
super(SSHDConfigFileContentAudit, self).__init__(self.path, {})
def is_compliant(self, *args, **kwargs):
self.pass_cases = []
self.fail_cases = []
settings = utils.get_settings('ssh')
if lsb_release()['DISTRIB_CODENAME'].lower() >= 'trusty':
if not settings['server']['weak_hmac']:
self.pass_cases.append(r'^MACs.+,hmac-ripemd160$')
else:
self.pass_cases.append(r'^MACs.+,hmac-sha1$')
if settings['server']['weak_kex']:
self.fail_cases.append(r'^KexAlgorithms\sdiffie-hellman-group-exchange-sha256[,\s]?') # noqa
self.pass_cases.append(r'^KexAlgorithms\sdiffie-hellman-group14-sha1[,\s]?') # noqa
self.pass_cases.append(r'^KexAlgorithms\sdiffie-hellman-group-exchange-sha1[,\s]?') # noqa
self.pass_cases.append(r'^KexAlgorithms\sdiffie-hellman-group1-sha1[,\s]?') # noqa
else:
self.pass_cases.append(r'^KexAlgorithms.+,diffie-hellman-group-exchange-sha256$') # noqa
self.fail_cases.append(r'^KexAlgorithms.*diffie-hellman-group14-sha1[,\s]?') # noqa
if settings['server']['cbc_required']:
self.pass_cases.append(r'^Ciphers\s.*-cbc[,\s]?')
self.fail_cases.append(r'^Ciphers\s.*aes128-ctr[,\s]?')
self.fail_cases.append(r'^Ciphers\s.*aes192-ctr[,\s]?')
self.fail_cases.append(r'^Ciphers\s.*aes256-ctr[,\s]?')
else:
self.fail_cases.append(r'^Ciphers\s.*-cbc[,\s]?')
self.pass_cases.append(r'^Ciphers\schacha20-poly1305@openssh.com,.+') # noqa
self.pass_cases.append(r'^Ciphers\s.*aes128-ctr$')
self.pass_cases.append(r'^Ciphers\s.*aes192-ctr[,\s]?')
self.pass_cases.append(r'^Ciphers\s.*aes256-ctr[,\s]?')
else:
if not settings['server']['weak_hmac']:
self.pass_cases.append(r'^MACs.+,hmac-ripemd160$')
else:
self.pass_cases.append(r'^MACs.+,hmac-sha1$')
if settings['server']['weak_kex']:
self.fail_cases.append(r'^KexAlgorithms\sdiffie-hellman-group-exchange-sha256[,\s]?') # noqa
self.pass_cases.append(r'^KexAlgorithms\sdiffie-hellman-group14-sha1[,\s]?') # noqa
self.pass_cases.append(r'^KexAlgorithms\sdiffie-hellman-group-exchange-sha1[,\s]?') # noqa
self.pass_cases.append(r'^KexAlgorithms\sdiffie-hellman-group1-sha1[,\s]?') # noqa
else:
self.pass_cases.append(r'^KexAlgorithms\sdiffie-hellman-group-exchange-sha256$') # noqa
self.fail_cases.append(r'^KexAlgorithms\sdiffie-hellman-group14-sha1[,\s]?') # noqa
self.fail_cases.append(r'^KexAlgorithms\sdiffie-hellman-group-exchange-sha1[,\s]?') # noqa
self.fail_cases.append(r'^KexAlgorithms\sdiffie-hellman-group1-sha1[,\s]?') # noqa
if settings['server']['cbc_required']:
self.pass_cases.append(r'^Ciphers\s.*-cbc[,\s]?')
self.fail_cases.append(r'^Ciphers\s.*aes128-ctr[,\s]?')
self.fail_cases.append(r'^Ciphers\s.*aes192-ctr[,\s]?')
self.fail_cases.append(r'^Ciphers\s.*aes256-ctr[,\s]?')
else:
self.fail_cases.append(r'^Ciphers\s.*-cbc[,\s]?')
self.pass_cases.append(r'^Ciphers\s.*aes128-ctr[,\s]?')
self.pass_cases.append(r'^Ciphers\s.*aes192-ctr[,\s]?')
self.pass_cases.append(r'^Ciphers\s.*aes256-ctr[,\s]?')
if settings['server']['sftp_enable']:
self.pass_cases.append(r'^Subsystem\ssftp')
else:
self.fail_cases.append(r'^Subsystem\ssftp')
return super(SSHDConfigFileContentAudit, self).is_compliant(*args,
**kwargs)
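# Illustrative example (values are examples only): with weak_kex and
# cbc_required disabled on a trusty-or-newer host, a compliant sshd_config
# would contain lines such as
#   KexAlgorithms curve25519-sha256@libssh.org,diffie-hellman-group-exchange-sha256
#   Ciphers chacha20-poly1305@openssh.com,aes256-gcm@openssh.com,aes256-ctr,aes192-ctr,aes128-ctr
# which match the pass_cases regexes above without matching any fail_cases.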

View File

@ -0,0 +1,70 @@
###############################################################################
# WARNING: This configuration file is maintained by Juju. Local changes may
# be overwritten.
###############################################################################
# This is the ssh client system-wide configuration file. See
# ssh_config(5) for more information. This file provides defaults for
# users, and the values can be changed in per-user configuration files
# or on the command line.
# Configuration data is parsed as follows:
# 1. command line options
# 2. user-specific file
# 3. system-wide file
# Any configuration value is only changed the first time it is set.
# Thus, host-specific definitions should be at the beginning of the
# configuration file, and defaults at the end.
# Site-wide defaults for some commonly used options. For a comprehensive
# list of available options, their meanings and defaults, please see the
# ssh_config(5) man page.
# Restrict the following configuration to be limited to this Host.
{% if remote_hosts -%}
Host {{ ' '.join(remote_hosts) }}
{% endif %}
ForwardAgent no
ForwardX11 no
ForwardX11Trusted yes
RhostsRSAAuthentication no
RSAAuthentication yes
PasswordAuthentication {{ password_auth_allowed }}
HostbasedAuthentication no
GSSAPIAuthentication no
GSSAPIDelegateCredentials no
GSSAPIKeyExchange no
GSSAPITrustDNS no
BatchMode no
CheckHostIP yes
AddressFamily {{ addr_family }}
ConnectTimeout 0
StrictHostKeyChecking ask
IdentityFile ~/.ssh/identity
IdentityFile ~/.ssh/id_rsa
IdentityFile ~/.ssh/id_dsa
# The port at the destination should be defined
{% for port in ports -%}
Port {{ port }}
{% endfor %}
Protocol 2
Cipher 3des
{% if ciphers -%}
Ciphers {{ ciphers }}
{%- endif %}
{% if macs -%}
MACs {{ macs }}
{%- endif %}
{% if kexs -%}
KexAlgorithms {{ kexs }}
{%- endif %}
EscapeChar ~
Tunnel no
TunnelDevice any:any
PermitLocalCommand no
VisualHostKey no
RekeyLimit 1G 1h
SendEnv LANG LC_*
HashKnownHosts yes
{% if roaming -%}
UseRoaming {{ roaming }}
{% endif %}

View File

@ -0,0 +1,159 @@
###############################################################################
# WARNING: This configuration file is maintained by Juju. Local changes may
# be overwritten.
###############################################################################
# Package generated configuration file
# See the sshd_config(5) manpage for details
# What ports, IPs and protocols we listen for
{% for port in ports -%}
Port {{ port }}
{% endfor -%}
AddressFamily {{ addr_family }}
# Use these options to restrict which interfaces/protocols sshd will bind to
{% if ssh_ip -%}
{% for ip in ssh_ip -%}
ListenAddress {{ ip }}
{% endfor %}
{%- else -%}
ListenAddress ::
ListenAddress 0.0.0.0
{% endif -%}
Protocol 2
{% if ciphers -%}
Ciphers {{ ciphers }}
{% endif -%}
{% if macs -%}
MACs {{ macs }}
{% endif -%}
{% if kexs -%}
KexAlgorithms {{ kexs }}
{% endif -%}
# HostKeys for protocol version 2
{% for keyfile in host_key_files -%}
HostKey {{ keyfile }}
{% endfor -%}
# Privilege Separation is turned on for security
{% if use_priv_sep -%}
UsePrivilegeSeparation {{ use_priv_sep }}
{% endif -%}
# Lifetime and size of ephemeral version 1 server key
KeyRegenerationInterval 3600
ServerKeyBits 1024
# Logging
SyslogFacility AUTH
LogLevel VERBOSE
# Authentication:
LoginGraceTime 30s
{% if allow_root_with_key -%}
PermitRootLogin without-password
{% else -%}
PermitRootLogin no
{% endif %}
PermitTunnel no
PermitUserEnvironment no
StrictModes yes
RSAAuthentication yes
PubkeyAuthentication yes
AuthorizedKeysFile %h/.ssh/authorized_keys
# Don't read the user's ~/.rhosts and ~/.shosts files
IgnoreRhosts yes
# For this to work you will also need host keys in /etc/ssh_known_hosts
RhostsRSAAuthentication no
# similar for protocol version 2
HostbasedAuthentication no
# Uncomment if you don't trust ~/.ssh/known_hosts for RhostsRSAAuthentication
IgnoreUserKnownHosts yes
# To enable empty passwords, change to yes (NOT RECOMMENDED)
PermitEmptyPasswords no
# Change to yes to enable challenge-response passwords (beware issues with
# some PAM modules and threads)
ChallengeResponseAuthentication no
# Change to no to disable tunnelled clear text passwords
PasswordAuthentication {{ password_authentication }}
# Kerberos options
KerberosAuthentication no
KerberosGetAFSToken no
KerberosOrLocalPasswd no
KerberosTicketCleanup yes
# GSSAPI options
GSSAPIAuthentication no
GSSAPICleanupCredentials yes
X11Forwarding {{ allow_x11_forwarding }}
X11DisplayOffset 10
X11UseLocalhost yes
GatewayPorts no
PrintMotd {{ print_motd }}
PrintLastLog {{ print_last_log }}
TCPKeepAlive no
UseLogin no
ClientAliveInterval {{ client_alive_interval }}
ClientAliveCountMax {{ client_alive_count }}
AllowTcpForwarding {{ allow_tcp_forwarding }}
AllowAgentForwarding {{ allow_agent_forwarding }}
MaxStartups 10:30:100
#Banner /etc/issue.net
# Allow client to pass locale environment variables
AcceptEnv LANG LC_*
# Set this to 'yes' to enable PAM authentication, account processing,
# and session processing. If this is enabled, PAM authentication will
# be allowed through the ChallengeResponseAuthentication and
# PasswordAuthentication. Depending on your PAM configuration,
# PAM authentication via ChallengeResponseAuthentication may bypass
# the setting of "PermitRootLogin without-password".
# If you just want the PAM account and session checks to run without
# PAM authentication, then enable this but set PasswordAuthentication
# and ChallengeResponseAuthentication to 'no'.
UsePAM {{ use_pam }}
{% if deny_users -%}
DenyUsers {{ deny_users }}
{% endif -%}
{% if allow_users -%}
AllowUsers {{ allow_users }}
{% endif -%}
{% if deny_groups -%}
DenyGroups {{ deny_groups }}
{% endif -%}
{% if allow_groups -%}
AllowGroups {{ allow_groups }}
{% endif -%}
UseDNS {{ use_dns }}
MaxAuthTries {{ max_auth_tries }}
MaxSessions {{ max_sessions }}
{% if sftp_enable -%}
# Configuration, in case SFTP is used
## override default of no subsystems
## Subsystem sftp /opt/app/openssh5/libexec/sftp-server
Subsystem sftp internal-sftp -l VERBOSE
## These lines must appear at the *end* of sshd_config
Match Group {{ sftp_group }}
ForceCommand internal-sftp -l VERBOSE
ChrootDirectory {{ sftp_chroot }}
{% else -%}
# Configuration, in case SFTP is used
## override default of no subsystems
## Subsystem sftp /opt/app/openssh5/libexec/sftp-server
## These lines must appear at the *end* of sshd_config
Match Group sftponly
ForceCommand internal-sftp -l VERBOSE
ChrootDirectory /sftpchroot/home/%u
{% endif %}

View File

@ -0,0 +1,69 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from charmhelpers.core.hookenv import (
log,
DEBUG,
WARNING,
)
try:
from jinja2 import FileSystemLoader, Environment
except ImportError:
from charmhelpers.fetch import apt_install
from charmhelpers.fetch import apt_update
apt_update(fatal=True)
apt_install('python-jinja2', fatal=True)
from jinja2 import FileSystemLoader, Environment
# NOTE: function separated from main rendering code to facilitate easier
# mocking in unit tests.
def write(path, data):
with open(path, 'wb') as out:
out.write(data)
def get_template_path(template_dir, path):
"""Returns the template file which would be used to render the path.
The path to the template file is returned.
:param template_dir: the directory the templates are located in
:param path: the file path to be written to.
:returns: path to the template file
"""
return os.path.join(template_dir, os.path.basename(path))
def render_and_write(template_dir, path, context):
"""Renders the specified template into the file.
:param template_dir: the directory to load the template from
:param path: the path to write the templated contents to
:param context: the parameters to pass to the rendering engine
"""
env = Environment(loader=FileSystemLoader(template_dir))
template_file = os.path.basename(path)
template = env.get_template(template_file)
log('Rendering from template: %s' % template.name, level=DEBUG)
rendered_content = template.render(context)
if not rendered_content:
log("Render returned None - skipping '%s'" % path,
level=WARNING)
return
write(path, rendered_content.encode('utf-8').strip())
log('Wrote template %s' % path, level=DEBUG)
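# Example usage (illustrative paths and context only):
#   render_and_write('/path/to/templates',
#                    '/etc/sysctl.d/99-juju-hardening.conf',
#                    {'sysctl_settings': [('kernel.sysrq', '0')]})
# looks up a template named '99-juju-hardening.conf' in the template
# directory and writes the rendered content to the target path.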

View File

@ -0,0 +1,155 @@
# Copyright 2016 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import glob
import grp
import os
import pwd
import six
import yaml
from charmhelpers.core.hookenv import (
log,
DEBUG,
INFO,
WARNING,
ERROR,
)
# Global settings cache. Since each hook fire entails a fresh module import it
# is safe to hold this in memory and not risk missing config changes (since
# they will result in a new hook fire and thus re-import).
__SETTINGS__ = {}
def _get_defaults(modules):
"""Load the default config for the provided modules.
:param modules: stack modules config defaults to lookup.
:returns: modules default config dictionary.
"""
default = os.path.join(os.path.dirname(__file__),
'defaults/%s.yaml' % (modules))
return yaml.safe_load(open(default))
def _get_schema(modules):
"""Load the config schema for the provided modules.
NOTE: this schema is intended to have a 1-1 relationship with the keys in
the default config and is used as a means to verify valid overrides provided
by the user.
:param modules: stack modules config schema to lookup.
:returns: modules default schema dictionary.
"""
schema = os.path.join(os.path.dirname(__file__),
'defaults/%s.yaml.schema' % (modules))
return yaml.safe_load(open(schema))
def _get_user_provided_overrides(modules):
"""Load user-provided config overrides.
:param modules: stack modules to lookup in user overrides yaml file.
:returns: overrides dictionary.
"""
overrides = os.path.join(os.environ['JUJU_CHARM_DIR'],
'hardening.yaml')
if os.path.exists(overrides):
log("Found user-provided config overrides file '%s'" %
(overrides), level=DEBUG)
settings = yaml.safe_load(open(overrides))
if settings and settings.get(modules):
log("Applying '%s' overrides" % (modules), level=DEBUG)
return settings.get(modules)
log("No overrides found for '%s'" % (modules), level=DEBUG)
else:
log("No hardening config overrides file '%s' found in charm "
"root dir" % (overrides), level=DEBUG)
return {}
def _apply_overrides(settings, overrides, schema):
"""Get overrides config overlayed onto modules defaults.
:param modules: require stack modules config.
:returns: dictionary of modules config with user overrides applied.
"""
if overrides:
for k, v in six.iteritems(overrides):
if k in schema:
if schema[k] is None:
settings[k] = v
elif type(schema[k]) is dict:
settings[k] = _apply_overrides(settings[k], overrides[k],
schema[k])
else:
raise Exception("Unexpected type found in schema '%s'" %
type(schema[k]), level=ERROR)
else:
log("Unknown override key '%s' - ignoring" % (k), level=INFO)
return settings
def get_settings(modules):
global __SETTINGS__
if modules in __SETTINGS__:
return __SETTINGS__[modules]
schema = _get_schema(modules)
settings = _get_defaults(modules)
overrides = _get_user_provided_overrides(modules)
__SETTINGS__[modules] = _apply_overrides(settings, overrides, schema)
return __SETTINGS__[modules]
def ensure_permissions(path, user, group, permissions, maxdepth=-1):
"""Ensure permissions for path.
If path is a file, apply to file and return. If path is a directory,
apply recursively (if required) to directory contents and return.
:param path: path of the file or directory to apply permissions to
:param user: user name
:param group: group name
:param permissions: octal permissions
:param maxdepth: maximum recursion depth. A negative maxdepth allows
infinite recursion and maxdepth=0 means no recursion.
:returns: None
"""
if not os.path.exists(path):
log("File '%s' does not exist - cannot set permissions" % (path),
level=WARNING)
return
_user = pwd.getpwnam(user)
os.chown(path, _user.pw_uid, grp.getgrnam(group).gr_gid)
os.chmod(path, permissions)
if maxdepth == 0:
log("Max recursion depth reached - skipping further recursion",
level=DEBUG)
return
elif maxdepth > 0:
maxdepth -= 1
if os.path.isdir(path):
contents = glob.glob("%s/*" % (path))
for c in contents:
ensure_permissions(c, user=user, group=group,
permissions=permissions, maxdepth=maxdepth)
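A brief usage sketch of ensure_permissions(); the import path assumes the file lands as charmhelpers.contrib.hardening.utils, and the paths and modes below are illustrative only:

    # Hypothetical example, not part of the charm itself.
    from charmhelpers.contrib.hardening.utils import ensure_permissions

    # Single file: maxdepth=0 applies ownership and mode to the path and stops.
    ensure_permissions('/etc/ssh/sshd_config', user='root', group='root',
                       permissions=0o600, maxdepth=0)

    # Directory: apply to the directory and recurse one level into its contents.
    ensure_permissions('/etc/ssh', user='root', group='root',
                       permissions=0o644, maxdepth=1)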

View File

@ -156,7 +156,7 @@ class OpenStackAmuletDeployment(AmuletDeployment):
use_source = list(set(
use_source + ['mysql', 'mongodb', 'rabbitmq-server', 'ceph',
'ceph-osd', 'ceph-radosgw', 'ceph-mon',
'ceph-proxy', 'percona-cluster']))
'ceph-proxy', 'percona-cluster', 'lxd']))
# Charms which can not use openstack-origin, ie. many subordinates
no_origin = list(set(

View File

@ -28,6 +28,7 @@ import keystoneclient.v2_0 as keystone_client
from keystoneclient.auth.identity import v3 as keystone_id_v3
from keystoneclient import session as keystone_session
from keystoneclient.v3 import client as keystone_client_v3
from novaclient import exceptions
import novaclient.client as nova_client
import pika
@ -315,7 +316,6 @@ class OpenStackAmuletUtils(AmuletUtils):
keystone_ip=None):
"""Authenticates admin user with the keystone admin endpoint."""
self.log.debug('Authenticating keystone admin...')
unit = keystone_sentry
if not keystone_ip:
keystone_ip = keystone_sentry.info['public-address']
@ -378,6 +378,16 @@ class OpenStackAmuletUtils(AmuletUtils):
tenant_name=tenant,
auth_version='2.0')
def create_flavor(self, nova, name, ram, vcpus, disk, flavorid="auto",
ephemeral=0, swap=0, rxtx_factor=1.0, is_public=True):
"""Create the specified flavor."""
try:
nova.flavors.find(name=name)
except (exceptions.NotFound, exceptions.NoUniqueMatch):
self.log.debug('Creating flavor ({})'.format(name))
nova.flavors.create(name, ram, vcpus, disk, flavorid,
ephemeral, swap, rxtx_factor, is_public)
def create_cirros_image(self, glance, image_name):
"""Download the latest cirros image and upload it to glance,
validate and return a resource pointer.

View File

@ -644,7 +644,7 @@ class ApacheSSLContext(OSContextGenerator):
service_namespace = None
def enable_modules(self):
cmd = ['a2enmod', 'ssl', 'proxy', 'proxy_http']
cmd = ['a2enmod', 'ssl', 'proxy', 'proxy_http', 'headers']
check_call(cmd)
def configure_cert(self, cn=None):

View File

@ -13,6 +13,7 @@ Listen {{ ext_port }}
ProxyPass / http://localhost:{{ int }}/
ProxyPassReverse / http://localhost:{{ int }}/
ProxyPreserveHost on
RequestHeader set X-Forwarded-Proto "https"
</VirtualHost>
{% endfor -%}
<Proxy *>

View File

@ -13,6 +13,7 @@ Listen {{ ext_port }}
ProxyPass / http://localhost:{{ int }}/
ProxyPassReverse / http://localhost:{{ int }}/
ProxyPreserveHost on
RequestHeader set X-Forwarded-Proto "https"
</VirtualHost>
{% endfor -%}
<Proxy *>

View File

@ -229,6 +229,7 @@ GIT_DEFAULT_REPOS = {
GIT_DEFAULT_BRANCHES = {
'liberty': 'stable/liberty',
'mitaka': 'stable/mitaka',
'newton': 'stable/newton',
'master': 'master',
}
@ -409,14 +410,26 @@ def get_os_version_package(pkg, fatal=True):
os_rel = None
def os_release(package, base='essex'):
def reset_os_release():
'''Unset the cached os_release version'''
global os_rel
os_rel = None
def os_release(package, base='essex', reset_cache=False):
'''
Returns OpenStack release codename from a cached global.
If reset_cache then unset the cached os_release version and return the
freshly determined version.
If the codename can not be determined from either an installed package or
the installation source, the earliest release supported by the charm should
be returned.
'''
global os_rel
if reset_cache:
reset_os_release()
if os_rel:
return os_rel
os_rel = (git_os_codename_install_source(config('openstack-origin-git')) or
@ -735,12 +748,12 @@ def git_os_codename_install_source(projects_yaml):
if projects in GIT_DEFAULT_BRANCHES.keys():
if projects == 'master':
return 'newton'
return 'ocata'
return projects
if 'release' in projects:
if projects['release'] == 'master':
return 'newton'
return 'ocata'
return projects['release']
return None

View File

@ -332,6 +332,8 @@ def config(scope=None):
config_cmd_line = ['config-get']
if scope is not None:
config_cmd_line.append(scope)
else:
config_cmd_line.append('--all')
config_cmd_line.append('--format=json')
try:
config_data = json.loads(

View File

@ -48,7 +48,7 @@ proxy = REMOTEProxy(user=config('remote-user'),
password=config('remote-password'))
@hooks.hook()
@hooks.hook('install.real')
def install():
apt_install(['fabric'], fatal=True)
proxy.install()
@ -130,6 +130,12 @@ def nova_ceilometer_relation_changed():
proxy.commit()
@hooks.hook('update-status')
def update_status():
log('Updating status.')
assess_status(CONFIGS)
if __name__ == '__main__':
try:
hooks.execute(sys.argv)

View File

@ -72,6 +72,8 @@ class REMOTEProxy():
def __init__(self, user, ssh_key, hosts,
repository, password):
if None in [user, ssh_key, hosts, repository]:
# XXX: Charm should block instead.
# https://bugs.launchpad.net/bugs/1638772
raise Exception('Missing configuration')
self.user = user
self.ssh_key = ssh_key

View File

@ -1,12 +1,2 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
pbr>=1.8.0,<1.9.0
PyYAML>=3.1.0
simplejson>=2.2.0
netifaces>=0.10.4
netaddr>=0.7.12,!=0.7.16
Jinja2>=2.6 # BSD License (3 clause)
six>=1.9.0
dnspython>=1.12.0
psutil>=1.1.1,<2.0.0
# Place all python test dependencies in test-requirements.txt.
# This is intentionally blank.

View File

@ -1,12 +1,24 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
pbr>=1.8.0,<1.9.0
PyYAML>=3.1.0
simplejson>=2.2.0
netifaces>=0.10.4
netaddr>=0.7.12,!=0.7.16
Jinja2>=2.6 # BSD License (3 clause)
six>=1.9.0
dnspython>=1.12.0
psutil>=1.1.1,<2.0.0
coverage>=3.6
mock>=1.2
flake8>=2.2.4,<=2.4.1
os-testr>=0.4.1
charm-tools>=2.0.0
requests==2.6.0
bzr+lp:charm-helpers#egg=charmhelpers
#bzr+lp:~1chb1n/charm-helpers/update-egg#egg=charmhelpers
git+https://git.launchpad.net/juju-wait
# BEGIN: Amulet OpenStack Charm Helper Requirements
# Liberty client lower constraints
amulet>=1.14.3,<2.0
@ -15,6 +27,8 @@ python-ceilometerclient>=1.5.0,<2.0
python-cinderclient>=1.4.0,<2.0
python-glanceclient>=1.1.0,<2.0
python-heatclient>=0.8.0,<1.0
python-keystoneclient>=1.7.1,<2.0
python-neutronclient>=3.1.0,<4.0
python-novaclient>=2.30.1,<3.0
python-openstackclient>=1.7.0,<2.0
python-swiftclient>=2.6.0,<3.0

9
tests/README.md Normal file
View File

@ -0,0 +1,9 @@
# Overview
This directory provides Amulet tests to verify basic deployment functionality
from the perspective of this charm, its requirements and its features, as
exercised in a subset of the full OpenStack deployment test bundle topology.
For full details on functional testing of OpenStack charms please refer to
the [functional testing](http://docs.openstack.org/developer/charm-guide/testing.html#functional-testing)
section of the OpenStack Charm Guide.
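The README above does not show an invocation. One plausible way to drive these tests, assuming the standard OpenStack charm tooling of this era (tox for lint and unit tests, bundletester against an already bootstrapped Juju model for the amulet tests; the exact tox target names depend on this charm's tox.ini), is:

    # Lint and unit tests; tests.yaml intentionally leaves the makefile empty
    # so these do not rerun ahead of the amulet run.
    tox -e pep8
    tox -e py27

    # Functional (amulet) tests via bundletester, with JSON results written out.
    bundletester -vl DEBUG -r json -o func-results.json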

454
tests/basic_deployment.py Normal file
View File

@ -0,0 +1,454 @@
# -*- coding: utf-8 -*-
# Copyright 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import amulet
import juju_wait
from charmhelpers.contrib.openstack.amulet.deployment import (
OpenStackAmuletDeployment
)
from charmhelpers.contrib.openstack.amulet.utils import (
OpenStackAmuletUtils,
DEBUG,
# ERROR
)
from novaclient import exceptions
class NovaOpenStackAmuletUtils(OpenStackAmuletUtils):
"""Nova based helper extending base helper for creation of flavors"""
def create_flavor(self, nova, name, ram, vcpus, disk, flavorid="auto",
ephemeral=0, swap=0, rxtx_factor=1.0, is_public=True):
"""Create the specified flavor."""
try:
nova.flavors.find(name=name)
except (exceptions.NotFound, exceptions.NoUniqueMatch):
self.log.debug('Creating flavor ({})'.format(name))
nova.flavors.create(name, ram, vcpus, disk, flavorid,
ephemeral, swap, rxtx_factor, is_public)
# Use DEBUG to turn on debug logging
u = NovaOpenStackAmuletUtils(DEBUG)
class NovaBasicDeployment(OpenStackAmuletDeployment):
"""Amulet tests on a basic nova compute proxy deployment."""
def __init__(self, series=None, openstack=None, source=None,
git=False, stable=False):
"""Deploy the entire test environment."""
super(NovaBasicDeployment, self).__init__(series, openstack,
source, stable)
self._pre_deploy_remote_compute()
self._add_services()
self._add_relations()
self._configure_services()
self._deploy()
u.log.info('Waiting on extended status checks...')
# NOTE: nova-compute-proxy hangs blocked on neutron relation when
# simulating the remote compute host.
self.exclude_services = ['nova-compute-proxy']
self._auto_wait_for_status(exclude_services=self.exclude_services)
self._initialize_tests()
def _pre_deploy_remote_compute(self):
"""Add a simulated remote machine ahead of the actual deployment.
This is done outside of Amulet because Amulet only supports one
deploy call, and the public-address of the remote-compute is
needed as a charm config option on the nova-compute-proxy charm.
In a production scenario, the remote compute machine is up and
running before the control plane is deployed. This simulates that."""
# Deploy simulated remote-compute host if not already deployed
cmd = ['juju', 'status', 'remote-compute']
compute_deployed = 'remote-compute:' in \
subprocess.check_output(cmd).decode('UTF-8')
if not compute_deployed:
u.log.debug('Pre-deploying a simulated remote-compute unit')
cmd = ['juju', 'deploy', 'ubuntu', 'remote-compute']
subprocess.check_call(cmd)
u.log.debug('Using juju_wait to wait for remote-compute deployment')
juju_wait.wait(max_wait=900)
# Discover IP address of remote-compute unit
cmd = ['juju', 'run', '--service',
'remote-compute', 'unit-get public-address']
self.compute_addr = \
subprocess.check_output(cmd).decode('UTF-8').strip()
u.log.debug('Simulated remote compute address: '
'{}'.format(self.compute_addr))
# Remove local test keys if they exist
key_files = ['id_rsa_tmp', 'id_rsa_tmp.pub']
for key_file in key_files:
key_file_path = os.path.join('files', key_file)
if os.path.exists(key_file_path):
u.log.debug('Removing file: {}'.format(key_file_path))
os.remove(key_file_path)
# Create a new local test key
u.log.debug('Generating new test ssh keys')
cmd = ['ssh-keygen', '-t', 'rsa', '-b', '4096', '-C',
'demo@local', '-f', 'files/id_rsa_tmp', '-q', '-N', '']
subprocess.check_call(cmd)
for key_file in key_files:
key_file_path = os.path.join('files', key_file)
if not os.path.exists(key_file_path):
raise Exception('Expected ssh key file not '
'found: {}'.format(key_file_path))
# Copy new local test pub key into remote-compute and
# add it to authorized_keys.
u.log.debug('Copying pub key into simulated remote-compute host')
src_file = os.path.join('files', 'id_rsa_tmp.pub')
dst_file = os.path.join(os.sep, 'home', 'ubuntu', 'id_rsa_tmp.pub')
auth_file = os.path.join(os.sep, 'home', 'ubuntu',
'.ssh', 'authorized_keys')
cmd = ['juju', 'scp', src_file,
'ubuntu@{}:{}'.format(self.compute_addr, dst_file)]
subprocess.check_call(cmd)
u.log.debug('Adding pub key to authorized_keys on the simulated '
'remote-compute host')
cmd = ['juju', 'ssh', 'ubuntu@{}'.format(self.compute_addr),
'cat {} >> {}'.format(dst_file, auth_file)]
subprocess.check_call(cmd)
u.log.debug('Installing and enabling yum on remote compute host')
cmd = ['juju', 'ssh', 'ubuntu@{}'.format(self.compute_addr),
'sudo apt-get install yum yum-utils -y']
subprocess.check_call(cmd)
cmd = ['juju', 'ssh', 'ubuntu@{}'.format(self.compute_addr),
'sudo yum-config-manager --enable']
subprocess.check_call(cmd)
u.log.debug('Remote compute host deploy and prep complete')
def _add_services(self):
"""Add services
Add the services under test, where nova-compute-proxy is local,
and the rest of the services are from lp branches that are
compatible with the local charm (e.g. stable or next).
"""
this_service = {
'name': 'nova-compute-proxy',
}
other_services = [
{'name': 'rabbitmq-server'},
{'name': 'nova-cloud-controller'},
{'name': 'keystone'},
{'name': 'glance'},
{'name': 'neutron-api'},
{'name': 'neutron-gateway'},
{'name': 'percona-cluster', 'constraints': {'mem': '3072M'}},
]
super(NovaBasicDeployment, self)._add_services(this_service,
other_services)
def _add_relations(self):
"""Add all of the relations for the services."""
relations = {
'nova-compute-proxy:image-service': 'glance:image-service',
'nova-compute-proxy:amqp': 'rabbitmq-server:amqp',
'nova-compute-proxy:cloud-compute': 'nova-cloud-controller:'
'cloud-compute',
'nova-compute-proxy:neutron-plugin-api': 'neutron-api:'
'neutron-plugin-api',
'nova-cloud-controller:shared-db': 'percona-cluster:shared-db',
'nova-cloud-controller:identity-service': 'keystone:'
'identity-service',
'nova-cloud-controller:amqp': 'rabbitmq-server:amqp',
'nova-cloud-controller:image-service': 'glance:image-service',
'keystone:shared-db': 'percona-cluster:shared-db',
'glance:identity-service': 'keystone:identity-service',
'glance:shared-db': 'percona-cluster:shared-db',
'glance:amqp': 'rabbitmq-server:amqp',
'neutron-gateway:amqp': 'rabbitmq-server:amqp',
'neutron-api:shared-db': 'percona-cluster:shared-db',
'neutron-api:amqp': 'rabbitmq-server:amqp',
'neutron-api:neutron-api': 'nova-cloud-controller:neutron-api',
'neutron-api:neutron-plugin-api': 'neutron-gateway:'
'neutron-plugin-api',
'neutron-api:identity-service': 'keystone:identity-service',
}
super(NovaBasicDeployment, self)._add_relations(relations)
def _configure_services(self):
"""Configure all of the services."""
nova_config = {
'remote-user': 'ubuntu',
'remote-repos': "file:///mnt/osmitakacomp,file:///mnt/osprereqs",
'remote-key': 'id_rsa_tmp',
'remote-hosts': str(self.compute_addr),
}
nova_cc_config = {}
keystone_config = {
'admin-password': 'openstack',
'admin-token': 'ubuntutesting',
}
pxc_config = {
'dataset-size': '25%',
'max-connections': 1000,
'root-password': 'ChangeMe123',
'sst-password': 'ChangeMe123',
}
configs = {
'nova-compute-proxy': nova_config,
'keystone': keystone_config,
'nova-cloud-controller': nova_cc_config,
'percona-cluster': pxc_config,
}
super(NovaBasicDeployment, self)._configure_services(configs)
def _initialize_tests(self):
"""Perform final initialization before tests get run."""
# Access the sentries for inspecting service units
self.pxc_sentry = self.d.sentry['percona-cluster'][0]
self.keystone_sentry = self.d.sentry['keystone'][0]
self.rabbitmq_sentry = self.d.sentry['rabbitmq-server'][0]
self.compute_sentry = self.d.sentry['nova-compute-proxy'][0]
self.nova_cc_sentry = self.d.sentry['nova-cloud-controller'][0]
self.glance_sentry = self.d.sentry['glance'][0]
u.log.debug('openstack release val: {}'.format(
self._get_openstack_release()))
u.log.debug('openstack release str: {}'.format(
self._get_openstack_release_string()))
# Authenticate admin with keystone
self.keystone = u.authenticate_keystone_admin(self.keystone_sentry,
user='admin',
password='openstack',
tenant='admin')
# Authenticate admin with glance endpoint
self.glance = u.authenticate_glance_admin(self.keystone)
# Authenticate admin with nova endpoint
self.nova = u.authenticate_nova_user(self.keystone,
user='admin',
password='openstack',
tenant='admin')
# Create a demo tenant/role/user
self.demo_tenant = 'demoTenant'
self.demo_role = 'demoRole'
self.demo_user = 'demoUser'
if not u.tenant_exists(self.keystone, self.demo_tenant):
tenant = self.keystone.tenants.create(tenant_name=self.demo_tenant,
description='demo tenant',
enabled=True)
self.keystone.roles.create(name=self.demo_role)
self.keystone.users.create(name=self.demo_user,
password='password',
tenant_id=tenant.id,
email='demo@demo.com')
# Authenticate demo user with keystone
self.keystone_demo = \
u.authenticate_keystone_user(self.keystone, user=self.demo_user,
password='password',
tenant=self.demo_tenant)
# Authenticate demo user with nova-api
self.nova_demo = u.authenticate_nova_user(self.keystone,
user=self.demo_user,
password='password',
tenant=self.demo_tenant)
def test_100_service_catalog(self):
"""Verify endpoints exist in the service catalog"""
u.log.debug('Verifying endpoints exist in the service catalog')
ep_validate = {
'adminURL': u.valid_url,
'region': 'RegionOne',
'publicURL': u.valid_url,
'internalURL': u.valid_url,
'id': u.not_null,
}
expected = {
'image': [ep_validate],
'compute': [ep_validate],
'network': [ep_validate],
'identity': [ep_validate],
}
actual = self.keystone_demo.service_catalog.get_endpoints()
ret = u.validate_svc_catalog_endpoint_data(expected, actual)
if ret:
amulet.raise_status(amulet.FAIL, msg=ret)
def test_200_ncp_ncc_relation(self):
"""Verify the ncp:nova-cloud-controller cloud-compute relation data"""
u.log.debug('Checking ncp to nova-cc cloud-compute relation data...')
unit = self.compute_sentry
relation = ['cloud-compute', 'nova-cloud-controller:cloud-compute']
expected = {
'private-address': u.valid_ip,
}
ret = u.validate_relation_data(unit, relation, expected)
if ret:
message = u.relation_error('ncp cloud-compute', ret)
amulet.raise_status(amulet.FAIL, msg=message)
def test_202_ncp_neutron_relation(self):
"""Verify the ncp:neutron-api neutron-plugin-api relation data"""
u.log.debug('Checking ncp to neutron-api neutron-plugin-api relation data...')
unit = self.compute_sentry
relation = ['neutron-plugin-api', 'neutron-api:neutron-plugin-api']
expected = {
'private-address': u.valid_ip,
}
ret = u.validate_relation_data(unit, relation, expected)
if ret:
message = u.relation_error('ncp neutron-plugin-api', ret)
amulet.raise_status(amulet.FAIL, msg=message)
def test_204_ncp_rabbitmq_amqp_relation(self):
"""Verify the ncp:rabbitmq-server amqp relation data"""
u.log.debug('Checking ncp to rmq amqp relation data...')
unit = self.compute_sentry
relation = ['amqp', 'rabbitmq-server:amqp']
expected = {
'private-address': u.valid_ip,
'vhost': 'openstack',
'username': 'nova',
}
ret = u.validate_relation_data(unit, relation, expected)
if ret:
message = u.relation_error('ncp amqp', ret)
amulet.raise_status(amulet.FAIL, msg=message)
def test_206_ncp_glance_image_relation(self):
"""Verify the ncp:glance image relation data"""
u.log.debug('Checking ncp to glance image-service relation data...')
unit = self.compute_sentry
relation = ['image-service', 'glance:image-service']
expected = {
'private-address': u.valid_ip,
}
ret = u.validate_relation_data(unit, relation, expected)
if ret:
message = u.relation_error('ncp image', ret)
amulet.raise_status(amulet.FAIL, msg=message)
def test_300_scratch_nova_config(self):
"""Verify data in the scratch nova config file on the proxy unit."""
u.log.debug('Checking scratch nova config files on the proxy unit...')
unit = self.compute_sentry
conf = '/var/lib/charm/nova-compute-proxy/etc/nova/nova.conf'
rmq_nc_rel = self.rabbitmq_sentry.relation(
'amqp', 'nova-compute-proxy:amqp')
gl_nc_rel = self.glance_sentry.relation(
'image-service', 'nova-compute-proxy:image-service')
serial_base_url = 'ws://{}:6083/'.format(
self.nova_cc_sentry.info['public-address'])
expected = {
'DEFAULT': {
'logdir': '/var/log/nova',
'state_path': '/var/lib/nova',
'debug': 'False',
'use_syslog': 'False',
'auth_strategy': 'keystone',
'enabled_apis': 'osapi_compute,metadata',
'network_manager': 'nova.network.manager.FlatDHCPManager',
'volume_api_class': 'nova.volume.cinder.API',
'reserved_host_memory': '512',
'my_ip': 'LOCAL_IP',
},
'oslo_concurrency': {
'lock_path': '/var/lock/nova'
},
'oslo_messaging_rabbit': {
'rabbit_userid': 'nova',
'rabbit_virtual_host': 'openstack',
'rabbit_password': rmq_nc_rel['password'],
'rabbit_host': rmq_nc_rel['hostname'],
},
'glance': {
'api_servers': gl_nc_rel['glance-api-server']
},
'serial_console': {
'enabled': 'false',
'base_url': serial_base_url,
},
'vnc': {
'enabled': 'False',
},
}
for section, pairs in expected.iteritems():
ret = u.validate_config_data(unit, conf, section, pairs)
if ret:
message = "nova config error: {}".format(ret)
amulet.raise_status(amulet.FAIL, msg=message)
def test_302_scratch_neutron_config(self):
"""Verify data in the scratch config file on the proxy unit."""
# TODO: check conf data
pass
def test_304_scratch_ovs_agent_ml2_config(self):
"""Verify data in the scratch config file on the proxy unit."""
# TODO: check conf data
pass
# TODO: check charm scratch dir files and contents
# root@juju-0efa4c-1-lxd-7:/var/lib/charm# tree
# .
# └── nova-compute-proxy
#     └── etc
#         ├── neutron
#         │   ├── neutron.conf
#         │   └── plugins
#         │       └── ml2
#         │           └── openvswitch_agent.ini
#         └── nova
#             └── nova.conf
# /!\ More tests needed.
# TODO: check that yum repo files are created and contain the expected info
# Executing task 'copy_file_as_root'
# put: /tmp/tmpOTUKXw -> /etc/yum.repos.d/openstack-nova-compute-proxy-1.repo
# Executing task 'copy_file_as_root'
# put: /tmp/tmpfQREor -> /etc/yum.repos.d/openstack-nova-compute-proxy-2.repo
# Executing task 'yum_install'
# sudo: yum install --skip-broken -y openstack-nova-compute openstack-neutron-openvswitch python-neutronclient # noqa
# out: /bin/bash: yum: command not found

View File

@ -0,0 +1,25 @@
#!/usr/bin/env python
#
# Copyright 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Amulet tests on a basic nova compute deployment on trusty-liberty."""
from basic_deployment import NovaBasicDeployment
if __name__ == '__main__':
deployment = NovaBasicDeployment(series='trusty',
openstack='cloud:trusty-liberty',
source='cloud:trusty-updates/liberty')
deployment.run_tests()

View File

@ -0,0 +1,25 @@
#!/usr/bin/env python
#
# Copyright 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Amulet tests on a basic nova compute deployment on trusty-mitaka."""
from basic_deployment import NovaBasicDeployment
if __name__ == '__main__':
deployment = NovaBasicDeployment(series='trusty',
openstack='cloud:trusty-mitaka',
source='cloud:trusty-updates/mitaka')
deployment.run_tests()

23
tests/gate-basic-xenial-mitaka Executable file
View File

@ -0,0 +1,23 @@
#!/usr/bin/env python
#
# Copyright 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Amulet tests on a basic nova compute deployment on xenial-mitaka."""
from basic_deployment import NovaBasicDeployment
if __name__ == '__main__':
deployment = NovaBasicDeployment(series='xenial')
deployment.run_tests()

View File

@ -0,0 +1,25 @@
#!/usr/bin/env python
#
# Copyright 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Amulet tests on a basic nova compute deployment on xenial-newton."""
from basic_deployment import NovaBasicDeployment
if __name__ == '__main__':
deployment = NovaBasicDeployment(series='xenial',
openstack='cloud:xenial-newton',
source='cloud:xenial-updates/newton')
deployment.run_tests()

View File

@ -0,0 +1,23 @@
#!/usr/bin/env python
#
# Copyright 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Amulet tests on a basic nova compute deployment on xenial-mitaka."""
from basic_deployment import NovaBasicDeployment
if __name__ == '__main__':
deployment = NovaBasicDeployment(series='yakkety')
deployment.run_tests()

17
tests/tests.yaml Normal file
View File

@ -0,0 +1,17 @@
# Bootstrap the model if necessary.
bootstrap: True
# Re-use bootstrap node.
reset: True
# Use tox/requirements to drive the venv instead of bundletester's venv feature.
virtualenv: False
# Leave makefile empty, otherwise unit/lint tests will rerun ahead of amulet.
makefile: []
# Do not specify juju PPA sources. Juju is presumed to be pre-installed
# and configured in all test runner environments.
#sources:
# Do not specify or rely on system packages.
#packages:
# Do not specify python packages here. Use test-requirements.txt
# and tox instead. ie. The venv is constructed before bundletester
# is invoked.
#python-packages:

View File

@ -19,8 +19,6 @@ passenv = HOME TERM AMULET_*
[testenv:py27]
basepython = python2.7
whitelist_externals = echo
commands =
echo "No unit tests for this charm"
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt

18
unit_tests/__init__.py Normal file
View File

@ -0,0 +1,18 @@
# Copyright 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
sys.path.append('actions/')
sys.path.append('hooks/')

View File

@ -0,0 +1,148 @@
# Copyright 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import patch
from test_utils import CharmTestCase
import nova_compute_context as context
TO_PATCH = [
'relation_ids',
'relation_get',
'related_units',
'config',
'log',
]
NEUTRON_CONTEXT = {
'network_manager': 'neutron',
'quantum_auth_strategy': 'keystone',
'keystone_host': 'keystone_host',
'auth_port': '5000',
'auth_protocol': 'https',
'quantum_url': 'http://quantum_url',
'service_tenant_name': 'admin',
'service_username': 'admin',
'service_password': 'openstack',
'quantum_security_groups': 'yes',
'quantum_plugin': 'ovs',
'auth_host': 'keystone_host',
}
def fake_log(msg, level=None):
level = level or 'INFO'
print('[juju test log ({})] {}'.format(level, msg))
class FakeUnitdata(object):
def __init__(self, **kwargs):
self.unit_data = {}
for name, value in kwargs.items():
self.unit_data[name] = value
def get(self, key, default=None, record=False):
return self.unit_data.get(key, default)
def set(self, key, value):
self.unit_data[key] = value
def flush(self):
pass
class NovaComputeContextTests(CharmTestCase):
def setUp(self):
super(NovaComputeContextTests, self).setUp(context, TO_PATCH)
self.relation_get.side_effect = self.test_relation.get
self.config.side_effect = self.test_config.get
self.log.side_effect = fake_log
self.host_uuid = 'e46e530d-18ae-4a67-9ff0-e6e2ba7c60a7'
self.maxDiff = None
def test_cloud_compute_context_no_relation(self):
self.relation_ids.return_value = []
cloud_compute = context.CloudComputeContext()
self.assertEquals({}, cloud_compute())
@patch.object(context, '_network_manager')
def test_cloud_compute_context_restart_trigger(self, nm):
nm.return_value = None
cloud_compute = context.CloudComputeContext()
with patch.object(cloud_compute, 'restart_trigger') as rt:
rt.return_value = 'footrigger'
ctxt = cloud_compute()
self.assertEquals(ctxt.get('restart_trigger'), 'footrigger')
with patch.object(cloud_compute, 'restart_trigger') as rt:
rt.return_value = None
ctxt = cloud_compute()
self.assertEquals(ctxt.get('restart_trigger'), None)
@patch.object(context, '_network_manager')
def test_cloud_compute_volume_context_cinder(self, netman):
netman.return_value = None
self.relation_ids.return_value = 'cloud-compute:0'
self.related_units.return_value = 'nova-cloud-controller/0'
cloud_compute = context.CloudComputeContext()
self.test_relation.set({'volume_service': 'cinder'})
self.assertEquals({'volume_service': 'cinder'}, cloud_compute())
class SerialConsoleContextTests(CharmTestCase):
def setUp(self):
super(SerialConsoleContextTests, self).setUp(context, TO_PATCH)
self.relation_get.side_effect = self.test_relation.get
self.config.side_effect = self.test_config.get
self.host_uuid = 'e46e530d-18ae-4a67-9ff0-e6e2ba7c60a7'
def test_serial_console_disabled(self):
self.relation_ids.return_value = ['cloud-compute:0']
self.related_units.return_value = 'nova-cloud-controller/0'
self.test_relation.set({
'enable_serial_console': 'false',
})
self.assertEqual(
context.SerialConsoleContext()(),
{'enable_serial_console': 'false',
'serial_console_base_url': 'ws://127.0.0.1:6083/'}
)
def test_serial_console_not_provided(self):
self.relation_ids.return_value = ['cloud-compute:0']
self.related_units.return_value = 'nova-cloud-controller/0'
self.test_relation.set({
'enable_serial_console': None,
})
self.assertEqual(
context.SerialConsoleContext()(),
{'enable_serial_console': 'false',
'serial_console_base_url': 'ws://127.0.0.1:6083/'}
)
def test_serial_console_provided(self):
self.relation_ids.return_value = ['cloud-compute:0']
self.related_units.return_value = 'nova-cloud-controller/0'
self.test_relation.set({
'enable_serial_console': 'true',
'serial_console_base_url': 'ws://10.10.10.1:6083/'
})
self.assertEqual(
context.SerialConsoleContext()(),
{'enable_serial_console': 'true',
'serial_console_base_url': 'ws://10.10.10.1:6083/'}
)

138
unit_tests/test_utils.py Normal file
View File

@ -0,0 +1,138 @@
# Copyright 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import unittest
import os
import yaml
from contextlib import contextmanager
from mock import patch, MagicMock
patch('charmhelpers.contrib.openstack.utils.set_os_workload_status').start()
patch('charmhelpers.core.hookenv.status_set').start()
def load_config():
'''
Walk backwards from __file__ looking for config.yaml, load and return the
'options' section.
'''
config = None
f = __file__
while config is None:
d = os.path.dirname(f)
if os.path.isfile(os.path.join(d, 'config.yaml')):
config = os.path.join(d, 'config.yaml')
break
f = d
if not config:
logging.error('Could not find config.yaml in any parent directory '
'of %s. ' % __file__)
raise Exception
return yaml.safe_load(open(config).read())['options']
def get_default_config():
'''
Load default charm config from config.yaml return as a dict.
If no default is set in config.yaml, its value is None.
'''
default_config = {}
config = load_config()
for k, v in config.iteritems():
if 'default' in v:
default_config[k] = v['default']
else:
default_config[k] = None
return default_config
class CharmTestCase(unittest.TestCase):
def setUp(self, obj, patches):
super(CharmTestCase, self).setUp()
self.patches = patches
self.obj = obj
self.test_config = TestConfig()
self.test_relation = TestRelation()
self.patch_all()
def patch(self, method):
_m = patch.object(self.obj, method)
mock = _m.start()
self.addCleanup(_m.stop)
return mock
def patch_all(self):
for method in self.patches:
setattr(self, method, self.patch(method))
class TestConfig(object):
def __init__(self):
self.config = get_default_config()
def get(self, attr=None):
if not attr:
return self.get_all()
try:
return self.config[attr]
except KeyError:
return None
def get_all(self):
return self.config
def set(self, attr, value):
if attr not in self.config:
raise KeyError
self.config[attr] = value
class TestRelation(object):
def __init__(self, relation_data={}):
self.relation_data = relation_data
def set(self, relation_data):
self.relation_data = relation_data
def get(self, attr=None, unit=None, rid=None):
if attr is None:
return self.relation_data
elif attr in self.relation_data:
return self.relation_data[attr]
return None
@contextmanager
def patch_open():
'''Patch open() to allow mocking both open() itself and the file that is
yielded.
Yields the mock for "open" and "file", respectively.'''
mock_open = MagicMock(spec=open)
mock_file = MagicMock(spec=file)
@contextmanager
def stub_open(*args, **kwargs):
mock_open(*args, **kwargs)
yield mock_file
with patch('__builtin__.open', stub_open):
yield mock_open, mock_file