Implement guestagent Configuration Manager
Facilitate code reuse by implementing a manager class that could be used by all guestagents to manage their configuration files and overrides. ConfigurationManager is responsible for management of datastore configuration. Its base functionality includes reading and writing configuration files. It is responsible for validating user inputs and requests. When supplied an override strategy it allows the user to manage configuration overrides as well. ConfigurationOverrideStrategy handles configuration files. The strategy provides functionality to enumerate, apply and remove configuration overrides (revisions). The patch set also includes functionality for reading and writing files and implements codecs for serialization and deserialization of common configuration formats. The implemented codecs get reused in the existing configuration parsers. Includes a couple of little fixes to the taskmanager that will be required by other datastores. - Do not validate value ranges if min/max is not specified in the validation rules. - Do not attempt to parse non-string configuration values in the taskmanager. Implements: blueprint guestagent-configuration-manager Change-Id: I1c940c96deb20ca722d9fd400a6ef757b2ba249f
This commit is contained in:
parent
fda9081090
commit
d37a99e584
|
@ -13,37 +13,18 @@
|
|||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import io
|
||||
from six.moves import configparser
|
||||
from trove.common import stream_codecs
|
||||
|
||||
|
||||
class MySQLConfParser(object):
    """Parse a MySQL configuration file and expose its [mysqld] section."""

    # Ini-style codec: MySQL config files use '#', ';' and '!' comment
    # markers and allow valueless options, which are read back as '1'.
    CODEC = stream_codecs.IniCodec(
        default_value='1', comment_markers=('#', ';', '!'))

    def __init__(self, config):
        self.config = config

    def parse(self):
        """Return the (option, value) pairs of the [mysqld] section."""
        parsed = self.CODEC.deserialize(self.config)
        return parsed['mysqld'].items()
|
||||
|
|
|
@ -0,0 +1,346 @@
|
|||
# Copyright 2015 Tesora Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import abc
|
||||
import ast
|
||||
import csv
|
||||
import six
|
||||
import StringIO
|
||||
import yaml
|
||||
|
||||
from ConfigParser import SafeConfigParser
|
||||
|
||||
from trove.common import utils as trove_utils
|
||||
|
||||
|
||||
class StringConverter(object):
    """A passthrough string-to-object converter.
    """

    def __init__(self, object_mappings):
        """
        :param object_mappings: string-to-object mappings
        :type object_mappings: dict
        """
        self._object_mappings = object_mappings

    def to_strings(self, items):
        """Recursively convert collection items to strings.

        :returns: Copy of the input collection with all items converted.
        """
        if trove_utils.is_collection(items):
            # Descend into nested collections.
            return map(self.to_strings, items)

        return self._to_string(items)

    def to_objects(self, items):
        """Recursively convert collection string to objects.

        :returns: Copy of the input collection with all items converted.
        """
        if trove_utils.is_collection(items):
            # Descend into nested collections.
            return map(self.to_objects, items)

        return self._to_object(items)

    def _to_string(self, value):
        # Prefer a user-defined representation (reverse lookup by identity)
        # before falling back to str().
        for text, obj in self._object_mappings.items():
            if obj is value:
                return text

        return str(value)

    def _to_object(self, value):
        # A user-defined mapping wins over literal evaluation.
        try:
            return self._object_mappings[value]
        except KeyError:
            pass

        # Interpret the string as a Python literal if possible,
        # otherwise keep it as plain text.
        try:
            return ast.literal_eval(value)
        except Exception:
            return value
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
class StreamCodec(object):
    # Abstract base for codecs translating between a Python structure
    # (typically a dict) and the plain-text stream of a config file.

    @abc.abstractmethod
    def serialize(self, data):
        """Serialize a Python object into a stream.
        """

    @abc.abstractmethod
    def deserialize(self, stream):
        """Deserialize stream data into a Python structure.
        """
|
||||
|
||||
|
||||
class IdentityCodec(StreamCodec):
    """
    A basic passthrough codec.
    Does not modify the data in any way.
    """

    def serialize(self, data):
        # Nothing to encode; return the input untouched.
        return data

    def deserialize(self, stream):
        # Nothing to decode; return the input untouched.
        return stream
|
||||
|
||||
|
||||
class YamlCodec(StreamCodec):
    """
    Read/write data from/into a YAML config file.

    a: 1
    b: {c: 3, d: 4}
    ...

    The above file content (flow-style) would be represented as:
    {'a': 1,
     'b': {'c': 3, 'd': 4,}
    ...
    }
    """

    def __init__(self, default_flow_style=False):
        """
        :param default_flow_style: Use flow-style (inline) formatting of
                                   nested collections.
        :type default_flow_style: boolean
        """
        self._default_flow_style = default_flow_style

    @property
    def loader(self):
        # Full (non-safe) loader; subclasses may substitute a safer one.
        return yaml.loader.Loader

    @property
    def dumper(self):
        # Full (non-safe) dumper; subclasses may substitute a safer one.
        return yaml.dumper.Dumper

    def serialize(self, dict_data):
        """Render the given dict as a YAML document string."""
        return yaml.dump(
            dict_data,
            Dumper=self.dumper,
            default_flow_style=self._default_flow_style)

    def deserialize(self, stream):
        """Parse a YAML document into a Python structure."""
        return yaml.load(stream, Loader=self.loader)
|
||||
|
||||
|
||||
class SafeYamlCodec(YamlCodec):
    """
    Same as YamlCodec except that it uses safe Loader and Dumper which
    encode Unicode strings and produce only basic YAML tags.
    """

    def __init__(self, default_flow_style=False):
        super(SafeYamlCodec, self).__init__(
            default_flow_style=default_flow_style)

    @property
    def loader(self):
        # Safe loader: constructs only basic Python types from the stream.
        return yaml.loader.SafeLoader

    @property
    def dumper(self):
        # Safe dumper: emits only basic YAML tags.
        return yaml.dumper.SafeDumper
|
||||
|
||||
|
||||
class IniCodec(StreamCodec):
    """
    Read/write data from/into an ini-style config file.

    [section_1]
    key = value
    key = value
    ...

    [section_2]
    key = value
    key = value
    ...

    The above file content would be represented as:
    {'section_1': {'key': 'value', 'key': 'value', ...},
     'section_2': {'key': 'value', 'key': 'value', ...}
     ...
    }
    """

    def __init__(self, default_value=None, comment_markers=('#', ';')):
        """
        :param default_value:   Default value for keys with no value.
                                If set, all keys are written as 'key = value'.
                                The key is written without trailing '=' if None.
        :type default_value:    string
        """
        self._default_value = default_value
        self._comment_markers = comment_markers

    def serialize(self, dict_data):
        # Render the two-level dict through ConfigParser into a string.
        parser = self._init_config_parser(dict_data)
        output = StringIO.StringIO()
        parser.write(output)

        return output.getvalue()

    def deserialize(self, stream):
        # Parse the (pre-cleaned) stream and substitute the default value
        # for options that had no value in the file.
        parser = self._init_config_parser()
        parser.readfp(self._pre_parse(stream))

        return {s: {k: v if v is not None else self._default_value
                    for k, v in parser.items(s, raw=True)}
                for s in parser.sections()}

    def _pre_parse(self, stream):
        # Produce a cleaned, rewound file-like object that ConfigParser
        # can consume: comments removed, surrounding whitespace stripped.
        buf = StringIO.StringIO()
        for line in StringIO.StringIO(stream):
            # Ignore commented lines.
            if not line.startswith(self._comment_markers):
                # Strip leading and trailing whitespaces from each line.
                buf.write(line.strip() + '\n')

        # Rewind the output buffer.
        buf.flush()
        buf.seek(0)

        return buf

    def _init_config_parser(self, sections=None):
        # 'allow_no_value' lets options without '=' round-trip (e.g. MySQL
        # boolean flags).
        parser = SafeConfigParser(allow_no_value=True)
        if sections:
            for section in sections:
                parser.add_section(section)
                for key, value in sections[section].items():
                    parser.set(section, key, value
                               if value is not None else self._default_value)

        return parser
|
||||
|
||||
|
||||
class PropertiesCodec(StreamCodec):
    """
    Read/write data from/into a property-style config file.

    key1 k1arg1 k1arg2 ... k1argN
    key2 k2arg1 k2arg2 ... k2argN
    key3 k3arg1 k3arg2 ...
    key3 k3arg3 k3arg4 ...
    ...

    The above file content would be represented as:
    {'key1': [k1arg1, k1arg2 ... k1argN],
     'key2': [k2arg1, k2arg2 ... k2argN]
     'key3': [[k3arg1, k3arg2, ...], [k3arg3, k3arg4, ...]]
     ...
    }
    """

    QUOTING_MODE = csv.QUOTE_MINIMAL
    STRICT_MODE = False

    def __init__(self, delimiter=' ', comment_markers=('#',),
                 unpack_singletons=True, string_mappings=None):
        """
        :param delimiter:         A one-character used to separate fields.
        :type delimiter:          string

        :param comment_markers:   List of comment markers.
        :type comment_markers:    list

        :param unpack_singletons: Whether to unpack singleton collections
                                  (collections with only a single value).
        :type unpack_singletons:  boolean

        :param string_mappings:   User-defined string representations of
                                  Python objects.
        :type string_mappings:    dict
        """
        self._delimiter = delimiter
        self._comment_markers = comment_markers
        # NOTE: 'string_mappings' deliberately defaults to None rather
        # than '{}' to avoid the shared-mutable-default-argument pitfall;
        # an empty mapping is substituted here instead.
        self._string_converter = StringConverter(string_mappings or {})
        self._unpack_singletons = unpack_singletons

    def serialize(self, dict_data):
        output = StringIO.StringIO()
        writer = csv.writer(output, delimiter=self._delimiter,
                            quoting=self.QUOTING_MODE,
                            strict=self.STRICT_MODE)

        # Sort keys so the rendered file is deterministic.
        for key, value in sorted(dict_data.items()):
            writer.writerows(self._to_rows(key, value))

        return output.getvalue()

    def deserialize(self, stream):
        reader = csv.reader(StringIO.StringIO(stream),
                            delimiter=self._delimiter,
                            quoting=self.QUOTING_MODE,
                            strict=self.STRICT_MODE)

        return self._to_dict(reader)

    def _to_dict(self, reader):
        """Fold parsed CSV rows into a dict, collecting repeated keys
        into multi-row (nested list) values.
        """
        data_dict = {}
        for row in reader:
            # Ignore comment lines.
            if row and not row[0].startswith(self._comment_markers):
                # Empty fields are read back as None.
                items = self._string_converter.to_objects(
                    [v if v else None for v in row[1:]])
                current = data_dict.get(row[0])
                if current is not None:
                    # Repeated key: accumulate another row of arguments.
                    current.append(trove_utils.unpack_singleton(items)
                                   if self._unpack_singletons else items)
                else:
                    data_dict.update({row[0]: [items]})

        if self._unpack_singletons:
            # Unpack singleton values.
            for k, v in data_dict.items():
                data_dict.update({k: trove_utils.unpack_singleton(v)})

        return data_dict

    def _to_rows(self, header, items):
        """Expand one (key, value) pair into one or more CSV rows."""
        rows = []
        if trove_utils.is_collection(items):
            if any(trove_utils.is_collection(item) for item in items):
                # This is multi-row property.
                for item in items:
                    rows.extend(self._to_rows(header, item))
            else:
                # This is a single-row property with multiple arguments.
                rows.append(self._to_list(
                    header, self._string_converter.to_strings(items)))
        else:
            # This is a single-row property with only one argument.
            rows.append(self._to_list(header, items))

        return rows

    def _to_list(self, *items):
        """Flatten arbitrarily nested collections into a single flat list."""
        container = []
        for item in items:
            if trove_utils.is_collection(item):
                # This item is a nested collection - unpack it.
                container.extend(self._to_list(*item))
            else:
                # This item is not a collection - append it to the list.
                container.append(item)

        return container
|
|
@ -14,11 +14,13 @@
|
|||
# under the License.
|
||||
"""I totally stole most of this from melange, thx guys!!!"""
|
||||
|
||||
import collections
|
||||
import datetime
|
||||
import inspect
|
||||
import os
|
||||
import shutil
|
||||
import time
|
||||
import types
|
||||
import uuid
|
||||
|
||||
from eventlet.timeout import Timeout
|
||||
|
@ -289,3 +291,23 @@ def gen_ports(portstr):
|
|||
if int(from_port) > int(to_port):
|
||||
raise ValueError
|
||||
return from_port, to_port
|
||||
|
||||
|
||||
def unpack_singleton(container):
    """Unpack singleton collections.

    Check whether a given collection is a singleton (has exactly one element)
    and unpack it if that is the case.
    Return the original collection otherwise.
    """
    if is_collection(container) and len(container) == 1:
        # Recurse so arbitrarily nested singletons unpack fully.
        return unpack_singleton(container[0])

    return container


def is_collection(item):
    """Return True if a given item is an iterable collection, but not a string.
    """
    # 'collections.Iterable' and 'types.StringTypes' are Python-2-only names
    # (the former alias was removed in Python 3.10, the latter never existed
    # on Python 3); fall back to the Python 3 equivalents so this helper
    # survives the transition.
    iterable_base = getattr(collections, 'abc', collections).Iterable
    string_types = getattr(types, 'StringTypes', (str, bytes))
    return (isinstance(item, iterable_base) and
            not isinstance(item, string_types))
|
||||
|
|
|
@ -237,33 +237,37 @@ class ConfigurationsController(wsgi.Controller):
|
|||
|
||||
# integer min/max checking
|
||||
if isinstance(v, (int, long)) and not isinstance(v, bool):
|
||||
try:
|
||||
min_value = int(rule.min_size)
|
||||
except ValueError:
|
||||
raise exception.TroveError(_(
|
||||
"Invalid or unsupported min value defined in the "
|
||||
"configuration-parameters configuration file. "
|
||||
"Expected integer."))
|
||||
if v < min_value:
|
||||
output = {"key": k, "min": min_value}
|
||||
message = _("The value for the configuration parameter "
|
||||
"%(key)s is less than the minimum allowed: "
|
||||
"%(min)s") % output
|
||||
raise exception.UnprocessableEntity(message=message)
|
||||
if rule.min_size is not None:
|
||||
try:
|
||||
min_value = int(rule.min_size)
|
||||
except ValueError:
|
||||
raise exception.TroveError(_(
|
||||
"Invalid or unsupported min value defined in the "
|
||||
"configuration-parameters configuration file. "
|
||||
"Expected integer."))
|
||||
if v < min_value:
|
||||
output = {"key": k, "min": min_value}
|
||||
message = _(
|
||||
"The value for the configuration parameter "
|
||||
"%(key)s is less than the minimum allowed: "
|
||||
"%(min)s") % output
|
||||
raise exception.UnprocessableEntity(message=message)
|
||||
|
||||
try:
|
||||
max_value = int(rule.max_size)
|
||||
except ValueError:
|
||||
raise exception.TroveError(_(
|
||||
"Invalid or unsupported max value defined in the "
|
||||
"configuration-parameters configuration file. "
|
||||
"Expected integer."))
|
||||
if v > max_value:
|
||||
output = {"key": k, "max": max_value}
|
||||
message = _("The value for the configuration parameter "
|
||||
"%(key)s is greater than the maximum "
|
||||
"allowed: %(max)s") % output
|
||||
raise exception.UnprocessableEntity(message=message)
|
||||
if rule.max_size is not None:
|
||||
try:
|
||||
max_value = int(rule.max_size)
|
||||
except ValueError:
|
||||
raise exception.TroveError(_(
|
||||
"Invalid or unsupported max value defined in the "
|
||||
"configuration-parameters configuration file. "
|
||||
"Expected integer."))
|
||||
if v > max_value:
|
||||
output = {"key": k, "max": max_value}
|
||||
message = _(
|
||||
"The value for the configuration parameter "
|
||||
"%(key)s is greater than the maximum "
|
||||
"allowed: %(max)s") % output
|
||||
raise exception.UnprocessableEntity(message=message)
|
||||
|
||||
@staticmethod
|
||||
def _find_type(value_type):
|
||||
|
@ -288,6 +292,7 @@ class ConfigurationsController(wsgi.Controller):
|
|||
|
||||
|
||||
class ParametersController(wsgi.Controller):
|
||||
|
||||
def index(self, req, tenant_id, datastore, id):
|
||||
ds, ds_version = ds_models.get_datastore_version(
|
||||
type=datastore, version=id)
|
||||
|
|
|
@ -0,0 +1,512 @@
|
|||
# Copyright 2015 Tesora Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import abc
|
||||
import os
|
||||
import six
|
||||
|
||||
from trove.common import cfg
|
||||
from trove.common import exception
|
||||
from trove.common.i18n import _
|
||||
from trove.guestagent.common import guestagent_utils
|
||||
from trove.guestagent.common import operating_system
|
||||
from trove.guestagent.common.operating_system import FileMode
|
||||
|
||||
CONF = cfg.CONF
|
||||
MANAGER = CONF.datastore_manager
|
||||
|
||||
|
||||
class ConfigurationError(exception.TroveError):
    # Raised when a configuration-override operation cannot proceed
    # (e.g. too many or no attached Configuration Groups).

    def __init__(self, msg):
        super(ConfigurationError, self).__init__(msg)
|
||||
|
||||
|
||||
class ConfigurationManager(object):
    """
    ConfigurationManager is responsible for management of
    datastore configuration.
    Its base functionality includes reading and writing configuration files.
    It is responsible for validating user inputs and requests.
    When supplied an override strategy it allows the user to manage
    configuration overrides as well.
    """

    def __init__(self, base_config_path, owner, group, codec,
                 requires_root=False):
        """
        :param base_config_path     Path to the configuration file.
        :type base_config_path      string

        :param owner                Owner of the configuration files.
        :type owner                 string

        :param group                Group of the configuration files.
        :type group                 string

        :param codec                Codec for reading/writing of the particular
                                    configuration format.
        :type codec                 StreamCodec

        :param requires_root        Whether the manager requires superuser
                                    privileges.
        :type requires_root         boolean
        """
        self._base_config_path = base_config_path
        self._owner = owner
        self._group = group
        self._codec = codec
        self._requires_root = requires_root

        # No overrides are possible until a strategy is attached.
        self._current_revision = 0
        self._max_num_overrides = 0
        self._override_strategy = None

    @property
    def override_strategy(self):
        return self._override_strategy

    @override_strategy.setter
    def override_strategy(self, value):
        # Attaching a strategy configures it and resynchronizes the
        # revision counter from any revision files already on disk.
        if value:
            value.configure(
                self._base_config_path, self._owner, self._group, self._codec,
                self._requires_root)
            # The 'system' revision does not count, hence '-1'.
            self._current_revision = max(value.count_revisions() - 1, 0)
        else:
            self._current_revision = 0

        self._max_num_overrides = 0
        self._override_strategy = value

    @property
    def current_revision(self):
        # Number of user-defined overrides currently applied.
        return self._current_revision

    @property
    def max_num_overrides(self):
        return self._max_num_overrides

    @max_num_overrides.setter
    def max_num_overrides(self, value):
        """
        Maximum number of configuration overrides that can be attached to this
        instance.
        """
        if value and value < 0:
            raise exception.UnprocessableEntity(
                _("The maximum number of attached Configuration Groups "
                  "cannot be negative."))
        self._max_num_overrides = value

    def set_override_strategy(self, strategy, max_num_overrides=1):
        """Set a strategy for management of configuration overrides.
        """
        self.override_strategy = strategy
        self.max_num_overrides = max_num_overrides

    def parse_configuration(self):
        """Read contents of the configuration file (applying overrides if any)
        and parse it into a dict.

        :returns:        Configuration file as a Python dict.
        """

        base_options = operating_system.read_file(
            self._base_config_path, codec=self._codec)

        if self._override_strategy:
            # Overlay any pending override updates on the base options.
            updates = self._override_strategy.parse_updates()
            guestagent_utils.update_dict(updates, base_options)

        return base_options

    def get_value(self, key, default=None):
        """Return value at a given key or 'default'.
        """
        config = self.parse_configuration()
        return config.get(key, default)

    def save_configuration(self, contents):
        """Write given contents to the base configuration file.
        Remove all existing revisions.

        :param contents        Plain-text contents of the configuration file.
        :type contents         string
        """
        if self._override_strategy:
            # '+1' accounts for the 'system' revision on top of the
            # user-defined ones.
            self._override_strategy.remove_last(self._current_revision + 1)

        operating_system.write_file(
            self._base_config_path, contents, as_root=self._requires_root)
        operating_system.chown(
            self._base_config_path, self._owner, self._group,
            as_root=self._requires_root)
        operating_system.chmod(
            self._base_config_path, FileMode.ADD_READ_ALL,
            as_root=self._requires_root)

    def render_configuration(self, options):
        """Write contents to the base configuration file.
        Remove all existing revisions.

        :param options        Configuration options.
        :type options         dict
        """
        self.save_configuration(self._codec.serialize(options))

    def update_configuration(self, options):
        """Update given options in the configuration.

        The updates are stored in a 'system' revision if the manager
        supports configuration overrides. Otherwise they get applied
        directly to the base configuration file.

        The 'system' revision is always applied last to ensure it
        overrides any user-specified configuration changes.
        """
        if self._override_strategy:
            # Update the system overrides.
            system_overrides = self._override_strategy.get(
                self._current_revision + 1)
            guestagent_utils.update_dict(options, system_overrides)
            # Re-apply the updated system overrides.
            self._override_strategy.remove_last(1)
            self._override_strategy.apply_next(system_overrides)
        else:
            # Update the base configuration file.
            config = self.parse_configuration()
            guestagent_utils.update_dict(options, config)
            self.render_configuration(config)

    def update_override(self, contents):
        """Same as 'apply_override' but accepts serialized
        input.

        :param contents        Plain-text contents of the configuration file.
        :type contents         string
        """
        self.apply_override(self._codec.deserialize(contents))

    def apply_override(self, options):
        """Update given options of the current configuration. The 'system'
        values will be re-applied over this override.

        :raises: :class:`ConfigurationError` if the maximum number of
                 overrides attached to this instance is exceeded.
        """
        if self._override_strategy:
            if self._current_revision < self.max_num_overrides:
                # Save off the 'system' overrides and remove the revision file.
                # apply the user-options and re-apply the system values
                # on the top of it.
                system_overrides = self._override_strategy.get(
                    self._current_revision + 1)
                self._override_strategy.remove_last(1)
                self._override_strategy.apply_next(options)
                self._override_strategy.apply_next(system_overrides)
                self._current_revision = self._current_revision + 1
            else:
                raise ConfigurationError(
                    _("This instance cannot have more than '%d' "
                      "Configuration Groups attached.")
                    % self.max_num_overrides)
        else:
            raise exception.DatastoreOperationNotSupported(
                operation='update_overrides', datastore=MANAGER)

    def remove_override(self):
        """Revert the last configuration override. This does not include the
        'system' overrides.

        :raises: :class:`ConfigurationError` if there are currently no
                 overrides attached to this instance.
        """
        if self._override_strategy:
            if self._current_revision > 0:
                # Save off the 'system' overrides, rollback the last two
                # revisions (system + the last user-defined override) and
                # re-apply the system values.
                system_overrides = self._override_strategy.get(
                    self._current_revision + 1)
                self._override_strategy.remove_last(2)
                self._override_strategy.apply_next(system_overrides)
                self._current_revision = self._current_revision - 1
            else:
                raise ConfigurationError(
                    _("This instance does not have a Configuration Group "
                      "attached."))
        else:
            raise exception.DatastoreOperationNotSupported(
                operation='update_overrides', datastore=MANAGER)
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
class ConfigurationOverrideStrategy(object):
    """ConfigurationOverrideStrategy handles configuration files.
    The strategy provides functionality to enumerate, apply and remove
    configuration overrides (revisions).
    """

    @abc.abstractmethod
    def configure(self, *args, **kwargs):
        """Configure this strategy.
        A strategy needs to be configured before it can be used.
        It would typically be configured by the ConfigurationManager.
        """

    def count_revisions(self):
        """Return the number of existing revisions.
        """
        return len(self._collect_revisions())

    @abc.abstractmethod
    def apply_next(self, options):
        """Apply given options on the current revision.
        """

    @abc.abstractmethod
    def remove_last(self, num_revisions):
        """Rollback the last 'num_revisions' of revisions.

        :param num_revisions     Number of last revisions to rollback.
                                 Rollback all if it is greater or
                                 equal to the number of existing revisions.
        :type num_revisions      int
        """

    def _list_all_files(self, root_dir, pattern):
        # Non-recursive listing of revision files matching 'pattern'.
        return operating_system.list_files_in_directory(
            root_dir, recursive=False, pattern=pattern)

    def parse_updates(self):
        """Return all updates applied to the base revision as a single dict.
        Return an empty dict if the base file is always the most current
        version of configuration.

        :returns:        Updates to the base revision as a Python dict.
        """
        return {}

    @abc.abstractmethod
    def get(self, revision):
        """Return parsed contents of a given revision.

        :returns:        Contents of the last revision file as a Python dict.
        """

    @abc.abstractmethod
    def _collect_revisions(self):
        """Collect and return a sorted list of paths to existing revision
        files. The files should be sorted in the same order in which
        they were applied.
        """
|
||||
|
||||
|
||||
class RollingOverrideStrategy(ConfigurationOverrideStrategy):
|
||||
"""Rolling strategy maintains a single configuration file.
|
||||
It applies overrides in-place always backing-up the current revision of
|
||||
the file so that it can be restored when the override is removed.
|
||||
It appends a revision number to the backup file name so that they
|
||||
can be restored in the order opposite to in which they were applied.
|
||||
"""
|
||||
|
||||
_BACKUP_EXT = 'old'
|
||||
_BACKUP_NAME_PATTERN = '^.*\.[1-9]+.%s$' % _BACKUP_EXT
|
||||
|
||||
def __init__(self, revision_backup_dir):
|
||||
"""
|
||||
:param revision_backup_dir Path to the directory for revision
|
||||
backups.
|
||||
:type revision_backup_dir string
|
||||
"""
|
||||
self._revision_backup_dir = revision_backup_dir
|
||||
|
||||
def configure(self, base_config_path, owner, group, codec, requires_root):
|
||||
"""
|
||||
:param base_config_path Path to the configuration file.
|
||||
:type base_config_path string
|
||||
|
||||
:param owner Owner of the configuration and
|
||||
backup files.
|
||||
:type owner string
|
||||
|
||||
:param group Group of the configuration and
|
||||
backup files.
|
||||
:type group string
|
||||
|
||||
:param codec Codec for reading/writing of the particular
|
||||
configuration format.
|
||||
:type codec StreamCodec
|
||||
|
||||
:param requires_root Whether the strategy requires superuser
|
||||
privileges.
|
||||
:type requires_root boolean
|
||||
"""
|
||||
self._base_config_path = base_config_path
|
||||
self._owner = owner
|
||||
self._group = group
|
||||
self._codec = codec
|
||||
self._requires_root = requires_root
|
||||
self._base_config_name = os.path.basename(base_config_path)
|
||||
|
||||
def apply_next(self, options):
|
||||
revision_num = self.count_revisions() + 1
|
||||
old_revision_backup = guestagent_utils.build_file_path(
|
||||
self._revision_backup_dir, self._base_config_name,
|
||||
str(revision_num), self._BACKUP_EXT)
|
||||
operating_system.copy(self._base_config_path, old_revision_backup,
|
||||
force=True, preserve=True,
|
||||
as_root=self._requires_root)
|
||||
current = operating_system.read_file(self._base_config_path,
|
||||
codec=self._codec)
|
||||
guestagent_utils.update_dict(options, current)
|
||||
operating_system.write_file(
|
||||
self._base_config_path, current, codec=self._codec,
|
||||
as_root=self._requires_root)
|
||||
operating_system.chown(
|
||||
self._base_config_path, self._owner, self._group,
|
||||
as_root=self._requires_root)
|
||||
operating_system.chmod(
|
||||
self._base_config_path, FileMode.ADD_READ_ALL,
|
||||
as_root=self._requires_root)
|
||||
|
||||
def remove_last(self, num_revisions):
|
||||
count = self.count_revisions()
|
||||
revision_files = self._delete_revisions(min(count, num_revisions) - 1)
|
||||
if revision_files:
|
||||
operating_system.move(revision_files[-1], self._base_config_path,
|
||||
force=True, as_root=self._requires_root)
|
||||
|
||||
def _delete_revisions(self, num_revisions):
    """Delete the last 'num_revisions' backup files.

    :returns: The paths of the surviving (not deleted) backups, in order.
    """
    all_revisions = self._collect_revisions()
    if num_revisions <= 0:
        # Nothing to delete; hand back a fresh copy of the full list.
        return list(all_revisions)

    doomed = all_revisions[-num_revisions:]
    for revision_path in doomed:
        operating_system.remove(revision_path, force=True,
                                as_root=self._requires_root)

    return [path for path in all_revisions if path not in doomed]
|
||||
|
||||
def get(self, revision):
    """Return the options introduced by the given revision.

    Expressed as the difference between that revision's backup and the
    current base configuration; an empty dict when no backups exist.
    """
    all_revisions = self._collect_revisions()
    if not all_revisions:
        return {}

    # Return the difference between this revision and the current base.
    snapshot = operating_system.read_file(all_revisions[revision - 1],
                                          codec=self._codec)
    base = operating_system.read_file(self._base_config_path,
                                      codec=self._codec)

    return guestagent_utils.dict_difference(snapshot, base)
|
||||
|
||||
def _collect_revisions(self):
    """Return all revision backup paths ordered by revision number.

    Backups are named '<base name>.<revision>.<backup ext>' (see
    'apply_next'), so sort numerically on the revision component —
    a plain lexicographic sort would order revision 10 before 2.
    """
    return sorted(
        self._list_all_files(
            self._revision_backup_dir, self._BACKUP_NAME_PATTERN),
        key=lambda path: int(
            os.path.basename(path).rsplit(os.extsep, 2)[1]))
|
||||
|
||||
|
||||
class ImportOverrideStrategy(ConfigurationOverrideStrategy):
    """Import strategy keeps overrides in separate files that get imported
    into the base configuration file which never changes itself.
    An override file is simply deleted when the override is removed.
    A revision number is appended to each import file name so that the
    files can be restored in the order opposite to in which they were
    applied.
    """

    def __init__(self, revision_dir, revision_ext):
        """
        :param revision_dir  Path to the directory for import files.
        :type revision_dir   string

        :param revision_ext  Extension of revision files.
        :type revision_ext   string
        """
        self._revision_dir = revision_dir
        self._revision_ext = revision_ext
        # Match '<base name>.<revision number>.<revision ext>'.
        # Raw string with both dots escaped; '[1-9][0-9]*' accepts
        # multi-digit revision numbers (the previous '[1-9]+' could
        # never match a revision containing a zero, e.g. 10).
        self._import_name_pattern = r'^.*\.[1-9][0-9]*\.%s$' % revision_ext

    def configure(self, base_config_path, owner, group, codec, requires_root):
        """
        :param base_config_path  Path to the configuration file.
        :type base_config_path   string

        :param owner             Owner of the configuration and
                                 revision files.
        :type owner              string

        :param group             Group of the configuration and
                                 revision files.
        :type group              string

        :param codec             Codec for reading/writing of the particular
                                 configuration format.
        :type codec              StreamCodec

        :param requires_root     Whether the strategy requires superuser
                                 privileges.
        :type requires_root      boolean
        """
        self._base_config_path = base_config_path
        self._owner = owner
        self._group = group
        self._codec = codec
        self._requires_root = requires_root
        self._base_config_name = os.path.basename(base_config_path)

    def apply_next(self, options):
        # Write the overrides into a new import file named after the next
        # free revision number; the base configuration is left untouched.
        revision_num = self.count_revisions() + 1
        revision_file_path = guestagent_utils.build_file_path(
            self._revision_dir, self._base_config_name, str(revision_num),
            self._revision_ext)
        operating_system.write_file(
            revision_file_path, options,
            codec=self._codec, as_root=self._requires_root)
        operating_system.chown(revision_file_path, self._owner, self._group,
                               as_root=self._requires_root)
        operating_system.chmod(revision_file_path, FileMode.ADD_READ_ALL,
                               as_root=self._requires_root)

    def remove_last(self, num_revisions):
        # Removing an override amounts to deleting its import file(s);
        # there is nothing to restore in the base configuration.
        revision_files = self._collect_revisions()
        if num_revisions > 0:
            for path in revision_files[-num_revisions:]:
                operating_system.remove(path, force=True,
                                        as_root=self._requires_root)

    def get(self, revision):
        # The import file holds exactly the options of that revision.
        revision_files = self._collect_revisions()
        if revision_files:
            revision_file = revision_files[revision - 1]
            return operating_system.read_file(revision_file,
                                              codec=self._codec)

        return {}

    def parse_updates(self):
        # Merge all import files in the order they were applied.
        parsed_options = {}
        for path in self._collect_revisions():
            options = operating_system.read_file(path, codec=self._codec)
            guestagent_utils.update_dict(options, parsed_options)

        return parsed_options

    def _collect_revisions(self):
        # Sort numerically on the revision component of the file name;
        # a plain lexicographic sort would order revision 10 before 2.
        return sorted(
            self._list_all_files(
                self._revision_dir, self._import_name_pattern),
            key=lambda path: int(
                os.path.basename(path).rsplit(os.extsep, 2)[1]))
|
|
@ -0,0 +1,61 @@
|
|||
# Copyright 2015 Tesora Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import collections
|
||||
import os
|
||||
|
||||
|
||||
def update_dict(updates, target):
    """Recursively update a target dictionary with given updates.

    Updates are provided as a dictionary of key-value pairs
    where a value can also be a nested dictionary in which case
    its key is treated as a sub-section of the outer key.
    If a list value is encountered the update is applied
    iteratively on all its items.

    :returns: The updated 'target' (which is also modified in place).
    """
    if isinstance(target, list):
        # Apply the same updates to every item of the list.
        for index, item in enumerate(target):
            target[index] = update_dict(updates, item)
        return target

    # 'Mapping' lives in 'collections.abc' on Python 3 (the alias in
    # 'collections' was removed in 3.10) and directly in 'collections'
    # on Python 2; resolve whichever is available.
    mapping_type = getattr(collections, 'abc', collections).Mapping
    # 'items()' (not the Python 2-only 'iteritems()') keeps this portable.
    for key, value in updates.items():
        if isinstance(value, mapping_type):
            # Nested dict: merge recursively into the matching sub-section.
            target[key] = update_dict(value, target.get(key, {}))
        else:
            target[key] = value

    return target
|
||||
|
||||
|
||||
def build_file_path(base_dir, base_name, *extensions):
    """Build a path to a file in a given directory.
    The file may have an extension(s).

    :returns: Path such as: 'base_dir/base_name.ext1.ext2.ext3'
    """
    name_parts = [base_name]
    name_parts.extend(extensions)
    # 'os.extsep' is the platform's extension separator (a dot).
    return os.path.join(base_dir, os.extsep.join(name_parts))
|
||||
|
||||
|
||||
def dict_difference(a, b):
    """Recursively compute an asymmetric difference between dicts 'a' and 'b'.
    By 'difference' we mean the items that were either changed or added in 'b'.

    :returns: A new dict holding only the changed/added items of 'b'.
    """
    # 'Mapping' lives in 'collections.abc' on Python 3 (the alias in
    # 'collections' was removed in 3.10) and directly in 'collections'
    # on Python 2; resolve whichever is available.
    mapping_type = getattr(collections, 'abc', collections).Mapping
    difference = {}
    for key, value in b.items():
        if key not in a or a[key] != value:
            if isinstance(value, mapping_type):
                # Recurse against an empty dict when the key is new in 'b';
                # indexing 'a[key]' directly raised KeyError on a brand-new
                # nested section.
                difference[key] = dict_difference(a.get(key, {}), value)
            else:
                difference[key] = value

    return difference
|
|
@ -16,12 +16,16 @@
|
|||
import inspect
|
||||
import operator
|
||||
import os
|
||||
import re
|
||||
import stat
|
||||
import tempfile
|
||||
|
||||
from functools import reduce
|
||||
from oslo_concurrency.processutils import UnknownArgumentError
|
||||
|
||||
from trove.common import exception
|
||||
from trove.common.i18n import _
|
||||
from trove.common.stream_codecs import IdentityCodec
|
||||
from trove.common import utils
|
||||
|
||||
REDHAT = 'redhat'
|
||||
|
@ -29,6 +33,79 @@ DEBIAN = 'debian'
|
|||
SUSE = 'suse'
|
||||
|
||||
|
||||
def read_file(path, codec=IdentityCodec()):
    """
    Read a file into a Python data structure
    digestible by 'write_file'.

    :param path             Path to the read config file.
    :type path              string

    :param codec:           A codec used to deserialize the data.
    :type codec:            StreamCodec

    :returns: A dictionary of key-value pairs.

    :raises: :class:`UnprocessableEntity` if file doesn't exist.
    :raises: :class:`UnprocessableEntity` if codec not given.
    """
    # Guard clause: bail out early on a missing or empty path.
    if not (path and os.path.exists(path)):
        raise exception.UnprocessableEntity(
            _("File does not exist: %s") % path)

    with open(path, 'r') as fp:
        return codec.deserialize(fp.read())
|
||||
|
||||
|
||||
def write_file(path, data, codec=IdentityCodec(), as_root=False):
    """Write data into file using a given codec.
    Overwrite any existing contents.
    The written file can be read back into its original
    form by 'read_file'.

    :param path             Path to the written config file.
    :type path              string

    :param data:            An object representing the file contents.
    :type data:             object

    :param codec:           A codec used to serialize the data.
    :type codec:            StreamCodec

    :param as_root:         Execute as root.
    :type as_root:          boolean

    :raises: :class:`UnprocessableEntity` if path not given.
    """
    if path:
        if as_root:
            # Superuser writes go through a temporary file (see
            # '_write_file_as_root').
            _write_file_as_root(path, data, codec)
        else:
            # NOTE(review): buffering=0 ('open(path, "w", 0)') is only valid
            # for binary mode on Python 3 — confirm the target runtime.
            with open(path, 'w', 0) as fp:
                fp.write(codec.serialize(data))
    else:
        raise exception.UnprocessableEntity(_("Invalid path: %s") % path)
|
||||
|
||||
|
||||
def _write_file_as_root(path, data, codec=IdentityCodec()):
    """Write a file as root. Overwrite any existing contents.

    The data is first serialized into a regular-user temporary file,
    which is then copied over the target path with elevated privileges.

    :param path             Path to the written file.
    :type path              string

    :param data:            An object representing the file contents.
    :type data:             object

    :param codec:           A codec used to serialize the data.
    :type codec:            StreamCodec
    """
    # 'delete=False' is required so that the file still exists when it is
    # copied over the target; it is therefore NOT removed automatically
    # and has to be cleaned up explicitly below.
    with tempfile.NamedTemporaryFile('w', 0, delete=False) as fp:
        fp.write(codec.serialize(data))
        fp.close()  # Release the resource before proceeding.
        try:
            copy(fp.name, path, force=True, as_root=True)
        finally:
            # Remove the temporary file (the previous implementation
            # leaked it on every call).
            os.remove(fp.name)
|
||||
|
||||
|
||||
class FileMode(object):
|
||||
"""
|
||||
Represent file permissions (or 'modes') that can be applied on a filesystem
|
||||
|
@ -198,6 +275,7 @@ def service_discovery(service_candidates):
|
|||
result['cmd_enable'] = "sudo systemctl enable %s" % service
|
||||
result['cmd_disable'] = "sudo systemctl disable %s" % service
|
||||
break
|
||||
|
||||
return result
|
||||
|
||||
|
||||
|
@ -487,3 +565,23 @@ def _build_command_options(options):
|
|||
"""
|
||||
|
||||
return ['-' + item[0] for item in options if item[1]]
|
||||
|
||||
|
||||
def list_files_in_directory(root_dir, recursive=False, pattern=None):
    """
    Return absolute paths to all files in a given root directory.

    :param root_dir   Path to the root directory.
    :type root_dir    string

    :param recursive  Also probe subdirectories if True.
    :type recursive   boolean

    :param pattern    Return only files matching the pattern.
    :type pattern     string
    """
    matches = set()
    for current_dir, _subdirs, file_names in os.walk(root_dir, topdown=True):
        # Without 'recursive' only the root directory itself is considered.
        if recursive or current_dir == root_dir:
            for file_name in file_names:
                if not pattern or re.match(pattern, file_name):
                    matches.add(os.path.abspath(
                        os.path.join(current_dir, file_name)))

    return matches
|
||||
|
|
|
@ -1340,8 +1340,11 @@ class BuiltInstanceTasks(BuiltInstance, NotifyMixin, ConfigurationMixin):
|
|||
val = None
|
||||
restart_required = inst_models.InstanceTasks.RESTART_REQUIRED
|
||||
self.update_db(task_status=restart_required)
|
||||
if val:
|
||||
if val and isinstance(val, basestring):
|
||||
overrides[item.configuration_key] = _convert_value(val)
|
||||
else:
|
||||
overrides[item.configuration_key] = val
|
||||
|
||||
LOG.debug("setting the default variables in dict: %s" % overrides)
|
||||
self.update_overrides(overrides, remove=True)
|
||||
|
||||
|
|
|
@ -61,3 +61,17 @@ class TestTroveExecuteWithTimeout(trove_testtools.TestCase):
|
|||
utils.LOG.error.assert_called_with(
|
||||
u"Command 'test' failed. test-desc Exit code: 42\n"
|
||||
"stderr: err\nstdout: out")
|
||||
|
||||
def test_unpack_singleton(self):
    """Exercise 'utils.unpack_singleton' on singleton and non-singleton
    inputs, including nested singleton lists.
    """
    # (input, expected) pairs; same cases as before, table-driven.
    cases = [
        ([1, 2, 3], [1, 2, 3]),
        ([0], 0),
        ('test', 'test'),
        (['test'], 'test'),
        ([], []),
        (None, None),
        ([None, None], [None, None]),
        ([['test']], 'test'),
        ([[1, 2, 3]], [1, 2, 3]),
        ([[[1]]], 1),
        ([[1], [2]], [[1], [2]]),
        (['a', 'b'], ['a', 'b']),
    ]
    for given, expected in cases:
        self.assertEqual(expected, utils.unpack_singleton(given))
||||
|
|
|
@ -0,0 +1,577 @@
|
|||
# Copyright 2015 Tesora Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import getpass
|
||||
from mock import DEFAULT
|
||||
from mock import MagicMock
|
||||
from mock import Mock
|
||||
from mock import patch
|
||||
import os
|
||||
import tempfile
|
||||
from testtools.testcase import ExpectedException
|
||||
from trove.common import exception
|
||||
from trove.common.stream_codecs import IniCodec
|
||||
from trove.guestagent.common.configuration import ConfigurationError
|
||||
from trove.guestagent.common.configuration import ConfigurationManager
|
||||
from trove.guestagent.common.configuration import ImportOverrideStrategy
|
||||
from trove.guestagent.common.configuration import RollingOverrideStrategy
|
||||
from trove.guestagent.common import operating_system
|
||||
from trove.guestagent.common.operating_system import FileMode
|
||||
from trove.tests.unittests import trove_testtools
|
||||
|
||||
|
||||
class TestConfigurationManager(trove_testtools.TestCase):
    """Unit tests for ConfigurationManager using mocked file operations."""

    @patch.multiple('trove.guestagent.common.operating_system',
                    read_file=DEFAULT, write_file=DEFAULT,
                    chown=DEFAULT, chmod=DEFAULT)
    def test_read_write_configuration(self, read_file, write_file,
                                      chown, chmod):
        """Verify that read/write operations delegate to operating_system
        with the configured path, codec, ownership and permissions.
        """
        sample_path = Mock()
        sample_owner = Mock()
        sample_group = Mock()
        sample_codec = MagicMock()
        sample_requires_root = Mock()

        manager = ConfigurationManager(
            sample_path, sample_owner, sample_group, sample_codec,
            requires_root=sample_requires_root)

        # Parsing reads the base file with the configured codec.
        manager.parse_configuration()
        read_file.assert_called_with(sample_path, codec=sample_codec)

        # 'get_value' looks keys up in the parsed contents; missing
        # keys yield None.
        with patch.object(manager, 'parse_configuration',
                          return_value={'key1': 'v1', 'key2': 'v2'}):
            self.assertEqual('v1', manager.get_value('key1'))
            self.assertEqual(None, manager.get_value('key3'))

        # Saving writes the contents and restores ownership/permissions.
        sample_contents = Mock()
        manager.save_configuration(sample_contents)
        write_file.assert_called_with(
            sample_path, sample_contents, as_root=sample_requires_root)

        chown.assert_called_with(sample_path, sample_owner, sample_group,
                                 as_root=sample_requires_root)
        chmod.assert_called_with(
            sample_path, FileMode.ADD_READ_ALL, as_root=sample_requires_root)

        # Rendering serializes via the codec and then saves the result.
        sample_options = Mock()
        with patch.object(manager, 'save_configuration') as save_config:
            manager.render_configuration(sample_options)
            save_config.assert_called_once_with(
                sample_codec.serialize.return_value)
            sample_codec.serialize.assert_called_once_with(sample_options)

        # With an override strategy attached, saving a new base first rolls
        # back all existing revisions (current revision + 1).
        with patch('trove.guestagent.common.configuration.'
                   'ConfigurationOverrideStrategy') as mock_strategy:
            manager.set_override_strategy(mock_strategy)
            manager._current_revision = 3
            manager.save_configuration(sample_contents)
            mock_strategy.remove_last.assert_called_once_with(
                manager._current_revision + 1)
            write_file.assert_called_with(
                sample_path, sample_contents, as_root=sample_requires_root)

    @patch(
        'trove.guestagent.common.configuration.ConfigurationOverrideStrategy')
    def test_configuration_manager(self, mock_strategy):
        """Verify override bookkeeping: operations require a strategy,
        revisions are counted up/down, and limits are enforced.
        """
        mock_strategy.count_revisions.return_value = 0
        manager = ConfigurationManager(Mock(), Mock(), Mock(), Mock())

        # Overrides are not supported until a strategy is attached.
        with ExpectedException(exception.DatastoreOperationNotSupported):
            manager.update_override({})

        with ExpectedException(exception.DatastoreOperationNotSupported):
            manager.remove_override()

        manager.set_override_strategy(mock_strategy, 1)

        self.assertEqual(1, manager.max_num_overrides)
        self.assertEqual(0, manager.current_revision)

        # The maximum number of overrides must not be negative.
        with ExpectedException(
                exception.UnprocessableEntity,
                "The maximum number of attached Configuration Groups cannot "
                "be negative."):
            manager.max_num_overrides = -1

        manager.max_num_overrides = 2

        self.assertEqual(2, manager.max_num_overrides)

        # Each applied override bumps the current revision by one.
        self.assertEqual(0, manager.current_revision)
        manager.update_override({})
        self.assertEqual(1, manager.current_revision)
        manager.update_override({})
        self.assertEqual(2, manager.current_revision)

        # Exceeding the configured maximum raises ConfigurationError.
        with ExpectedException(
                ConfigurationError, "This instance cannot have more than "
                "'2' Configuration Groups attached."):
            manager.update_override({})

        # Removing overrides decrements the revision counter.
        self.assertEqual(2, manager.current_revision)
        manager.remove_override()
        self.assertEqual(1, manager.current_revision)
        manager.update_override({})
        self.assertEqual(2, manager.current_revision)
        manager.remove_override()
        self.assertEqual(1, manager.current_revision)
        manager.remove_override()
        self.assertEqual(0, manager.current_revision)

        # Removing with no overrides attached is an error.
        with ExpectedException(
                ConfigurationError,
                "This instance does not have a Configuration Group attached."):
            manager.remove_override()

        self.assertEqual(0, manager.current_revision)

        # Detaching the strategy resets the override bookkeeping.
        manager.override_strategy = None

        self.assertEqual(0, manager.max_num_overrides)
        self.assertEqual(0, manager.current_revision)
|
||||
|
||||
|
||||
class TestConfigurationOverrideStrategy(trove_testtools.TestCase):
|
||||
|
||||
def setUp(self):
    """Initialize the list of temporary paths cleaned up by tearDown."""
    trove_testtools.TestCase.setUp(self)
    # Paths are removed in LIFO order on teardown.
    self._temp_files_paths = []
|
||||
|
||||
def tearDown(self):
    """Remove all temporary paths registered during the test."""
    import shutil

    trove_testtools.TestCase.tearDown(self)

    # Remove temporary files in the LIFO order.
    while self._temp_files_paths:
        path = self._temp_files_paths.pop()
        try:
            if os.path.isdir(path):
                # '_create_temp_dir' registers directories ('mkdtemp');
                # 'os.remove' cannot delete those, which used to leak
                # them silently via the broad except below.
                shutil.rmtree(path, ignore_errors=True)
            else:
                os.remove(path)
        except Exception:
            pass  # Do not fail in cleanup.
|
||||
|
||||
def _create_temp_dir(self):
    """Create a temporary directory and register it for cleanup."""
    temp_dir = tempfile.mkdtemp()
    self._temp_files_paths.append(temp_dir)
    return temp_dir
|
||||
|
||||
def test_rolling_override_strategy(self):
    """Apply and roll back overrides with RollingOverrideStrategy against
    a real temporary INI file, checking the merged contents at each step.
    """
    base_config_contents = {'Section_1': {'name': 'pi',
                                          'is_number': 'True',
                                          'value': '3.1415'}
                            }

    config_overrides_v1 = {'Section_1': {'name': 'sqrt(2)',
                                         'value': '1.4142'}
                           }

    # v1 merged into the base: 'is_number' survives unchanged.
    expected_contents_v1 = {'Section_1': {'name': 'sqrt(2)',
                                          'is_number': 'True',
                                          'value': '1.4142'}
                            }

    config_overrides_v2 = {'Section_1': {'is_number': 'False'}}

    # v2 merged on top of v1.
    expected_contents_v2 = {'Section_1': {'name': 'sqrt(2)',
                                          'is_number': 'False',
                                          'value': '1.4142'}
                            }

    config_overrides_seq = [config_overrides_v1, config_overrides_v2]
    # Expected file contents indexed by revision (0 = no overrides).
    expected_contents_seq = [base_config_contents, expected_contents_v1,
                             expected_contents_v2]

    codec = IniCodec()
    current_user = getpass.getuser()
    backup_config_dir = self._create_temp_dir()

    with tempfile.NamedTemporaryFile() as base_config:

        # Write initial config contents.
        operating_system.write_file(
            base_config.name, base_config_contents, codec)

        strategy = RollingOverrideStrategy(backup_config_dir)
        strategy.configure(
            base_config.name, current_user, current_user, codec, False)

        self._assert_rolling_override_strategy(
            strategy, config_overrides_seq, expected_contents_seq)
|
||||
|
||||
def _assert_rolling_override_strategy(
        self, strategy, config_overrides_seq, expected_contents_seq):
    """Drive a configured RollingOverrideStrategy through apply/rollback
    cycles and assert backups and base contents at every revision.

    'expected_contents_seq[r]' is the expected base file contents after
    'r' overrides have been applied (index 0 = pristine base).
    """

    def build_backup_path(revision):
        # Backups are named '<base name>.<revision>.old' in the
        # strategy's backup directory.
        base_name = os.extsep.join(
            [os.path.basename(strategy._base_config_path),
             str(revision), 'old'])
        return os.path.join(
            strategy._revision_backup_dir, base_name)

    # Test apply and rollback in sequence.
    ######################################

    # Apply a sequence of overrides.
    for revision, override in enumerate(config_overrides_seq, 1):

        expected_backup_path = build_backup_path(revision)

        # Apply overrides.
        strategy.apply_next(override)

        # Check there is a backup of the old config file.
        self.assertTrue(os.path.exists(expected_backup_path),
                        "Backup revision '%d' does not exist." % revision)

        # Load overridden contents.
        overriden = operating_system.read_file(
            strategy._base_config_path, strategy._codec)

        # Load backed up contents.
        backedup = operating_system.read_file(
            expected_backup_path, strategy._codec)

        # Assert that the config has the overridden contents.
        self.assertEqual(expected_contents_seq[revision], overriden)

        # Assert that the backup matches the previous config contents.
        self.assertEqual(expected_contents_seq[revision - 1], backedup)

    # Rollback the applied overrides (newest first).
    for revision, _ in reversed(
            [e for e in enumerate(config_overrides_seq, 1)]):

        expected_backup_path = build_backup_path(revision)

        # Remove last overrides.
        strategy.remove_last(1)

        # Check that the backup was removed.
        self.assertFalse(
            os.path.exists(expected_backup_path),
            "Backup revision '%d' was not removed." %
            revision)

        # Re-load restored contents.
        restored = operating_system.read_file(
            strategy._base_config_path, strategy._codec)

        # Assert that the config was reverted to the previous state.
        self.assertEqual(expected_contents_seq[revision - 1], restored)

    # Test rollback all.
    ####################

    # Apply a sequence of overrides.
    for override in config_overrides_seq:
        strategy.apply_next(override)

    num_revisions = strategy.count_revisions()

    # Check that we have an expected number of revisions.
    self.assertEqual(len(config_overrides_seq), num_revisions)

    # Rollback all revisions at once (one more than exist is tolerated).
    strategy.remove_last(num_revisions + 1)

    # Check that there are no revisions.
    self.assertEqual(0, strategy.count_revisions())

    # Check that all backups were removed.
    for revision, _ in reversed(
            [e for e in enumerate(config_overrides_seq, 1)]):
        expected_backup_path = build_backup_path(revision)
        self.assertFalse(
            os.path.exists(expected_backup_path),
            "Backup revision '%d' was not removed." % revision)

    # Re-load restored contents.
    restored = operating_system.read_file(
        strategy._base_config_path, strategy._codec)

    # Assert that the config was reverted to the previous state.
    self.assertEqual(expected_contents_seq[0], restored)
|
||||
|
||||
def test_import_override_strategy(self):
    """Apply and roll back overrides with ImportOverrideStrategy against
    a real temporary INI file; the base file must never change.
    """
    base_config_contents = {'Section_1': {'name': 'pi',
                                          'is_number': 'True',
                                          'value': '3.1415'}
                            }

    config_overrides_v1 = {'Section_1': {'name': 'sqrt(2)',
                                         'value': '1.4142'}
                           }

    config_overrides_v2 = {'Section_1': {'is_number': 'False'}}

    config_overrides_seq = [config_overrides_v1, config_overrides_v2]
    # The import strategy never rewrites the base file, so the expected
    # base contents are the same at every revision.
    expected_contents_seq = [base_config_contents, base_config_contents,
                             base_config_contents]

    codec = IniCodec()
    current_user = getpass.getuser()
    revision_dir = self._create_temp_dir()

    with tempfile.NamedTemporaryFile() as base_config:

        # Write initial config contents.
        operating_system.write_file(
            base_config.name, base_config_contents, codec)

        strategy = ImportOverrideStrategy(revision_dir, 'ext')
        strategy.configure(
            base_config.name, current_user, current_user, codec, False)

        self._assert_import_override_strategy(
            strategy, config_overrides_seq, expected_contents_seq)
|
||||
|
||||
def _assert_import_override_strategy(
        self, strategy, config_overrides_seq, expected_contents_seq):
    """Drive a configured ImportOverrideStrategy through apply/rollback
    cycles, asserting that import files appear/disappear and that the
    base configuration file stays untouched throughout.
    """

    def build_revision_path(revision):
        # Import files are named '<base name>.<revision>.<revision ext>'
        # in the strategy's revision directory.
        base_name = os.extsep.join(
            [os.path.basename(strategy._base_config_path),
             str(revision), strategy._revision_ext])
        return os.path.join(
            strategy._revision_dir, base_name)

    # Test apply and rollback in sequence.
    ######################################

    # Apply a sequence of overrides.
    for revision, override in enumerate(config_overrides_seq, 1):

        expected_import_path = build_revision_path(revision)

        # Apply overrides.
        strategy.apply_next(override)

        # Check there is a new import file.
        self.assertTrue(os.path.exists(expected_import_path),
                        "Revision import '%d' does not exist." % revision)

        # Load base config contents.
        base = operating_system.read_file(
            strategy._base_config_path, strategy._codec)

        # Load import contents.
        imported = operating_system.read_file(
            expected_import_path, strategy._codec)

        # Assert that the base config did not change.
        self.assertEqual(expected_contents_seq[revision], base)

        # Assert that the import contents match the overrides.
        self.assertEqual(override, imported)

    # Rollback the applied overrides (newest first).
    for revision, _ in reversed(
            [e for e in enumerate(config_overrides_seq, 1)]):

        expected_import_path = build_revision_path(revision)

        # Remove last overrides.
        strategy.remove_last(1)

        # Check that the import was removed.
        self.assertFalse(
            os.path.exists(expected_import_path),
            "Revision import '%d' was not removed." %
            revision)

        # Re-load base config contents.
        base = operating_system.read_file(
            strategy._base_config_path, strategy._codec)

        # Assert that the base config did not change.
        self.assertEqual(expected_contents_seq[revision - 1], base)

    # Test rollback all.
    ####################

    # Apply a sequence of overrides.
    for override in config_overrides_seq:
        strategy.apply_next(override)

    num_revisions = strategy.count_revisions()

    # Check that we have an expected number of revisions.
    self.assertEqual(len(config_overrides_seq), num_revisions)

    # Rollback all revisions at once (one more than exist is tolerated).
    strategy.remove_last(num_revisions + 1)

    # Check that there are no revisions.
    self.assertEqual(0, strategy.count_revisions())

    # Check that all imports were removed.
    for revision, _ in reversed(
            [e for e in enumerate(config_overrides_seq, 1)]):
        expected_backup_path = build_revision_path(revision)
        self.assertFalse(
            os.path.exists(expected_backup_path),
            "Revision import '%d' was not removed." % revision)

    # Re-load base config contents.
    base = operating_system.read_file(
        strategy._base_config_path, strategy._codec)

    # Assert that the base config did not change.
    self.assertEqual(expected_contents_seq[0], base)
|
||||
|
||||
def test_get_value(self):
    """Run the get_value scenario against both override strategies."""
    revision_dir = self._create_temp_dir()
    self._assert_get_value(RollingOverrideStrategy(revision_dir))
    self._assert_get_value(ImportOverrideStrategy(revision_dir, 'ext'))
|
||||
|
||||
def _assert_get_value(self, override_strategy):
    """Assert ConfigurationManager.get_value before, during and after
    applying overrides with the given strategy.
    """
    base_config_contents = {'Section_1': {'name': 'pi',
                                          'is_number': 'True',
                                          'value': '3.1415'}
                            }

    config_overrides_v1 = {'Section_1': {'name': 'sqrt(2)',
                                         'value': '1.4142'}
                           }

    # v2 also introduces a brand-new section.
    config_overrides_v2 = {'Section_1': {'name': 'e',
                                         'value': '2.7183'},
                           'Section_2': {'foo': 'bar'}
                           }

    codec = IniCodec()
    current_user = getpass.getuser()

    with tempfile.NamedTemporaryFile() as base_config:

        # Write initial config contents.
        operating_system.write_file(
            base_config.name, base_config_contents, codec)

        manager = ConfigurationManager(
            base_config.name, current_user, current_user, codec,
            requires_root=False)

        # Allow at most two attached configuration overrides.
        manager.set_override_strategy(override_strategy, 2)

        # Test default value (missing section yields None or the given
        # default).
        self.assertEqual(None, manager.get_value('Section_2'))
        self.assertEqual('foo', manager.get_value('Section_2', 'foo'))

        # Test value before applying overrides.
        self.assertEqual('pi', manager.get_value('Section_1')['name'])
        self.assertEqual('3.1415', manager.get_value('Section_1')['value'])

        # Test value after applying overrides.
        manager.apply_override(config_overrides_v1)
        self.assertEqual('sqrt(2)', manager.get_value('Section_1')['name'])
        self.assertEqual('1.4142', manager.get_value('Section_1')['value'])
        manager.apply_override(config_overrides_v2)
        self.assertEqual('e', manager.get_value('Section_1')['name'])
        self.assertEqual('2.7183', manager.get_value('Section_1')['value'])
        self.assertEqual('bar', manager.get_value('Section_2')['foo'])

        # Test value after removing overrides (LIFO restore).
        manager.remove_override()
        self.assertEqual('sqrt(2)', manager.get_value('Section_1')['name'])
        self.assertEqual('1.4142', manager.get_value('Section_1')['value'])
        manager.remove_override()
        self.assertEqual('pi', manager.get_value('Section_1')['name'])
        self.assertEqual('3.1415', manager.get_value('Section_1')['value'])
        self.assertEqual(None, manager.get_value('Section_2'))
|
||||
|
||||
def test_update_configuration(self):
    """Run the update_configuration scenario against both strategies."""
    revision_dir = self._create_temp_dir()
    self._assert_update_configuration(
        RollingOverrideStrategy(revision_dir))
    self._assert_update_configuration(
        ImportOverrideStrategy(revision_dir, 'ext'))
|
||||
|
||||
def _assert_update_configuration(self, override_strategy):
    """Exercise the full ConfigurationManager life cycle with the given
    override strategy: update the base configuration, apply and remove
    override revisions, and verify values and the revision counter at
    every step.
    """
    base_config_contents = {'Section_1': {'name': 'pi',
                                          'is_number': 'True',
                                          'value': '3.1415'}
                            }

    config_overrides_v1 = {'Section_1': {'name': 'sqrt(2)',
                                         'value': '1.4142'}
                           }

    config_overrides_v2 = {'Section_1': {'name': 'e',
                                         'value': '2.7183'},
                           'Section_2': {'foo': 'bar'}
                           }

    codec = IniCodec()
    current_user = getpass.getuser()

    with tempfile.NamedTemporaryFile() as base_config:

        # Write initial config contents.
        operating_system.write_file(
            base_config.name, base_config_contents, codec)

        manager = ConfigurationManager(
            base_config.name, current_user, current_user, codec,
            requires_root=False)

        # Direct updates to the base configuration; as asserted below,
        # these do not bump 'current_revision'.
        manager.update_configuration({'System': {'name': 'c',
                                                 'is_number': 'True',
                                                 'value': 'N/A'}})

        # NOTE(review): the '2' is presumably the maximum number of
        # stacked revisions for the strategy — confirm against
        # set_override_strategy's signature.
        manager.set_override_strategy(override_strategy, 2)

        # Test value before applying overrides.
        self.assertEqual('pi', manager.get_value('Section_1')['name'])
        self.assertEqual('3.1415', manager.get_value('Section_1')['value'])
        self.assertEqual('N/A', manager.get_value('System')['value'])
        self.assertEqual(0, manager.current_revision)

        manager.update_configuration({'System': {'value': '300000000'}})
        self.assertEqual('300000000', manager.get_value('System')['value'])
        self.assertEqual(0, manager.current_revision)

        # Test value after applying overrides.
        manager.apply_override(config_overrides_v1)
        self.assertEqual('sqrt(2)', manager.get_value('Section_1')['name'])
        self.assertEqual('1.4142', manager.get_value('Section_1')['value'])
        self.assertEqual('300000000', manager.get_value('System')['value'])
        self.assertEqual(1, manager.current_revision)

        # Base-configuration updates keep working while overrides are
        # active, without changing the revision counter.
        manager.update_configuration({'System': {'value': '299792458'}})

        manager.apply_override(config_overrides_v2)
        self.assertEqual('e', manager.get_value('Section_1')['name'])
        self.assertEqual('2.7183', manager.get_value('Section_1')['value'])
        self.assertEqual('bar', manager.get_value('Section_2')['foo'])
        self.assertEqual('299792458', manager.get_value('System')['value'])
        self.assertEqual(2, manager.current_revision)

        # Test value after removing overrides.
        # Each remove_override() pops the most recent revision.
        manager.remove_override()
        self.assertEqual('sqrt(2)', manager.get_value('Section_1')['name'])
        self.assertEqual('1.4142', manager.get_value('Section_1')['value'])
        self.assertEqual(1, manager.current_revision)

        manager.update_configuration({'System': {'value': '299792458'}})

        manager.remove_override()
        self.assertEqual('pi', manager.get_value('Section_1')['name'])
        self.assertEqual('3.1415', manager.get_value('Section_1')['value'])
        self.assertEqual(None, manager.get_value('Section_2'))
        self.assertEqual(0, manager.current_revision)

        manager.update_configuration({'System': {'value': 'N/A'}})
        self.assertEqual('N/A', manager.get_value('System')['value'])
        self.assertEqual(0, manager.current_revision)
|
|
@ -0,0 +1,130 @@
|
|||
# Copyright 2015 Tesora Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from trove.guestagent.common import guestagent_utils
|
||||
from trove.tests.unittests import trove_testtools
|
||||
|
||||
|
||||
class TestGuestagentUtils(trove_testtools.TestCase):
    """Unit tests for the guestagent_utils helper functions."""

    def test_update_dict(self):
        """update_dict merges 'updates' into 'data' and returns it."""
        # Degenerate inputs: either side empty.
        self.assertEqual({}, guestagent_utils.update_dict({}, {}))
        self.assertEqual(
            {'key': 'value'},
            guestagent_utils.update_dict({}, {'key': 'value'}))
        self.assertEqual(
            {'key': 'value'},
            guestagent_utils.update_dict({'key': 'value'}, {}))

        # A nested, Cassandra-style configuration document.
        data = {
            'rpc_address': "0.0.0.0",
            'broadcast_rpc_address': '0.0.0.0',
            'listen_address': '0.0.0.0',
            'seed_provider': [{
                'class_name':
                    'org.apache.cassandra.locator.SimpleSeedProvider',
                'parameters': [{'seeds': '0.0.0.0'}]}],
        }

        # Update a top-level scalar and a value nested inside a
        # list-of-dicts structure.
        merged = guestagent_utils.update_dict(
            {'rpc_address': "127.0.0.1",
             'seed_provider': {'parameters': {'seeds': '127.0.0.1'}}},
            data)
        self.assertEqual(
            {'rpc_address': "127.0.0.1",
             'broadcast_rpc_address': '0.0.0.0',
             'listen_address': '0.0.0.0',
             'seed_provider': [{
                 'class_name':
                     'org.apache.cassandra.locator.SimpleSeedProvider',
                 'parameters': [{'seeds': '127.0.0.1'}]}],
             },
            merged)

        # Supplying a list value replaces the list wholesale, dropping
        # keys absent from the update.  NOTE: 'data' was mutated in place
        # by the previous call, hence 'rpc_address' is still '127.0.0.1'.
        merged = guestagent_utils.update_dict(
            {'seed_provider': [{
                'class_name':
                    'org.apache.cassandra.locator.SimpleSeedProvider'}]},
            data)
        self.assertEqual(
            {'rpc_address': "127.0.0.1",
             'broadcast_rpc_address': '0.0.0.0',
             'listen_address': '0.0.0.0',
             'seed_provider': [{
                 'class_name':
                     'org.apache.cassandra.locator.SimpleSeedProvider'}],
             },
            merged)

        # Redis-style data: a list of lists is also replaced as a whole.
        merged = guestagent_utils.update_dict(
            {'save': [[900, 1], [300, 10], [60, 10000]]},
            {'timeout': 0, 'save': [[900, 1], [300, 10]]})
        self.assertEqual(
            {'timeout': 0, 'save': [[900, 1], [300, 10], [60, 10000]]},
            merged)

    def test_build_file_path(self):
        """build_file_path joins a directory, a base name and any number
        of extensions.
        """
        cases = [
            (('base_dir', 'base_name'),
             'base_dir/base_name'),
            (('base_dir', 'base_name', 'ext1'),
             'base_dir/base_name.ext1'),
            (('base_dir', 'base_name', 'ext1', 'ext2'),
             'base_dir/base_name.ext1.ext2'),
        ]
        for args, expected in cases:
            self.assertEqual(
                expected, guestagent_utils.build_file_path(*args))

    def test_dict_difference(self):
        """dict_difference returns the entries of the second dict that are
        missing from (or differ in) the first, recursing into nested dicts.
        """
        cases = [
            (({}, {}), {}),
            (({'a': 1, 'b': 2, 'c': 3}, {'a': 1, 'b': 2, 'c': 3}), {}),
            (({'a': 1, 'b': 2, 'c': 3}, {}), {}),
            (({}, {'a': 1, 'b': 2, 'c': 3}), {'a': 1, 'b': 2, 'c': 3}),
            (({'a': 1, 'b': 2, 'c': 3}, {'a': 1, 'c': 3}), {}),
            (({'a': 1, 'c': 3}, {'a': 1, 'b': 2, 'c': 3}), {'b': 2}),
            # Nested dicts: only the differing leaves are reported.
            (({'a': {'a1': 1, 'a2': 2}, 'c': {'c1': 1, 'c2': 2}},
              {'a': {'a1': 1, 'a2': 2}, 'b': 2, 'c': {'c1': 2, 'c2': 2}}),
             {'b': 2, 'c': {'c1': 2}}),
            # Lists are compared as opaque values, not element-wise.
            (({'a': [1, 2, 3]}, {'a': [4, 5, 6], 'b': 2}),
             {'a': [4, 5, 6], 'b': 2}),
        ]
        for (base, ref), expected in cases:
            self.assertEqual(
                expected, guestagent_utils.dict_difference(base, ref))
|
|
@ -15,14 +15,19 @@
|
|||
|
||||
import itertools
import os
import re
import shutil
import stat
import tempfile

from mock import call, patch
from oslo_concurrency.processutils import UnknownArgumentError
from testtools import ExpectedException

from trove.common import exception
from trove.common.stream_codecs import (
    IdentityCodec, IniCodec, PropertiesCodec, YamlCodec)
from trove.common import utils
from trove.guestagent.common import guestagent_utils
from trove.guestagent.common import operating_system
from trove.guestagent.common.operating_system import FileMode
from trove.tests.unittests import trove_testtools
|
||||
|
@ -30,6 +35,124 @@ from trove.tests.unittests import trove_testtools
|
|||
|
||||
class TestOperatingSystem(trove_testtools.TestCase):
|
||||
|
||||
def test_identity_file_codec(self):
    """IdentityCodec must round-trip raw string data unchanged."""
    payload = "Lorem Ipsum, Lorem Ipsum\n" * 3
    self._test_file_codec(payload, IdentityCodec())
|
||||
|
||||
def test_ini_file_codec(self):
    """IniCodec round-trips INI sections; None values need special care."""
    section_1 = {"s1k1": 's1v1', "s1k2": '3.1415926535'}

    # Plain round-trip with no None values involved.
    self._test_file_codec(
        {"Section1": dict(section_1),
         "Section2": {"s2k1": '1', "s2k2": 'True'}},
        IniCodec())

    data_with_none = {"Section1": dict(section_1),
                      "Section2": {"s2k1": '1',
                                   "s2k2": 'True',
                                   "s2k3": None}}

    # Keys with None values are written without a value and read back
    # as None.
    self._test_file_codec(data_with_none, IniCodec())

    # When a 'default_value' is configured, None is replaced by it on
    # write, so the value read back is the default rather than None.
    default_value = '1'
    expected_data = guestagent_utils.update_dict(
        {"Section2": {"s2k3": default_value}}, dict(data_with_none))
    self._test_file_codec(data_with_none,
                          IniCodec(default_value=default_value),
                          expected_data=expected_data)
|
||||
|
||||
def test_yaml_file_codec(self):
    """YamlCodec round-trips nested mappings in both flow styles."""
    data = {
        "Section1": 's1v1',
        "Section2": {"s2k1": '1',
                     "s2k2": 'True'},
        "Section3": {"Section4": {"s4k1": '3.1415926535',
                                  "s4k2": None}},
    }
    for codec in (YamlCodec(), YamlCodec(default_flow_style=True)):
        self._test_file_codec(data, codec)
|
||||
|
||||
def test_properties_file_codec(self):
    """PropertiesCodec round-trips heterogeneous values, including lists,
    nested lists, scalars and None, with and without string mappings.
    """
    data = {
        'key1': [1, "str1", '127.0.0.1', 3.1415926535, True, None],
        'key2': [2.0, 3, 0, "str1 str2"],
        'key3': ['str1', 'str2'],
        'key4': [],
        'key5': 5000,
        'key6': 'str1',
        'key7': 0,
        'key8': None,
        'key9': [['str1', 'str2'], ['str3', 'str4']],
        'key10': [['str1', 'str2', 'str3'], ['str3', 'str4'], 'str5'],
    }
    codecs = (
        PropertiesCodec(),
        PropertiesCodec(
            string_mappings={'yes': True, 'no': False, "''": None}),
    )
    for codec in codecs:
        self._test_file_codec(data, codec)
|
||||
|
||||
def _test_file_codec(self, data, read_codec, write_codec=None,
                     expected_data=None,
                     expected_exception=None):
    """Write 'data' to a temporary file with 'write_codec' and read it
    back with 'read_codec'.

    If 'expected_exception' is given, only assert that the write/read
    pair raises it.  Otherwise compare the data read back against
    'expected_data' when provided, or the original 'data'.
    """
    write_codec = write_codec or read_codec

    with tempfile.NamedTemporaryFile() as test_file:
        if expected_exception:
            with expected_exception:
                operating_system.write_file(test_file.name, data,
                                            codec=write_codec)
                operating_system.read_file(test_file.name,
                                           codec=read_codec)
            return

        operating_system.write_file(test_file.name, data,
                                    codec=write_codec)
        actual = operating_system.read_file(test_file.name,
                                            codec=read_codec)
        expected = expected_data if expected_data is not None else data
        self.assertEqual(expected, actual)
|
||||
|
||||
def test_read_write_file_input_validation(self):
    """read_file and write_file must reject missing files and bad paths."""
    unreadable = [
        (None, "File does not exist: None"),
        ('/__DOES_NOT_EXIST__',
         "File does not exist: /__DOES_NOT_EXIST__"),
    ]
    for path, message in unreadable:
        with ExpectedException(exception.UnprocessableEntity, message):
            operating_system.read_file(path)

    with ExpectedException(exception.UnprocessableEntity,
                           "Invalid path: None"):
        operating_system.write_file(None, {})
|
||||
|
||||
@patch.object(operating_system, 'copy')
def test_write_file_as_root(self, copy_mock):
    # write_file(as_root=True) is expected to write the payload into a
    # temporary file first and then 'copy' it into place with elevated
    # privileges.
    target_file = tempfile.NamedTemporaryFile()
    temp_file = tempfile.NamedTemporaryFile()

    # Force write_file's internal temporary file to be the one we hold,
    # so its name and fate can be checked afterwards.
    with patch('tempfile.NamedTemporaryFile', return_value=temp_file):
        operating_system.write_file(
            target_file.name, "Lorem Ipsum", as_root=True)
    copy_mock.assert_called_once_with(
        temp_file.name, target_file.name, force=True, as_root=True)
    # The intermediate temporary file must have been removed.
    self.assertFalse(os.path.exists(temp_file.name))
|
||||
|
||||
@patch.object(operating_system, 'copy',
              side_effect=Exception("Error while executing 'copy'."))
def test_write_file_as_root_with_error(self, copy_mock):
    # Even when the privileged 'copy' step fails, write_file must still
    # clean up the intermediate temporary file.
    target_file = tempfile.NamedTemporaryFile()
    temp_file = tempfile.NamedTemporaryFile()
    with patch('tempfile.NamedTemporaryFile', return_value=temp_file):
        with ExpectedException(Exception, "Error while executing 'copy'."):
            operating_system.write_file(target_file.name,
                                        "Lorem Ipsum", as_root=True)
    self.assertFalse(os.path.exists(temp_file.name))
|
||||
|
||||
def test_modes(self):
|
||||
self._assert_modes(None, None, None, operating_system.FileMode())
|
||||
self._assert_modes(None, None, None,
|
||||
|
@ -625,6 +748,86 @@ class TestOperatingSystem(trove_testtools.TestCase):
|
|||
|
||||
def test_file_discovery(self):
    """file_discovery should return the first candidate path that exists."""
    # isfile answers False for the first candidate and True for the
    # second. The side_effect list holds exactly two values, so the
    # duplicated second call present before would have exhausted the
    # mock and raised StopIteration; file_discovery must be called once.
    with patch.object(os.path, 'isfile', side_effect=[False, True]):
        config_file = operating_system.file_discovery(
            ["/etc/mongodb.conf", "/etc/mongod.conf"])
    self.assertEqual('/etc/mongod.conf', config_file)
|
||||
|
||||
def test_list_files_in_directory(self):
    """list_files_in_directory should honor the 'recursive' flag and the
    optional file-name 'pattern'.
    """
    root_path = tempfile.mkdtemp()
    try:
        all_paths = set()
        # Three directory levels, three files each of '.txt', '.py' and
        # extension-less per level: 9 files per level, 27 in total.
        self._create_temp_fs_structure(
            root_path, 3, 3, ['txt', 'py', ''], 1, all_paths)

        # All files in the top directory.
        self._assert_list_files(root_path, False, None, all_paths, 9)

        # All files recursive.
        self._assert_list_files(root_path, True, None, all_paths, 27)

        # Only '*.txt' in the top directory.
        # (Raw strings: '\.' in a non-raw literal is an invalid escape.)
        self._assert_list_files(root_path, False, r'.*\.txt$', all_paths, 3)

        # Only '*.txt' recursive.
        self._assert_list_files(root_path, True, r'.*\.txt$', all_paths, 9)

        # Only extension-less files in the top directory.
        self._assert_list_files(root_path, False, r'[^\.]*$', all_paths, 3)

        # Only extension-less files recursive.
        self._assert_list_files(root_path, True, r'[^\.]*$', all_paths, 9)

        # Non-existing extension in the top directory.
        self._assert_list_files(root_path, False, r'.*\.bak$', all_paths, 0)

        # Non-existing extension recursive.
        self._assert_list_files(root_path, True, r'.*\.bak$', all_paths, 0)
    finally:
        # os.remove() cannot delete a directory; the old cleanup raised
        # on every run and was silently swallowed, leaking the tree.
        # Remove it recursively instead, still without failing cleanup.
        shutil.rmtree(root_path, ignore_errors=True)
|
||||
|
||||
def _assert_list_files(self, root, recursive, pattern, all_paths, count):
    """Compare list_files_in_directory's output for 'root' against the
    subset of 'all_paths' selected by 'recursive' and 'pattern', and
    check the expected 'count'.
    """
    found = operating_system.list_files_in_directory(
        root, recursive=recursive, pattern=pattern)

    def _selected(path):
        # Non-recursive listings only cover direct children of 'root'.
        in_scope = recursive or os.path.dirname(path) == root
        # A missing pattern matches every file name.
        name_ok = not pattern or re.match(pattern, os.path.basename(path))
        return in_scope and name_ok

    expected = {path for path in all_paths if _selected(path)}
    self.assertEqual(expected, found)
    self.assertEqual(count, len(found),
                     "Incorrect number of listed files.")
|
||||
|
||||
def _create_temp_fs_structure(self, root_path,
                              num_levels, num_files_per_extension,
                              file_extensions, level, created_paths):
    """Recursively build a directory tree 'num_levels' deep under
    'root_path', populating every level with temporary files and
    collecting all created file paths into 'created_paths'.
    """
    created_paths.update(self._create_temp_files(
        root_path, num_files_per_extension, file_extensions))

    # Descend one more level until the requested depth is reached.
    if level < num_levels:
        subdir = tempfile.mkdtemp(dir=root_path)
        self._create_temp_fs_structure(
            subdir, num_levels, num_files_per_extension,
            file_extensions, level + 1, created_paths)
|
||||
|
||||
def _create_temp_files(self, root_path, num_files_per_extension,
|
||||
file_extensions):
|
||||
"""Create 'num_files_per_extension' temporary files
|
||||
per each of the given extensions.
|
||||
"""
|
||||
files = set()
|
||||
for ext in file_extensions:
|
||||
for fileno in range(1, num_files_per_extension + 1):
|
||||
prefix = str(fileno)
|
||||
suffix = os.extsep + ext if ext else ''
|
||||
_, path = tempfile.mkstemp(prefix=prefix, suffix=suffix,
|
||||
dir=root_path)
|
||||
files.add(path)
|
||||
|
||||
return files
|
||||
|
|
Loading…
Reference in New Issue