Fix [H405] pep rule in heat/engine

Fix [H405] rule in heat/engine Python files.

Implements bp docstring-improvements

Change-Id: Iaa1541eb03c4db837ef3a0e4eb22393ba32e270f
Peter Razumovsky 2015-09-17 18:16:53 +03:00
parent a2ec480f3c
commit 2da170c435
25 changed files with 643 additions and 800 deletions
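
For context, H405 is the OpenStack hacking check "multi line docstring summary not separated with an empty line"; the closely related H404 expects the summary to sit on the same line as the opening quotes. The pattern applied throughout the diff below is the same everywhere: replace ''' with """, start with a one-line summary, leave a blank line, then continue with the details. A rough illustration of the before/after shape (describe_widget is a made-up example, not a function from heat/engine):

# Shape the commit moves away from: the docstring opens with ''' and a
# bare newline, with no one-line summary separated from the details.
def describe_widget(widget):
    '''
    Return a description of the widget, assembled from its name and its
    current state.
    '''
    return '%s (%s)' % (widget['name'], widget['state'])


# Shape the commit moves to: a one-line summary next to the opening
# quotes, then a blank line, then the longer explanation.
def describe_widget_h405(widget):
    """Return a description of the widget.

    The longer explanation sits after a blank line, which is what H405
    ("multi line docstring summary not separated with an empty line")
    checks for.
    """
    return '%s (%s)' % (widget['name'], widget['state'])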

View File

@ -28,11 +28,12 @@ LOG = logging.getLogger(__name__)
def extract_args(params):
'''
"""Extract arguments passed as parameters and return them as a dictionary.
Extract any arguments passed as parameters through the API and return them
as a dictionary. This allows us to filter the passed args and do type
conversion where appropriate
'''
"""
kwargs = {}
timeout_mins = params.get(rpc_api.PARAM_TIMEOUT)
if timeout_mins not in ('0', 0, None):
@ -170,10 +171,11 @@ def translate_filters(params):
def format_stack_outputs(stack, outputs):
'''
"""Return a representation of the given output template.
Return a representation of the given output template for the given stack
that matches the API output expectations.
'''
"""
def format_stack_output(k):
output = {
rpc_api.OUTPUT_DESCRIPTION: outputs[k].get('Description',
@ -189,10 +191,11 @@ def format_stack_outputs(stack, outputs):
def format_stack(stack, preview=False):
'''
"""Return a representation of the given stack.
Return a representation of the given stack that matches the API output
expectations.
'''
"""
updated_time = stack.updated_time and stack.updated_time.isoformat()
created_time = stack.created_time or timeutils.utcnow()
info = {
@ -222,7 +225,7 @@ def format_stack(stack, preview=False):
info.update(update_info)
# allow users to view the outputs of stacks
if (stack.action != stack.DELETE and stack.status != stack.IN_PROGRESS):
if stack.action != stack.DELETE and stack.status != stack.IN_PROGRESS:
info[rpc_api.STACK_OUTPUTS] = format_stack_outputs(stack,
stack.outputs)
@ -273,10 +276,11 @@ def format_resource_properties(resource):
def format_stack_resource(resource, detail=True, with_props=False,
with_attr=None):
'''
"""Return a representation of the given resource.
Return a representation of the given resource that matches the API output
expectations.
'''
"""
created_time = resource.created_time and resource.created_time.isoformat()
last_updated_time = (resource.updated_time and
resource.updated_time.isoformat()) or created_time
@ -432,8 +436,7 @@ def format_watch_data(wd):
def format_validate_parameter(param):
"""
Format a template parameter for validate template API call
"""Format a template parameter for validate template API call.
Formats a template parameter and its schema information from the engine's
internal representation (i.e. a Parameter object and its associated

View File

@ -27,10 +27,9 @@ LOG = logging.getLogger(__name__)
class Schema(constr.Schema):
"""
Simple schema class for attributes.
"""Simple schema class for attributes.
Schema objects are serialisable to dictionaries following a superset of
Schema objects are serializable to dictionaries following a superset of
the HOT input Parameter schema using dict().
"""
@ -76,29 +75,22 @@ class Schema(constr.Schema):
@classmethod
def from_attribute(cls, schema_dict):
"""
Return a Property Schema corresponding to a Attribute Schema.
"""
"""Return a Property Schema corresponding to a Attribute Schema."""
msg = 'Old attribute schema is not supported'
assert isinstance(schema_dict, cls), msg
return schema_dict
def schemata(schema):
"""
Return dictionary of Schema objects for given dictionary of schemata.
"""
"""Return dictionary of Schema objects for given dictionary of schemata."""
return dict((n, Schema.from_attribute(s)) for n, s in schema.items())
class Attribute(object):
"""
An Attribute schema.
"""
"""An Attribute schema."""
def __init__(self, attr_name, schema):
"""
Initialise with a name and description.
"""Initialise with a name and schema.
:param attr_name: the name of the attribute
:param schema: attribute schema
@ -110,9 +102,7 @@ class Attribute(object):
return self.schema.support_status
def as_output(self, resource_name, template_type='cfn'):
"""
Return an Output schema entry for a provider template with the given
resource name.
"""Output entry for a provider template with the given resource name.
:param resource_name: the logical name of the provider resource
:param template_type: the template type to generate
@ -151,7 +141,8 @@ class Attributes(collections.Mapping):
@staticmethod
def as_outputs(resource_name, resource_class, template_type='cfn'):
"""
"""Dict of Output entries for a provider template with resource name.
:param resource_name: logical name of the resource
:param resource_class: resource implementation class
:returns: The attributes of the specified resource_class as a template
@ -266,13 +257,12 @@ class DynamicSchemeAttributes(Attributes):
def select_from_attribute(attribute_value, path):
'''
Select an element from an attribute value.
"""Select an element from an attribute value.
:param attribute_value: the attribute value.
:param path: a list of path components to select from the attribute.
:returns: the selected attribute component value.
'''
"""
def get_path_component(collection, key):
if not isinstance(collection, (collections.Mapping,
collections.Sequence)):

View File

@ -34,10 +34,9 @@ MEMOIZE = core.get_memoization_decorator(conf=cfg.CONF,
class Schema(collections.Mapping):
"""
Schema base class for validating properties or parameters.
"""Schema base class for validating properties or parameters.
Schema objects are serialisable to dictionaries following a superset of
Schema objects are serializable to dictionaries following a superset of
the HOT input Parameter schema using dict().
Serialises to JSON in the form::
@ -245,8 +244,7 @@ class Schema(collections.Mapping):
class AnyIndexDict(collections.Mapping):
"""
A Mapping that returns the same value for any integer index.
"""A Mapping that returns the same value for any integer index.
Used for storing the schema for a list. When converted to a dictionary,
it contains a single item with the key '*'.
@ -271,10 +269,9 @@ class AnyIndexDict(collections.Mapping):
class Constraint(collections.Mapping):
"""
Parent class for constraints on allowable values for a Property.
"""Parent class for constraints on allowable values for a Property.
Constraints are serialisable to dictionaries following the HOT input
Constraints are serializable to dictionaries following the HOT input
Parameter constraints schema using dict().
"""
@ -326,10 +323,9 @@ class Constraint(collections.Mapping):
class Range(Constraint):
"""
Constrain values within a range.
"""Constrain values within a range.
Serialises to JSON as::
Serializes to JSON as::
{
'range': {'min': <min>, 'max': <max>},
@ -394,10 +390,9 @@ class Range(Constraint):
class Length(Range):
"""
Constrain the length of values within a range.
"""Constrain the length of values within a range.
Serialises to JSON as::
Serializes to JSON as::
{
'length': {'min': <min>, 'max': <max>},
@ -439,10 +434,9 @@ class Length(Range):
class AllowedValues(Constraint):
"""
Constrain values to a predefined set.
"""Constrain values to a predefined set.
Serialises to JSON as::
Serializes to JSON as::
{
'allowed_values': [<allowed1>, <allowed2>, ...],
@ -486,10 +480,9 @@ class AllowedValues(Constraint):
class AllowedPattern(Constraint):
"""
Constrain values to a predefined regular expression pattern.
"""Constrain values to a predefined regular expression pattern.
Serialises to JSON as::
Serializes to JSON as::
{
'allowed_pattern': <pattern>,
@ -522,9 +515,7 @@ class AllowedPattern(Constraint):
class CustomConstraint(Constraint):
"""
A constraint delegating validation to an external class.
"""
"""A constraint delegating validation to an external class."""
valid_types = (Schema.STRING_TYPE, Schema.INTEGER_TYPE, Schema.NUMBER_TYPE,
Schema.BOOLEAN_TYPE, Schema.LIST_TYPE)

View File

@ -26,105 +26,101 @@ class CircularDependencyException(exception.HeatException):
@six.python_2_unicode_compatible
class Node(object):
'''A node in a dependency graph.'''
"""A node in a dependency graph."""
def __init__(self, requires=None, required_by=None):
'''
"""Initialisation of the node.
Initialise the node, optionally with a set of keys this node
requires and/or a set of keys that this node is required by.
'''
"""
self.require = requires and requires.copy() or set()
self.satisfy = required_by and required_by.copy() or set()
def copy(self):
'''Return a copy of the node.'''
"""Return a copy of the node."""
return Node(self.require, self.satisfy)
def reverse_copy(self):
'''Return a copy of the node with the edge directions reversed.'''
"""Return a copy of the node with the edge directions reversed."""
return Node(self.satisfy, self.require)
def required_by(self, source=None):
'''
List the keys that require this node, and optionally add a
new one.
'''
"""List the keys that require this node, and optionally add new one."""
if source is not None:
self.satisfy.add(source)
return iter(self.satisfy)
def requires(self, target=None):
'''
Add a key that this node requires, and optionally add a
new one.
'''
"""Add a key that this node requires, and optionally add a new one."""
if target is not None:
self.require.add(target)
return iter(self.require)
def __isub__(self, target):
'''Remove a key that this node requires.'''
"""Remove a key that this node requires."""
self.require.remove(target)
return self
def __nonzero__(self):
'''Return True if this node is not a leaf (it requires other nodes).'''
"""Return True if this node is not a leaf (it requires other nodes)."""
return bool(self.require)
def __bool__(self):
'''Return True if this node is not a leaf (it requires other nodes).'''
"""Return True if this node is not a leaf (it requires other nodes)."""
return self.__nonzero__()
def stem(self):
'''Return True if this node is a stem (required by nothing).'''
"""Return True if this node is a stem (required by nothing)."""
return not bool(self.satisfy)
def disjoint(self):
'''Return True if this node is both a leaf and a stem.'''
"""Return True if this node is both a leaf and a stem."""
return (not self) and self.stem()
def __len__(self):
'''Count the number of keys required by this node.'''
"""Count the number of keys required by this node."""
return len(self.require)
def __iter__(self):
'''Iterate over the keys required by this node.'''
"""Iterate over the keys required by this node."""
return iter(self.require)
def __str__(self):
'''Return a human-readable string representation of the node.'''
"""Return a human-readable string representation of the node."""
text = '{%s}' % ', '.join(str(n) for n in self)
return six.text_type(text)
def __repr__(self):
'''Return a string representation of the node.'''
"""Return a string representation of the node."""
return repr(self.require)
@six.python_2_unicode_compatible
class Graph(collections.defaultdict):
'''A mutable mapping of objects to nodes in a dependency graph.'''
"""A mutable mapping of objects to nodes in a dependency graph."""
def __init__(self, *args):
super(Graph, self).__init__(Node, *args)
def map(self, func):
'''
"""A dict mapping the supplied function onto each node in the graph.
Return a dictionary derived from mapping the supplied function onto
each node in the graph.
'''
"""
return dict((k, func(n)) for k, n in self.items())
def copy(self):
'''Return a copy of the graph.'''
"""Return a copy of the graph."""
return Graph(self.map(lambda n: n.copy()))
def reverse_copy(self):
'''Return a copy of the graph with the edges reversed.'''
"""Return a copy of the graph with the edges reversed."""
return Graph(self.map(lambda n: n.reverse_copy()))
def edges(self):
'''Return an iterator over all of the edges in the graph.'''
"""Return an iterator over all of the edges in the graph."""
def outgoing_edges(rqr, node):
if node.disjoint():
yield (rqr, None)
@ -135,7 +131,7 @@ class Graph(collections.defaultdict):
for i in six.iteritems(self))
def __delitem__(self, key):
'''Delete the node given by the specified key from the graph.'''
"""Delete the node given by the specified key from the graph."""
node = self[key]
for src in node.required_by():
@ -146,18 +142,17 @@ class Graph(collections.defaultdict):
return super(Graph, self).__delitem__(key)
def __str__(self):
'''Convert the graph to a human-readable string.'''
"""Convert the graph to a human-readable string."""
pairs = ('%s: %s' % (str(k), str(v)) for k, v in six.iteritems(self))
text = '{%s}' % ', '.join(pairs)
return six.text_type(text)
@staticmethod
def toposort(graph):
'''
Return a topologically sorted iterator over a dependency graph.
"""Return a topologically sorted iterator over a dependency graph.
This is a destructive operation for the graph.
'''
"""
for iteration in six.moves.xrange(len(graph)):
for key, node in six.iteritems(graph):
if not node:
@ -172,20 +167,20 @@ class Graph(collections.defaultdict):
@six.python_2_unicode_compatible
class Dependencies(object):
'''Helper class for calculating a dependency graph.'''
"""Helper class for calculating a dependency graph."""
def __init__(self, edges=None):
'''
Initialise, optionally with a list of edges, in the form of
(requirer, required) tuples.
'''
"""Initialise, optionally with a list of edges.
Each edge has the form of a (requirer, required) tuple.
"""
edges = edges or []
self._graph = Graph()
for e in edges:
self += e
def __iadd__(self, edge):
'''Add another edge, in the form of a (requirer, required) tuple.'''
"""Add another edge, in the form of a (requirer, required) tuple."""
requirer, required = edge
if required is None:
@ -198,28 +193,25 @@ class Dependencies(object):
return self
def required_by(self, last):
'''
List the keys that require the specified node.
'''
"""List the keys that require the specified node."""
if last not in self._graph:
raise KeyError
return self._graph[last].required_by()
def requires(self, target):
'''
List the keys that require the specified node.
'''
"""List the keys that require the specified node."""
if target not in self._graph:
raise KeyError
return self._graph[target].requires()
def __getitem__(self, last):
'''
"""Partial dependency graph consisting of the specified node.
Return a partial dependency graph consisting of the specified node and
all those that require it only.
'''
"""
if last not in self._graph:
raise KeyError
@ -244,25 +236,20 @@ class Dependencies(object):
return Dependencies(edges)
def leaves(self):
'''
Return an iterator over all of the leaf nodes in the graph.
'''
"""Return an iterator over all of the leaf nodes in the graph."""
return (requirer for requirer, required in self._graph.items()
if not required)
def roots(self):
'''
Return an iterator over all of the root nodes in the graph.
'''
"""Return an iterator over all of the root nodes in the graph."""
return (requirer for requirer, required in self.graph(
reverse=True).items() if not required)
def translate(self, transform):
'''
Translate all of the nodes using a transform function.
"""Translate all of the nodes using a transform function.
Returns a new Dependencies object.
'''
"""
def transform_key(key):
return transform(key) if key is not None else None
@ -270,29 +257,27 @@ class Dependencies(object):
return type(self)(tuple(map(transform_key, e)) for e in edges)
def __str__(self):
'''
Return a human-readable string representation of the dependency graph
'''
"""Return a human-readable string repr of the dependency graph."""
return six.text_type(self._graph)
def __repr__(self):
'''Return a consistent string representation of the object.'''
"""Return a consistent string representation of the object."""
edge_reprs = list(repr(e) for e in self._graph.edges())
edge_reprs.sort()
text = 'Dependencies([%s])' % ', '.join(edge_reprs)
return text
def graph(self, reverse=False):
'''Return a copy of the underlying dependency graph.'''
"""Return a copy of the underlying dependency graph."""
if reverse:
return self._graph.reverse_copy()
else:
return self._graph.copy()
def __iter__(self):
'''Return a topologically sorted iterator.'''
"""Return a topologically sorted iterator."""
return Graph.toposort(self.graph())
def __reversed__(self):
'''Return a reverse topologically sorted iterator.'''
"""Return a reverse topologically sorted iterator."""
return Graph.toposort(self.graph(reverse=True))
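
The docstrings above describe the dependency-graph API end to end: edges are added as (requirer, required) tuples, requires() and required_by() list the two directions, forward iteration is topologically sorted, and reversed() walks the graph the other way. A rough usage sketch of that surface, assuming heat.engine.dependencies is importable from this tree (the resource names are invented):

from heat.engine.dependencies import Dependencies

# Each edge is a (requirer, required) tuple: 'server' requires both
# 'network' and 'volume'.
deps = Dependencies([('server', 'network'), ('server', 'volume')])

print(list(deps.requires('server')))      # keys that 'server' requires
print(list(deps.required_by('network')))  # keys that require 'network'

# Forward iteration yields a topologically sorted order; reversed()
# yields the opposite order.
print(list(deps))
print(list(reversed(deps)))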

View File

@ -63,7 +63,7 @@ class ResourceInfo(object):
"""Base mapping of resource type to implementation."""
def __new__(cls, registry, path, value, **kwargs):
'''Create a new ResourceInfo of the appropriate class.'''
"""Create a new ResourceInfo of the appropriate class."""
if cls != ResourceInfo:
# Call is already for a subclass, so pass it through
@ -221,8 +221,10 @@ class ResourceRegistry(object):
registry[name] = hook
def _register_info(self, path, info):
"""place the new info in the correct location in the registry.
path: a list of keys ['resources', 'my_server', 'OS::Nova::Server']
"""Place the new info in the correct location in the registry.
:param path: a list of keys ['resources', 'my_server',
'OS::Nova::Server']
"""
descriptive_path = '/'.join(path)
name = path[-1]
@ -285,7 +287,7 @@ class ResourceRegistry(object):
registry.pop(info.path[-1])
def matches_hook(self, resource_name, hook):
'''Return whether a resource have a hook set in the environment.
"""Return whether a resource have a hook set in the environment.
For a given resource and a hook type, we check to see if the passed
group of resources has the right hook associated with the name.
@ -307,7 +309,7 @@ class ResourceRegistry(object):
A hook value is either `pre-create`, `pre-update` or a list of those
values. Resources support wildcard matching. The asterisk sign matches
everything.
'''
"""
ress = self._registry['resources']
for name_pattern, resource in six.iteritems(ress):
if fnmatch.fnmatchcase(resource_name, name_pattern):
@ -365,7 +367,8 @@ class ResourceRegistry(object):
def get_resource_info(self, resource_type, resource_name=None,
registry_type=None, ignore=None):
"""Find possible matches to the resource type and name.
chain the results from the global and user registry to find
Chain the results from the global and user registry to find
a match.
"""
# use cases
@ -381,6 +384,7 @@ class ResourceRegistry(object):
# - filter_by(is_user=False)
# 4) as_dict() to write to the db
# - filter_by(is_user=True)
if self.global_registry is not None:
giter = self.global_registry.iterable_by(resource_type,
resource_name)
@ -446,7 +450,7 @@ class ResourceRegistry(object):
support_status=None,
type_name=None,
version=None):
'''Return a list of valid resource types.'''
"""Return a list of valid resource types."""
# validate the support status
if support_status is not None and not support.is_valid_status(
@ -509,8 +513,10 @@ class Environment(object):
def __init__(self, env=None, user_env=True):
"""Create an Environment from a dict of varying format.
1) old-school flat parameters
2) or newer {resource_registry: bla, parameters: foo}
The following formats are available:
1) old-school flat parameters
2) or newer {resource_registry: bla, parameters: foo}
:param env: the json environment
:param user_env: boolean, if false then we manage python resources too.
@ -601,7 +607,7 @@ def get_child_environment(parent_env, child_params, item_to_remove=None,
environment.
1. resource_registry must be merged (child env should be loaded after the
parent env to take presdence).
parent env to take precedence).
2. child parameters must overwrite the parent's as they won't be relevant
in the child template.

View File

@ -22,16 +22,17 @@ from heat.objects import event as event_object
class Event(object):
'''Class representing a Resource state change.'''
"""Class representing a Resource state change."""
def __init__(self, context, stack, action, status, reason,
physical_resource_id, resource_properties, resource_name,
resource_type, uuid=None, timestamp=None, id=None):
'''
"""Initialisation of the event.
Initialise from a context, stack, and event information. The timestamp
and database ID may also be initialised if the event is already in the
database.
'''
"""
self.context = context
self.stack = stack
self.action = action
@ -50,7 +51,7 @@ class Event(object):
@classmethod
def load(cls, context, event_id, event=None, stack=None):
'''Retrieve an Event from the database.'''
"""Retrieve an Event from the database."""
from heat.engine import stack as parser
ev = (event if event is not None else
@ -68,7 +69,7 @@ class Event(object):
ev.resource_type, ev.uuid, ev.created_at, ev.id)
def store(self):
'''Store the Event in the database.'''
"""Store the Event in the database."""
ev = {
'resource_name': self.resource_name,
'physical_resource_id': self.physical_resource_id,
@ -106,7 +107,7 @@ class Event(object):
return self.id
def identifier(self):
'''Return a unique identifier for the event.'''
"""Return a unique identifier for the event."""
if self.uuid is None:
return None

View File

@ -21,13 +21,10 @@ import six
@six.add_metaclass(abc.ABCMeta)
class Function(object):
"""
Abstract base class for template functions.
"""
"""Abstract base class for template functions."""
def __init__(self, stack, fn_name, args):
"""
Initialise with a Stack, the function name and the arguments.
"""Initialise with a Stack, the function name and the arguments.
All functions take the form of a single-item map in JSON::
@ -49,8 +46,7 @@ class Function(object):
return stack
def validate(self):
"""
Validate arguments without resolving the function.
"""Validate arguments without resolving the function.
Function subclasses must override this method to validate their
args.
@ -59,8 +55,7 @@ class Function(object):
@abc.abstractmethod
def result(self):
"""
Return the result of resolving the function.
"""Return the result of resolving the function.
Function subclasses must override this method to calculate their
results.
@ -74,8 +69,7 @@ class Function(object):
return dep_attrs(self.args, resource_name)
def __reduce__(self):
"""
Return a representation of the function suitable for pickling.
"""Return a representation of the function suitable for pickling.
This allows the copy module (which works by pickling and then
unpickling objects) to copy a template. Functions in the copy will
@ -84,8 +78,7 @@ class Function(object):
return dict, ([(self.fn_name, self.args)],)
def __repr__(self):
"""
Return a string representation of the function.
"""Return a string representation of the function.
The representation includes the function name, arguments and result
(if available), as well as the name of the function class.
@ -155,8 +148,7 @@ def validate(snippet):
def dependencies(snippet, path=''):
"""
Return an iterator over Resource dependencies in a template snippet.
"""Return an iterator over Resource dependencies in a template snippet.
The snippet should be already parsed to insert Function objects where
appropriate.
@ -187,12 +179,12 @@ def dependencies(snippet, path=''):
def dep_attrs(snippet, resource_name):
"""
Return an iterator over dependent attributes for specified resource_name
in a template snippet.
"""Iterator over dependent attrs for resource_name in a template snippet.
The snippet should be already parsed to insert Function objects where
appropriate.
:returns: an iterator over dependent attributes for specified resource_name
in a template snippet.
"""
if isinstance(snippet, Function):

View File

@ -13,32 +13,27 @@
class LifecyclePlugin(object):
'''
Base class for pre-op and post-op work on a stack.
"""Base class for pre-op and post-op work on a stack.
Implementations should extend this class and override the methods.
'''
"""
def do_pre_op(self, cnxt, stack, current_stack=None, action=None):
'''
Method to be run by heat before stack operations.
'''
"""Method to be run by heat before stack operations."""
pass
def do_post_op(self, cnxt, stack, current_stack=None, action=None,
is_stack_failure=False):
'''
Method to be run by heat after stack operations, including failures.
"""Method to be run by heat after stack operations, including failures.
On failure to execute all the registered pre_ops, this method will be
called if and only if the corresponding pre_op was successfully called.
On failures of the actual stack operation, this method will
be called if all the pre operations were successfully called.
'''
"""
pass
def get_ordinal(self):
'''
An ordinal used to order class instances for pre and post
operation execution.
"""Order class instances for pre and post operation execution.
The values returned by get_ordinal are used to create a partial order
for pre and post operation method invocations. The default ordinal
@ -49,5 +44,5 @@ class LifecyclePlugin(object):
class1inst will be executed after the method on class2inst.
If class1inst.ordinal() == class2inst.ordinal(), then the order of
method invocation is indeterminate.
'''
"""
return 100

View File

@ -23,9 +23,7 @@ PARAMETERS = 'parameters'
class ParameterGroups(object):
'''
The ParameterGroups specified by the stack's template.
'''
"""The ParameterGroups specified by the stack's template."""
def __init__(self, tmpl):
self.tmpl = tmpl
self.parameters = tmpl.parameters(None, {}, param_defaults={})
@ -37,10 +35,11 @@ class ParameterGroups(object):
self.parameter_groups = tmpl.get(PARAMETER_GROUPS)
def validate(self):
'''
"""Validate parameters in current parameter group.
Validate that a parameter belongs to only one Parameter Group
and that each parameter name references a valid parameter.
'''
"""
LOG.debug('Validating Parameter Groups.')
LOG.debug(self.parameter_names)
if self.parameter_groups:

View File

@ -36,7 +36,7 @@ PARAMETER_KEYS = (
class Schema(constr.Schema):
'''Parameter schema.'''
"""Parameter schema."""
KEYS = (
TYPE, DESCRIPTION, DEFAULT, SCHEMA, CONSTRAINTS, HIDDEN, LABEL
@ -127,8 +127,7 @@ class Schema(constr.Schema):
@classmethod
def from_dict(cls, param_name, schema_dict):
"""
Return a Parameter Schema object from a legacy schema dictionary.
"""Return a Parameter Schema object from a legacy schema dictionary.
:param param_name: name of the parameter owning the schema; used
for more verbose logging
@ -176,10 +175,10 @@ class Schema(constr.Schema):
@six.python_2_unicode_compatible
class Parameter(object):
'''A template parameter.'''
"""A template parameter."""
def __new__(cls, name, schema, value=None):
'''Create a new Parameter of the appropriate type.'''
"""Create a new Parameter of the appropriate type."""
if cls is not Parameter:
return super(Parameter, cls).__new__(cls)
@ -203,10 +202,11 @@ class Parameter(object):
return ParamClass(name, schema, value)
def __init__(self, name, schema, value=None):
'''
"""Initialisation of the parameter.
Initialise the Parameter with a name, schema and optional user-supplied
value.
'''
"""
self.name = name
self.schema = schema
self.user_value = value
@ -241,7 +241,7 @@ class Parameter(object):
raise exception.InvalidSchemaError(message=msg)
def value(self):
'''Get the parameter value, optionally sanitising it for output.'''
"""Get the parameter value, optionally sanitising it for output."""
if self.user_value is not None:
return self.user_value
@ -251,31 +251,32 @@ class Parameter(object):
raise exception.UserParameterMissing(key=self.name)
def has_value(self):
'''Parameter has a user or default value.'''
"""Parameter has a user or default value."""
return self.user_value is not None or self.has_default()
def hidden(self):
'''
"""Return if parameter is hidden.
Return whether the parameter should be sanitised in any output to
the user.
'''
"""
return self.schema.hidden
def description(self):
'''Return the description of the parameter.'''
"""Return the description of the parameter."""
return self.schema.description or ''
def label(self):
'''Return the label or param name.'''
"""Return the label or param name."""
return self.schema.label or self.name
def has_default(self):
'''Return whether the parameter has a default value.'''
"""Return whether the parameter has a default value."""
return (self.schema.default is not None or
self.user_default is not None)
def default(self):
'''Return the default value of the parameter.'''
"""Return the default value of the parameter."""
if self.user_default is not None:
return self.user_default
return self.schema.default
@ -284,7 +285,7 @@ class Parameter(object):
self.user_default = value
def __str__(self):
'''Return a string representation of the parameter.'''
"""Return a string representation of the parameter."""
value = self.value()
if self.hidden():
return six.text_type('******')
@ -293,14 +294,14 @@ class Parameter(object):
class NumberParam(Parameter):
'''A template parameter of type "Number".'''
"""A template parameter of type "Number"."""
def __int__(self):
'''Return an integer representation of the parameter.'''
"""Return an integer representation of the parameter."""
return int(super(NumberParam, self).value())
def __float__(self):
'''Return a float representation of the parameter.'''
"""Return a float representation of the parameter."""
return float(super(NumberParam, self).value())
def _validate(self, val, context):
@ -315,7 +316,7 @@ class NumberParam(Parameter):
class BooleanParam(Parameter):
'''A template parameter of type "Boolean".'''
"""A template parameter of type "Boolean"."""
def _validate(self, val, context):
try:
@ -333,14 +334,14 @@ class BooleanParam(Parameter):
class StringParam(Parameter):
'''A template parameter of type "String".'''
"""A template parameter of type "String"."""
def _validate(self, val, context):
self.schema.validate_value(val, context)
class CommaDelimitedListParam(Parameter, collections.Sequence):
'''A template parameter of type "CommaDelimitedList".'''
"""A template parameter of type "CommaDelimitedList"."""
def __init__(self, name, schema, value=None):
super(CommaDelimitedListParam, self).__init__(name, schema, value)
@ -373,11 +374,11 @@ class CommaDelimitedListParam(Parameter, collections.Sequence):
raise exception.UserParameterMissing(key=self.name)
def __len__(self):
'''Return the length of the list.'''
"""Return the length of the list."""
return len(self.parsed)
def __getitem__(self, index):
'''Return an item from the list.'''
"""Return an item from the list."""
return self.parsed[index]
def __str__(self):
@ -442,10 +443,11 @@ class JsonParam(Parameter):
class Parameters(collections.Mapping):
'''
The parameters of a stack, with type checking, defaults &c. specified by
"""Parameters of a stack.
The parameters of a stack, with type checking, defaults etc., specified by
the stack's template.
'''
"""
PSEUDO_PARAMETERS = (
PARAM_STACK_ID, PARAM_STACK_NAME, PARAM_REGION
@ -455,10 +457,11 @@ class Parameters(collections.Mapping):
def __init__(self, stack_identifier, tmpl, user_params=None,
param_defaults=None):
'''
"""Initialisation of the parameter.
Create the parameter container for a stack from the stack name and
template, optionally setting the user-supplied parameter values.
'''
"""
user_params = user_params or {}
param_defaults = param_defaults or {}
@ -484,12 +487,11 @@ class Parameters(collections.Mapping):
self.params[pd].set_default(param_defaults[pd])
def validate(self, validate_value=True, context=None):
'''
Validates all parameters.
"""Validates all parameters.
This method validates if all user-provided parameters are actually
defined in the template, and if all parameters are valid.
'''
"""
self._validate_tmpl_parameters()
self._validate_user_parameters()
@ -497,33 +499,32 @@ class Parameters(collections.Mapping):
param.validate(validate_value, context)
def __contains__(self, key):
'''Return whether the specified parameter exists.'''
"""Return whether the specified parameter exists."""
return key in self.params
def __iter__(self):
'''Return an iterator over the parameter names.'''
"""Return an iterator over the parameter names."""
return iter(self.params)
def __len__(self):
'''Return the number of parameters defined.'''
"""Return the number of parameters defined."""
return len(self.params)
def __getitem__(self, key):
'''Get a parameter value.'''
"""Get a parameter value."""
return self.params[key].value()
def map(self, func, filter_func=lambda p: True):
'''
"""Map the supplied filter function onto each Parameter.
Map the supplied filter function onto each Parameter (with an
optional filter function) and return the resulting dictionary.
'''
"""
return dict((n, func(p))
for n, p in six.iteritems(self.params) if filter_func(p))
def set_stack_id(self, stack_identifier):
'''
Set the StackId pseudo parameter value
'''
"""Set the StackId pseudo parameter value."""
if stack_identifier is not None:
self.params[self.PARAM_STACK_ID].schema.set_default(
stack_identifier.arn())

View File

@ -26,10 +26,10 @@ LOG = log.getLogger(__name__)
class PluginManager(object):
'''A class for managing plugin modules.'''
"""A class for managing plugin modules."""
def __init__(self, *extra_packages):
'''Initialise the Heat Engine plugin package, and any others.
"""Initialise the Heat Engine plugin package, and any others.
The heat.engine.plugins package is always created, if it does not
exist, from the plugin directories specified in the config file, and
@ -40,8 +40,7 @@ class PluginManager(object):
will load all modules in the heat.engine.resources package as well as
any user-supplied plugin modules.
'''
"""
def packages():
for package_name in extra_packages:
yield sys.modules[package_name]
@ -58,23 +57,22 @@ class PluginManager(object):
self.modules = list(modules())
def map_to_modules(self, function):
'''Iterate over the results of calling a function on every module.'''
"""Iterate over the results of calling a function on every module."""
return six.moves.map(function, self.modules)
class PluginMapping(object):
'''A class for managing plugin mappings.'''
"""A class for managing plugin mappings."""
def __init__(self, names, *args, **kwargs):
'''Initialise with the mapping name(s) and arguments.
"""Initialise with the mapping name(s) and arguments.
`names` can be a single name or a list of names. The first name found
in a given module is the one used. Each module is searched for a
function called <name>_mapping() which is called to retrieve the
mappings provided by that module. Any other arguments passed will be
passed to the mapping functions.
'''
"""
if isinstance(names, six.string_types):
names = [names]
@ -83,10 +81,10 @@ class PluginMapping(object):
self.kwargs = kwargs
def load_from_module(self, module):
'''Return the mapping specified in the given module.
"""Return the mapping specified in the given module.
If no such mapping is specified, an empty dictionary is returned.
'''
"""
for mapping_name in self.names:
mapping_func = getattr(module, mapping_name, None)
if callable(mapping_func):
@ -107,10 +105,10 @@ class PluginMapping(object):
return {}
def load_all(self, plugin_manager):
'''Iterate over the mappings from all modules in the plugin manager.
"""Iterate over the mappings from all modules in the plugin manager.
Mappings are returned as a list of (key, value) tuples.
'''
"""
mod_dicts = plugin_manager.map_to_modules(self.load_from_module)
return itertools.chain.from_iterable(six.iteritems(d) for d
in mod_dicts)

View File

@ -38,8 +38,7 @@ SCHEMA_KEYS = (
class Schema(constr.Schema):
"""
Schema class for validating resource properties.
"""Schema class for validating resource properties.
This class is used for defining schema constraints for resource properties.
It inherits generic validation features from the base Schema class and add
@ -74,9 +73,7 @@ class Schema(constr.Schema):
@classmethod
def from_legacy(cls, schema_dict):
"""
Return a Property Schema object from a legacy schema dictionary.
"""
"""Return a Property Schema object from a legacy schema dictionary."""
# Check for fully-fledged Schema objects
if isinstance(schema_dict, cls):
@ -134,8 +131,7 @@ class Schema(constr.Schema):
@classmethod
def from_parameter(cls, param):
"""
Return a Property Schema corresponding to a Parameter Schema.
"""Return a Property Schema corresponding to a Parameter Schema.
Convert a parameter schema from a provider template to a property
Schema for the corresponding resource facade.
@ -169,8 +165,7 @@ class Schema(constr.Schema):
default=param.default)
def allowed_param_prop_type(self):
"""
Return allowed type of Property Schema converted from parameter.
"""Return allowed type of Property Schema converted from parameter.
Especially, when generating Schema from parameter, Integer Property
Schema will be supplied by Number parameter.
@ -196,8 +191,7 @@ class Schema(constr.Schema):
def schemata(schema_dicts):
"""
Return dictionary of Schema objects for given dictionary of schemata.
"""Return dictionary of Schema objects for given dictionary of schemata.
The input schemata are converted from the legacy (dictionary-based)
format to Schema objects where necessary.
@ -354,9 +348,7 @@ class Properties(collections.Mapping):
@staticmethod
def schema_from_params(params_snippet):
"""
Convert a template snippet that defines parameters
into a properties schema
"""Convert a template snippet with parameters into a properties schema.
:param params_snippet: parameter definition from a template
:returns: an equivalent properties schema for the specified params
@ -467,9 +459,7 @@ class Properties(collections.Mapping):
@staticmethod
def _param_def_from_prop(schema):
"""
Return a template parameter definition corresponding to a property.
"""
"""Return a template parameter definition corresponding to property."""
param_type_map = {
schema.INTEGER: parameters.Schema.NUMBER,
schema.STRING: parameters.Schema.STRING,
@ -512,9 +502,7 @@ class Properties(collections.Mapping):
@staticmethod
def _prop_def_from_prop(name, schema):
"""
Return a provider template property definition for a property.
"""
"""Return a provider template property definition for a property."""
if schema.type == Schema.LIST:
return {'Fn::Split': [',', {'Ref': name}]}
else:
@ -522,10 +510,7 @@ class Properties(collections.Mapping):
@staticmethod
def _hot_param_def_from_prop(schema):
"""
Return parameter definition corresponding to a property for
hot template.
"""
"""Parameter definition corresponding to property for hot template."""
param_type_map = {
schema.INTEGER: hot_param.HOTParamSchema.NUMBER,
schema.STRING: hot_param.HOTParamSchema.STRING,
@ -564,15 +549,12 @@ class Properties(collections.Mapping):
@staticmethod
def _hot_prop_def_from_prop(name, schema):
"""
Return a provider template property definition for a property.
"""
"""Return a provider template property definition for a property."""
return {'get_param': name}
@classmethod
def schema_to_parameters_and_properties(cls, schema, template_type='cfn'):
"""Generates properties with params resolved for a resource's
properties_schema.
"""Generates properties with params resolved for a schema.
:param schema: A resource type's properties_schema
:returns: A tuple of params and properties dicts

View File

@ -216,13 +216,13 @@ class Resource(object):
self.uuid = stack.cache_data[name]['uuid']
def rpc_client(self):
'''Return a client for making engine RPC calls.'''
"""Return a client for making engine RPC calls."""
if not self._rpc_client:
self._rpc_client = rpc_client.EngineClient()
return self._rpc_client
def _load_data(self, resource):
'''Load the resource state from its DB representation.'''
"""Load the resource state from its DB representation."""
self.resource_id = resource.nova_instance
self.action = resource.action
self.status = resource.status
@ -311,7 +311,7 @@ class Resource(object):
self.translate_properties()
def __eq__(self, other):
'''Allow == comparison of two resources.'''
"""Allow == comparison of two resources."""
# For the purposes of comparison, we declare two resource objects
# equal if their names and parsed_templates are the same
if isinstance(other, Resource):
@ -320,7 +320,7 @@ class Resource(object):
return NotImplemented
def __ne__(self, other):
'''Allow != comparison of two resources.'''
"""Allow != comparison of two resources."""
result = self.__eq__(other)
if result is NotImplemented:
return result
@ -365,7 +365,7 @@ class Resource(object):
)
def _break_if_required(self, action, hook):
'''Block the resource until the hook is cleared if there is one.'''
"""Block the resource until the hook is cleared if there is one."""
if self.stack.env.registry.matches_hook(self.name, hook):
self._add_event(self.action, self.status,
_("%(a)s paused until Hook %(h)s is cleared")
@ -400,7 +400,9 @@ class Resource(object):
return self.t.resource_type
def has_interface(self, resource_type):
"""Check to see if this resource is either mapped to resource_type
"""Check if resource is mapped to resource_type or is "resource_type".
Check to see if this resource is either mapped to resource_type
or is a "resource_type".
"""
if self.type() == resource_type:
@ -410,26 +412,25 @@ class Resource(object):
return ri.name == resource_type
def implementation_signature(self):
'''
Return a tuple defining the implementation.
"""Return a tuple defining the implementation.
This should be broken down into a definition and an
implementation version.
'''
"""
return (self.__class__.__name__, self.support_status.version)
def identifier(self):
'''Return an identifier for this resource.'''
"""Return an identifier for this resource."""
return identifier.ResourceIdentifier(resource_name=self.name,
**self.stack.identifier())
def parsed_template(self, section=None, default=None):
'''
Return the parsed template data for the resource. May be limited to
only one section of the data, in which case a default value may also
be supplied.
'''
"""Return the parsed template data for the resource.
May be limited to only one section of the data, in which case a default
value may also be supplied.
"""
default = default or {}
if section is None:
template = self.t
@ -445,11 +446,11 @@ class Resource(object):
return self.t.freeze(**args)
def update_template_diff(self, after, before):
'''
Returns the difference between the before and after json snippets. If
something has been removed in after which exists in before we set it to
None.
'''
"""Returns the difference between the before and after json snippets.
If something has been removed in after which exists in before we set it
to None.
"""
# Create a set containing the keys in both current and update template
template_keys = set(six.iterkeys(before))
template_keys.update(set(six.iterkeys(after)))
@ -461,13 +462,13 @@ class Resource(object):
return dict((k, after.get(k)) for k in changed_keys_set)
def update_template_diff_properties(self, after_props, before_props):
'''
Returns the changed Properties between the before and after properties.
If any property having immutable as True is updated,
raises NotSupported error.
"""The changed Properties between the before and after properties.
If any property having immutable as True is updated, raises
NotSupported error.
If any properties have changed which are not in
update_allowed_properties, raises UpdateReplace.
'''
"""
update_allowed_set = set(self.update_allowed_properties)
immutable_set = set()
for (psk, psv) in six.iteritems(self.properties.props):
@ -518,10 +519,11 @@ class Resource(object):
deps += (self, None)
def required_by(self):
'''
Returns a list of names of resources which directly require this
"""List of resources' names which require the resource as dependency.
Returns a list of resources' names which directly require this
resource as a dependency.
'''
"""
return list(
[r.name for r in self.stack.dependencies.required_by(self)])
@ -592,7 +594,7 @@ class Resource(object):
@contextlib.contextmanager
def _action_recorder(self, action, expected_exceptions=tuple()):
'''Return a context manager to record the progress of an action.
"""Return a context manager to record the progress of an action.
Upon entering the context manager, the state is set to IN_PROGRESS.
Upon exiting, the state will be set to COMPLETE if no exception was
@ -601,7 +603,7 @@ class Resource(object):
Expected exceptions are re-raised, with the Resource left in the
IN_PROGRESS state.
'''
"""
try:
self.state_set(action, self.IN_PROGRESS)
yield
@ -626,8 +628,7 @@ class Resource(object):
self.state_set(action, self.COMPLETE)
def action_handler_task(self, action, args=[], action_prefix=None):
'''
A task to call the Resource subclass's handler methods for an action.
"""A task to call the Resource subclass's handler methods for action.
Calls the handle_<ACTION>() method for the given action and then calls
the check_<ACTION>_complete() method with the result in a loop until it
@ -637,7 +638,7 @@ class Resource(object):
If a prefix is supplied, the handler method handle_<PREFIX>_<ACTION>()
is called instead.
'''
"""
handler_action = action.lower()
check = getattr(self, 'check_%s_complete' % handler_action, None)
@ -654,9 +655,9 @@ class Resource(object):
@scheduler.wrappertask
def _do_action(self, action, pre_func=None, resource_data=None):
'''
Perform a transition to a new state via a specified action
action should be e.g self.CREATE, self.UPDATE etc, we set
"""Perform a transition to a new state via a specified action.
Action should be e.g. self.CREATE, self.UPDATE etc.; we set
status based on this, the transition is handled by calling the
corresponding handle_* and check_*_complete functions
Note pre_func is an optional function reference which will
@ -667,7 +668,7 @@ class Resource(object):
finished, and if no handle_$action function is declared, then we do
nothing, useful e.g if the resource requires no action for a given
state transition
'''
"""
assert action in self.ACTIONS, 'Invalid action %s' % action
with self._action_recorder(action):
@ -681,19 +682,16 @@ class Resource(object):
self._stored_properties_data = function.resolve(self.properties.data)
def preview(self):
'''
Default implementation of Resource.preview.
"""Default implementation of Resource.preview.
This method should be overridden by child classes for specific
behavior.
'''
"""
return self
def create_convergence(self, template_id, resource_data, engine_id,
timeout):
'''
Creates the resource by invoking the scheduler TaskRunner.
'''
"""Creates the resource by invoking the scheduler TaskRunner."""
with self.lock(engine_id):
self.requires = list(
set(data[u'id'] for data in resource_data.values()
@ -709,10 +707,11 @@ class Resource(object):
@scheduler.wrappertask
def create(self):
'''
Create the resource. Subclasses should provide a handle_create() method
to customise creation.
'''
"""Create the resource.
Subclasses should provide a handle_create() method to customise
creation.
"""
action = self.CREATE
if (self.action, self.status) != (self.INIT, self.COMPLETE):
exc = exception.Error(_('State %s invalid for create')
@ -791,10 +790,11 @@ class Resource(object):
}
def adopt(self, resource_data):
'''
Adopt the existing resource. Resource subclasses can provide
a handle_adopt() method to customise adopt.
'''
"""Adopt the existing resource.
Resource subclasses can provide a handle_adopt() method to customise
adopt.
"""
self._update_stored_properties()
return self._do_action(self.ADOPT, resource_data=resource_data)
@ -863,12 +863,13 @@ class Resource(object):
def update_convergence(self, template_id, resource_data, engine_id,
timeout):
'''
"""Updates the resource.
Updates the resource by invoking the scheduler TaskRunner
and it persists the resource's current_template_id to template_id and
resource's requires to list of the required resource id from the
given resource_data and existing resource's requires.
'''
"""
def update_tmpl_id_and_requires():
self.current_template_id = template_id
self.requires = list(
@ -889,10 +890,11 @@ class Resource(object):
@scheduler.wrappertask
def update(self, after, before=None, prev_resource=None):
'''
update the resource. Subclasses should provide a handle_update() method
to customise update, the base-class handle_update will fail by default.
'''
"""Update the resource.
Subclasses should provide a handle_update() method to customise update,
the base-class handle_update will fail by default.
"""
action = self.UPDATE
assert isinstance(after, rsrc_defn.ResourceDefinition)
@ -951,25 +953,25 @@ class Resource(object):
raise ex
def prepare_for_replace(self):
'''Prepare resource for replacing.
"""Prepare resource for replacing.
Some resources require additional actions before being replaced.
If a resource needs to be changed before replacing, this method should
be implemented in the resource class.
'''
"""
pass
def restore_after_rollback(self):
'''Restore resource after rollback.
"""Restore resource after rollback.
Some resources require additional actions after rollback.
If a resource needs to be changed during rollback, this method should
be implemented in the resource class.
'''
"""
pass
def check(self):
"""Checks that the physical resource is in its expected state
"""Checks that the physical resource is in its expected state.
Gets the current status of the physical resource and updates the
database accordingly. If check is not supported by the resource,
@ -1002,10 +1004,11 @@ class Resource(object):
raise exception.Error('; '.join(invalid_checks))
def suspend(self):
'''
Suspend the resource. Subclasses should provide a handle_suspend()
method to implement suspend
'''
"""Suspend the resource.
Subclasses should provide a handle_suspend() method to implement
suspend.
"""
action = self.SUSPEND
# Don't try to suspend the resource unless it's in a stable state
@ -1021,10 +1024,10 @@ class Resource(object):
return self._do_action(action)
def resume(self):
'''
Resume the resource. Subclasses should provide a handle_resume()
method to implement resume
'''
"""Resume the resource.
Subclasses should provide a handle_resume() method to implement resume.
"""
action = self.RESUME
# Allow resume a resource if it's SUSPEND_COMPLETE
@ -1040,7 +1043,7 @@ class Resource(object):
return self._do_action(action)
def snapshot(self):
'''Snapshot the resource and return the created data, if any.'''
"""Snapshot the resource and return the created data, if any."""
LOG.info(_LI('snapshotting %s'), six.text_type(self))
return self._do_action(self.SNAPSHOT)
@ -1063,8 +1066,7 @@ class Resource(object):
@staticmethod
def reduce_physical_resource_name(name, limit):
'''
Reduce length of physical resource name to a limit.
"""Reduce length of physical resource name to a limit.
The reduced name will consist of the following:
@ -1076,7 +1078,7 @@ class Resource(object):
:param name: The name to reduce the length of
:param limit: The max length limit
:returns: A name whose length is less than or equal to the limit
'''
"""
if len(name) <= limit:
return name
@ -1160,15 +1162,16 @@ class Resource(object):
)
def delete_convergence(self, template_id, input_data, engine_id, timeout):
'''Destroys the resource if it doesn't belong to given
template. The given template is suppose to be the current
template being provisioned.
"""Destroys the resource if it doesn't belong to given template.
The given template is suppose to be the current template being
provisioned.
Also, since this resource is visited as part of clean-up phase,
the needed_by should be updated. If this resource was
replaced by more recent resource, then delete this and update
the replacement resource's needed_by and replaces fields.
'''
"""
self._acquire(engine_id)
try:
self.needed_by = list(set(v for v in input_data.values()
@ -1199,10 +1202,11 @@ class Resource(object):
@scheduler.wrappertask
def delete(self):
'''
Delete the resource. Subclasses should provide a handle_delete() method
to customise deletion.
'''
"""Delete the resource.
Subclasses should provide a handle_delete() method to customise
deletion.
"""
action = self.DELETE
if (self.action, self.status) == (self.DELETE, self.COMPLETE):
@ -1236,9 +1240,7 @@ class Resource(object):
@scheduler.wrappertask
def destroy(self):
'''
Delete the resource and remove it from the database.
'''
"""Delete the resource and remove it from the database."""
yield self.delete()
if self.id is None:
@ -1263,7 +1265,7 @@ class Resource(object):
LOG.warn(_LW('db error %s'), ex)
def _store(self, metadata=None):
'''Create the resource in the database.'''
"""Create the resource in the database."""
properties_data_encrypted, properties_data = \
resource_objects.Resource.encrypt_properties_data(
@ -1294,7 +1296,7 @@ class Resource(object):
LOG.error(_LE('DB error %s'), ex)
def _add_event(self, action, status, reason):
'''Add a state change event to the database.'''
"""Add a state change event to the database."""
ev = event.Event(self.context, self.stack, action, status, reason,
self.resource_id, self.properties,
self.name, self.type())
@ -1420,7 +1422,7 @@ class Resource(object):
return None
def _show_resource(self):
"""Default implementation; should be overridden by resources
"""Default implementation; should be overridden by resources.
:returns: the map of resource information or None
"""
@ -1434,9 +1436,9 @@ class Resource(object):
return None
def _resolve_attribute(self, name):
"""
Default implementation; should be overridden by resources that expose
attributes
"""Default implementation of resolving resource's attributes.
Should be overridden by resources that expose attributes.
:param name: The attribute to resolve
:returns: the resource attribute named key
@ -1445,9 +1447,10 @@ class Resource(object):
pass
def regenerate_info_schema(self, definition):
"""
Default implementation; should be overridden by resources that would
require schema refresh during update, ex. TemplateResource
"""Default implementation; should be overridden by resources.
Should be overridden by resources that would require schema refresh
during update, e.g. TemplateResource.
:definition: Resource Definition
"""
@ -1455,9 +1458,7 @@ class Resource(object):
pass
def state_reset(self):
"""
Reset state to (INIT, COMPLETE)
"""
"""Reset state to (INIT, COMPLETE)."""
self.action = self.INIT
self.status = self.COMPLETE
@ -1479,7 +1480,7 @@ class Resource(object):
@property
def state(self):
'''Returns state, tuple of action, status.'''
"""Returns state, tuple of action, status."""
return (self.action, self.status)
def get_reference_id(self):
@ -1489,11 +1490,10 @@ class Resource(object):
return six.text_type(self.name)
def FnGetRefId(self):
'''
For the intrinsic function Ref.
"""For the intrinsic function Ref.
:returns: the id or name of the resource.
'''
"""
if self.stack.has_cache_data(self.name):
return self.stack.cache_data_reference_id(self.name)
return self.get_reference_id()
@ -1506,13 +1506,12 @@ class Resource(object):
return Resource.FnGetRefId(self)
def FnGetAtt(self, key, *path):
'''
For the intrinsic function Fn::GetAtt.
"""For the intrinsic function Fn::GetAtt.
:param key: the attribute key.
:param path: a list of path components to select from the attribute.
:returns: the attribute value.
'''
"""
if self.stack.has_cache_data(self.name):
# Load from cache for lightweight resources.
complex_key = key
@ -1544,12 +1543,11 @@ class Resource(object):
return attrs
def FnBase64(self, data):
'''
For the instrinsic function Fn::Base64.
"""For the intrinsic function Fn::Base64.
:param data: the input data.
:returns: the Base64 representation of the input data.
'''
"""
return base64.b64encode(data)
def _signal_check_action(self):
@ -1621,11 +1619,11 @@ class Resource(object):
raise failure
def signal(self, details=None, need_check=True):
'''
signal the resource. Subclasses should provide a handle_signal() method
to implement the signal, the base-class raise an exception if no
handler is implemented.
'''
"""Signal the resource.
Subclasses should provide a handle_signal() method to implement the
signal. The base-class raises an exception if no handler is
implemented.
"""
if need_check:
self._signal_check_action()
self._signal_check_hook(details)
@ -1639,22 +1637,21 @@ class Resource(object):
raise exception.UpdateReplace(self.name)
def metadata_update(self, new_metadata=None):
'''
No-op for resources which don't explicitly override this method
'''
"""No-op for resources which don't explicitly override this method."""
if new_metadata:
LOG.warn(_LW("Resource %s does not implement metadata update"),
self.name)
@classmethod
def resource_to_template(cls, resource_type, template_type='cfn'):
'''
"""Template where resource's properties mapped as parameters.
:param resource_type: The resource type to be displayed in the template
:param template_type: the template type to generate, cfn or hot.
:returns: A template where the resource's properties_schema is mapped
as parameters, and the resource's attributes_schema is mapped as
outputs
'''
"""
schema = cls.properties_schema
params, props = (properties.Properties.
schema_to_parameters_and_properties(schema,
@ -1695,14 +1692,13 @@ class Resource(object):
return tmpl_dict
def data(self):
'''
Resource data for this resource
"""Resource data for this resource.
Use methods data_set and data_delete to modify the resource data
for this resource.
:returns: a dict representing the resource data for this resource.
'''
"""
if self._data is None and self.id:
try:
self._data = resource_data_objects.ResourceData.get_all(self)
@ -1712,17 +1708,16 @@ class Resource(object):
return self._data or {}
def data_set(self, key, value, redact=False):
'''Save resource's key/value pair to database.'''
"""Save resource's key/value pair to database."""
resource_data_objects.ResourceData.set(self, key, value, redact)
# force fetch all resource data from the database again
self._data = None
def data_delete(self, key):
'''
Remove a resource_data element associated to a resource.
"""Remove a resource_data element associated to a resource.
:returns: True if the key existed to delete
'''
:returns: True if the key existed and was deleted.
"""
try:
resource_data_objects.ResourceData.delete(self, key)
except exception.NotFound:

View File

@ -27,8 +27,9 @@ __all__ = ['ResourceDefinition']
class ResourceDefinitionCore(object):
"""
A definition of a resource, independent of any particular template format.
"""A definition of a resource.
Independent of any particular template format.
"""
DELETION_POLICIES = (
@ -40,8 +41,7 @@ class ResourceDefinitionCore(object):
def __init__(self, name, resource_type, properties=None, metadata=None,
depends=None, deletion_policy=None, update_policy=None,
description=None):
"""
Initialise with the parsed definition of a resource.
"""Initialise with the parsed definition of a resource.
Any intrinsic functions present in any of the sections should have been
parsed into Function objects before constructing the definition.
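As a quick illustration of the constructor documented above, here is a minimal sketch; the resource name, type and property values are arbitrary examples, and it assumes heat is importable so that heat.engine.rsrc_defn is available.

from heat.engine import rsrc_defn

defn = rsrc_defn.ResourceDefinition(
    'my_server', 'OS::Nova::Server',
    properties={'flavor': 'm1.small', 'image': 'cirros'},
    metadata={'role': 'webserver'})

# freeze() returns a copy with any intrinsic functions resolved; keyword
# arguments passed to it override the corresponding attributes.
frozen = defn.freeze()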
@ -95,8 +95,7 @@ class ResourceDefinitionCore(object):
self._hash ^= _hash_data(update_policy)
def freeze(self, **overrides):
"""
Return a frozen resource definition, with all functions resolved.
"""Return a frozen resource definition, with all functions resolved.
This returns a new resource definition with fixed data (containing no
intrinsic functions). Named arguments passed to this method override
@ -122,8 +121,7 @@ class ResourceDefinitionCore(object):
return defn
def reparse(self, stack, template):
"""
Reinterpret the resource definition in the context of a new stack.
"""Reinterpret the resource definition in the context of a new stack.
This returns a new resource definition, with all of the functions
parsed in the context of the specified stack and template.
@ -143,7 +141,8 @@ class ResourceDefinitionCore(object):
update_policy=reparse_snippet(self._update_policy))
def dep_attrs(self, resource_name):
"""
"""Return an iterator over dependent attributes for resource_name.
Return an iterator over dependent attributes for specified
resource_name in resources' properties and metadata fields.
"""
@ -153,9 +152,7 @@ class ResourceDefinitionCore(object):
resource_name))
def dependencies(self, stack):
"""
Return the Resource objects in the given stack on which this depends.
"""
"""Return the Resource objects in given stack on which this depends."""
def path(section):
return '.'.join([self.name, section])
@ -178,8 +175,7 @@ class ResourceDefinitionCore(object):
path(METADATA)))
def properties(self, schema, context=None):
"""
Return a Properties object representing the resource properties.
"""Return a Properties object representing the resource properties.
The Properties object is constructed from the given schema, and may
require a context to validate constraints.
@ -189,16 +185,14 @@ class ResourceDefinitionCore(object):
section=PROPERTIES)
def deletion_policy(self):
"""
Return the deletion policy for the resource.
"""Return the deletion policy for the resource.
The policy will be one of those listed in DELETION_POLICIES.
"""
return function.resolve(self._deletion_policy) or self.DELETE
def update_policy(self, schema, context=None):
"""
Return a Properties object representing the resource update policy.
"""Return a Properties object representing the resource update policy.
The Properties object is constructed from the given schema, and may
require a context to validate constraints.
@ -208,15 +202,11 @@ class ResourceDefinitionCore(object):
section=UPDATE_POLICY)
def metadata(self):
"""
Return the resource metadata.
"""
"""Return the resource metadata."""
return function.resolve(self._metadata) or {}
def render_hot(self):
"""
Return a HOT snippet for the resource definition.
"""
"""Return a HOT snippet for the resource definition."""
if self._rendering is None:
attrs = {
'type': 'resource_type',
@ -239,8 +229,7 @@ class ResourceDefinitionCore(object):
return self._rendering
def __eq__(self, other):
"""
Compare this resource definition for equality with another.
"""Compare this resource definition for equality with another.
Two resource definitions are considered to be equal if they can be
generated from the same template snippet. The name of the resource is
@ -253,8 +242,7 @@ class ResourceDefinitionCore(object):
return self.render_hot() == other.render_hot()
def __ne__(self, other):
"""
Compare this resource definition for inequality with another.
"""Compare this resource definition for inequality with another.
See __eq__() for the definition of equality.
"""
@ -265,8 +253,7 @@ class ResourceDefinitionCore(object):
return not equal
def __hash__(self):
"""
Return a hash value for this resource definition.
"""Return a hash value for this resource definition.
Resource definitions that compare equal will have the same hash. (In
particular, the resource name is *not* taken into account.) See
@ -275,9 +262,7 @@ class ResourceDefinitionCore(object):
return self._hash
def __repr__(self):
"""
Return a string representation of the resource definition.
"""
"""Return a string representation of the resource definition."""
def arg_repr(arg_name):
return '='.join([arg_name, repr(getattr(self, '_%s' % arg_name))])
@ -303,8 +288,7 @@ _KEYS = (
class ResourceDefinition(ResourceDefinitionCore, collections.Mapping):
"""
A resource definition that also acts like a cfn template snippet.
"""A resource definition that also acts like a cfn template snippet.
This class exists only for backwards compatibility with existing resource
plugins and unit tests; it is deprecated and then could be replaced with
@ -319,8 +303,7 @@ class ResourceDefinition(ResourceDefinitionCore, collections.Mapping):
'resource instance.')
def __eq__(self, other):
"""
Compare this resource definition for equality with another.
"""Compare this resource definition for equality with another.
Two resource definitions are considered to be equal if they can be
generated from the same template snippet. The name of the resource is
@ -340,8 +323,7 @@ class ResourceDefinition(ResourceDefinitionCore, collections.Mapping):
return super(ResourceDefinition, self).__eq__(other)
def __iter__(self):
"""
Iterate over the available CFN template keys.
"""Iterate over the available CFN template keys.
This is for backwards compatibility with existing code that expects a
parsed-JSON template snippet.
@ -363,8 +345,7 @@ class ResourceDefinition(ResourceDefinitionCore, collections.Mapping):
yield DESCRIPTION
def __getitem__(self, key):
"""
Get the specified item from a CFN template snippet.
"""Get the specified item from a CFN template snippet.
This is for backwards compatibility with existing code that expects a
parsed-JSON template snippet.
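A hedged sketch of that compatibility layer; the definition is hypothetical, and dict-style access emits a DeprecationWarning but still works.

from heat.engine import rsrc_defn

defn = rsrc_defn.ResourceDefinition('my_server', 'OS::Nova::Server',
                                    properties={'flavor': 'm1.small'})

print(defn['Type'])   # -> 'OS::Nova::Server'
print(list(defn))     # the CFN-style keys available for this definition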
@ -397,15 +378,12 @@ class ResourceDefinition(ResourceDefinitionCore, collections.Mapping):
raise KeyError(key)
def __hash__(self):
"""
Return a hash of the ResourceDefinition object.
"""
"""Return a hash of the ResourceDefinition object."""
warnings.warn(self._deprecation_msg, DeprecationWarning)
return super(ResourceDefinition, self).__hash__()
def __len__(self):
"""
Return the number of available CFN template keys.
"""Return the number of available CFN template keys.
This is for backwards compatibility with existing code that expects a
parsed-JSON template snippet.
@ -415,16 +393,12 @@ class ResourceDefinition(ResourceDefinitionCore, collections.Mapping):
return len(list(iter(self)))
def __repr__(self):
"""
Return a string representation of the resource definition.
"""
"""Return a string representation of the resource definition."""
return 'ResourceDefinition %s' % repr(dict(self))
def _hash_data(data):
"""
Return a stable hash value for an arbitrary parsed-JSON data snippet.
"""
"""Return a stable hash value for an arbitrary parsed-JSON data snippet."""
if isinstance(data, function.Function):
data = copy.deepcopy(data)

View File

@ -32,9 +32,9 @@ ENABLE_SLEEP = True
def task_description(task):
"""
Return a human-readable string description of a task suitable for logging
the status of the task.
"""Return a human-readable string description of a task suitable.
Description is used for logging the status of the task.
"""
name = task.__name__ if hasattr(task, '__name__') else None
if isinstance(task, types.MethodType):
@ -47,9 +47,7 @@ def task_description(task):
class Timeout(BaseException):
"""
Timeout exception, raised within a task when it has exceeded its allotted
(wallclock) running time.
"""Raised when task has exceeded its allotted (wallclock) running time.
This allows the task to perform any necessary cleanup, as well as use a
different exception to notify the controlling task if appropriate. If the
@ -58,9 +56,7 @@ class Timeout(BaseException):
"""
def __init__(self, task_runner, timeout):
"""
Initialise with the TaskRunner and a timeout period in seconds.
"""
"""Initialise with the TaskRunner and a timeout period in seconds."""
message = _('%s Timed out') % six.text_type(task_runner)
super(Timeout, self).__init__(message)
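A hedged sketch of a task that uses this behaviour to tidy up when its runner times out; do_step() and cleanup() are hypothetical callables supplied by the caller, not heat APIs.

from heat.engine import scheduler

def careful_task(do_step, cleanup):
    """A co-routine task that cleans up if its allotted time runs out."""
    try:
        while not do_step():
            yield
    except scheduler.Timeout:
        cleanup()
        raise  # let the controlling TaskRunner see the timeout

# e.g. scheduler.TaskRunner(careful_task, do_step, cleanup)(timeout=300)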
@ -113,14 +109,13 @@ class TimedCancel(Timeout):
@six.python_2_unicode_compatible
class ExceptionGroup(Exception):
'''
Container for multiple exceptions.
"""Container for multiple exceptions.
This exception is used by DependencyTaskGroup when the flag
aggregate_exceptions is set to True and it's re-raised again when all tasks
are finished. This way it can be caught later on so that the individual
exceptions can be acted upon.
'''
"""
def __init__(self, exceptions=None):
if exceptions is None:
@ -134,14 +129,12 @@ class ExceptionGroup(Exception):
@six.python_2_unicode_compatible
class TaskRunner(object):
"""
Wrapper for a resumable task (co-routine).
"""
"""Wrapper for a resumable task (co-routine)."""
def __init__(self, task, *args, **kwargs):
"""
Initialise with a task function, and arguments to be passed to it when
it is started.
"""Initialise with a task function.
Any additional arguments are passed to the task when it is started.
The task function may be a co-routine that yields control flow between
steps.
@ -168,8 +161,7 @@ class TaskRunner(object):
eventlet.sleep(wait_time)
def __call__(self, wait_time=1, timeout=None):
"""
Start and run the task to completion.
"""Start and run the task to completion.
The task will first sleep for zero seconds, then sleep for `wait_time`
seconds between steps. To avoid sleeping, pass `None` for `wait_time`.
@ -182,8 +174,7 @@ class TaskRunner(object):
self.run_to_completion(wait_time=wait_time)
def start(self, timeout=None):
"""
Initialise the task and run its first step.
"""Initialise the task and run its first step.
If a timeout is specified, any attempt to step the task after that
number of seconds has elapsed will result in a Timeout being
@ -207,9 +198,9 @@ class TaskRunner(object):
LOG.debug('%s done (not resumable)' % six.text_type(self))
def step(self):
"""
Run another step of the task, and return True if the task is complete;
False otherwise.
"""Run another step of the task.
:returns: True if the task is complete; False otherwise.
"""
if not self.done():
assert self._runner is not None, "Task not started"
@ -231,8 +222,7 @@ class TaskRunner(object):
return self._done
def run_to_completion(self, wait_time=1):
"""
Run the task to completion.
"""Run the task to completion.
The task will sleep for `wait_time` seconds between steps. To avoid
sleeping, pass `None` for `wait_time`.
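A minimal sketch of the calling conventions described above, assuming heat.engine.scheduler is importable; the counting task is purely illustrative.

from heat.engine import scheduler

def count_to(limit):
    """A resumable task: yields control back to the runner between steps."""
    for i in range(limit):
        print('step %d' % i)
        yield

# Run to completion in one call, without sleeping between steps.
scheduler.TaskRunner(count_to, 3)(wait_time=None)

# Or drive the task manually, one step at a time.
runner = scheduler.TaskRunner(count_to, 3)
runner.start()
while not runner.step():
    pass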
@ -273,8 +263,7 @@ class TaskRunner(object):
def wrappertask(task):
"""
Decorator for a task that needs to drive a subtask.
"""Decorator for a task that needs to drive a subtask.
This is essentially a replacement for the Python 3-only "yield from"
keyword (PEP 380), using the "yield" keyword that is supported in
@ -334,16 +323,14 @@ def wrappertask(task):
class DependencyTaskGroup(object):
"""
A task which manages a group of subtasks that have ordering dependencies.
"""
"""Task which manages group of subtasks that have ordering dependencies."""
def __init__(self, dependencies, task=lambda o: o(),
reverse=False, name=None, error_wait_time=None,
aggregate_exceptions=False):
"""
Initialise with the task dependencies and (optionally) a task to run on
each.
"""Initialise with the task dependencies.
Optionally initialise with a task to run on each.
If no task is supplied, it is assumed that the tasks are stored
directly in the dependency tree. If a task is supplied, the object
@ -420,9 +407,10 @@ class DependencyTaskGroup(object):
del self._graph[key]
def _ready(self):
"""
Iterate over all subtasks that are ready to start - i.e. all their
dependencies have been satisfied but they have not yet been started.
"""Iterate over all subtasks that are ready to start.
All subtasks' dependencies have been satisfied but they have not yet
been started.
"""
for k, n in six.iteritems(self._graph):
if not n:
@ -431,9 +419,9 @@ class DependencyTaskGroup(object):
yield k, runner
def _running(self):
"""
Iterate over all subtasks that are currently running - i.e. they have
been started but have not yet completed.
"""Iterate over all subtasks that are currently running.
Subtasks have been started but have not yet completed.
"""
running = lambda k_r: k_r[0] in self._graph and k_r[1].started()
return six.moves.filter(running, six.iteritems(self._runners))
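To tie the pieces of this module together, here is a hedged sketch of running ordered subtasks; it assumes the (requirer, required) edge convention of heat.engine.dependencies.Dependencies, so treat the wiring as illustrative rather than definitive.

from heat.engine import dependencies, scheduler

def do_work(name):
    print('starting %s' % name)
    yield
    print('finished %s' % name)

# 'server' requires 'network', so 'network' is started first.
deps = dependencies.Dependencies([('server', 'network')])
group = scheduler.DependencyTaskGroup(deps, task=do_work,
                                      aggregate_exceptions=True)

# The group is itself a co-routine, so a TaskRunner can drive it. With
# aggregate_exceptions=True, failures are re-raised together as an
# ExceptionGroup once all subtasks have finished.
scheduler.TaskRunner(group)(wait_time=None)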

View File

@ -92,12 +92,11 @@ class ThreadGroupManager(object):
self.add_timer(cfg.CONF.periodic_interval, self._service_task)
def _service_task(self):
"""
This is a dummy task which gets queued on the service.Service
threadgroup. Without this service.Service sees nothing running
i.e has nothing to wait() on, so the process exits..
This could also be used to trigger periodic non-stack-specific
housekeeping tasks
"""Dummy task which gets queued on the service.Service threadgroup.
Without this service.Service sees nothing running, i.e. has nothing to
wait() on, so the process exits. This could also be used to trigger
periodic non-stack-specific housekeeping tasks.
"""
pass
@ -118,9 +117,7 @@ class ThreadGroupManager(object):
return func(*args, **kwargs)
def start(self, stack_id, func, *args, **kwargs):
"""
Run the given method in a sub-thread.
"""
"""Run the given method in a sub-thread."""
if stack_id not in self.groups:
self.groups[stack_id] = threadgroup.ThreadGroup()
return self.groups[stack_id].add_thread(self._start_with_trace,
@ -128,10 +125,9 @@ class ThreadGroupManager(object):
func, *args, **kwargs)
def start_with_lock(self, cnxt, stack, engine_id, func, *args, **kwargs):
"""
Try to acquire a stack lock and, if successful, run the given
method in a sub-thread. Release the lock when the thread
finishes.
"""Run the method in sub-thread if acquire a stack lock is successful.
Release the lock when the thread finishes.
:param cnxt: RPC context
:param stack: Stack to be operated on
@ -149,9 +145,9 @@ class ThreadGroupManager(object):
return th
def start_with_acquired_lock(self, stack, lock, func, *args, **kwargs):
"""
Run the given method in a sub-thread and release the provided lock
when the thread finishes.
"""Run the given method in a sub-thread.
Release the provided lock when the thread finishes.
:param stack: Stack to be operated on
:type stack: heat.engine.parser.Stack
@ -164,9 +160,7 @@ class ThreadGroupManager(object):
"""
def release(gt):
"""
Callback function that will be passed to GreenThread.link().
"""
"""Callback function that will be passed to GreenThread.link()."""
lock.release()
th = self.start(stack.id, func, *args, **kwargs)
@ -174,9 +168,11 @@ class ThreadGroupManager(object):
return th
def add_timer(self, stack_id, func, *args, **kwargs):
"""
Define a periodic task, to be run in a separate thread, in the stack
threadgroups. Periodicity is cfg.CONF.periodic_interval
"""Define a periodic task in the stack threadgroups.
The task is run in a separate thread.
Periodicity is cfg.CONF.periodic_interval.
"""
if stack_id not in self.groups:
self.groups[stack_id] = threadgroup.ThreadGroup()
@ -196,7 +192,7 @@ class ThreadGroupManager(object):
self.groups[stack_id].stop_timers()
def stop(self, stack_id, graceful=False):
'''Stop any active threads on a stack.'''
"""Stop any active threads on a stack."""
if stack_id in self.groups:
self.events.pop(stack_id, None)
threadgroup = self.groups.pop(stack_id)
@ -223,10 +219,11 @@ class ThreadGroupManager(object):
@profiler.trace_cls("rpc")
class EngineListener(service.Service):
'''
Listen on an AMQP queue named for the engine. Allows individual
engines to communicate with each other for multi-engine support.
'''
"""Listen on an AMQP queue named for the engine.
Allows individual engines to communicate with each other for multi-engine
support.
"""
ACTIONS = (STOP_STACK, SEND) = ('stop_stack', 'send')
@ -245,14 +242,15 @@ class EngineListener(service.Service):
server.start()
def listening(self, ctxt):
'''
Respond affirmatively to confirm that the engine performing the
action is still alive.
'''
"""Confirm the engine performing the action is still alive.
Respond affirmatively to confirm that the engine performing the action
is still alive.
"""
return True
def stop_stack(self, ctxt, stack_identity):
'''Stop any active threads on a stack.'''
"""Stop any active threads on a stack."""
stack_id = stack_identity['stack_id']
self.thread_group_mgr.stop(stack_id)
@ -263,8 +261,8 @@ class EngineListener(service.Service):
@profiler.trace_cls("rpc")
class EngineService(service.Service):
"""
Manages the running instances from creation to destruction.
"""Manages the running instances from creation to destruction.
All the methods in here are called from the RPC backend. This is
all done dynamically so if a call is made via RPC that does not
have a corresponding method here, an exception will be thrown when
@ -401,9 +399,7 @@ class EngineService(service.Service):
@context.request_context
def identify_stack(self, cnxt, stack_name):
"""
The identify_stack method returns the full stack identifier for a
single, live stack given the stack name.
"""The full stack identifier for a single, live stack with stack_name.
:param cnxt: RPC context.
:param stack_name: Name or UUID of the stack to look up.
@ -449,8 +445,7 @@ class EngineService(service.Service):
@context.request_context
def show_stack(self, cnxt, stack_identity):
"""
Return detailed information about one or all stacks.
"""Return detailed information about one or all stacks.
:param cnxt: RPC context.
:param stack_identity: Name of the stack you want to show, or None
@ -473,10 +468,11 @@ class EngineService(service.Service):
show_deleted=False, show_nested=False, show_hidden=False,
tags=None, tags_any=None, not_tags=None,
not_tags_any=None):
"""
The list_stacks method returns attributes of all stacks. It supports
pagination (``limit`` and ``marker``), sorting (``sort_keys`` and
``sort_dir``) and filtering (``filters``) of the results.
"""Returns attributes of all stacks.
It supports pagination (``limit`` and ``marker``),
sorting (``sort_keys`` and ``sort_dir``) and filtering (``filters``)
of the results.
:param cnxt: RPC context
:param limit: the number of stacks to list (integer or string)
@ -516,8 +512,8 @@ class EngineService(service.Service):
show_deleted=False, show_nested=False, show_hidden=False,
tags=None, tags_any=None, not_tags=None,
not_tags_any=None):
"""
Return the number of stacks that match the given filters
"""Return the number of stacks that match the given filters.
:param cnxt: RPC context.
:param filters: a dict of ATTR:VALUE to match against stacks
:param tenant_safe: if true, scope the request by the current tenant
@ -622,8 +618,7 @@ class EngineService(service.Service):
@context.request_context
def preview_stack(self, cnxt, stack_name, template, params, files, args):
"""
Simulates a new stack using the provided template.
"""Simulates a new stack using the provided template.
Note that at this stage the template has already been fetched from the
heat-api process if using a template-url.
@ -654,9 +649,8 @@ class EngineService(service.Service):
def create_stack(self, cnxt, stack_name, template, params, files, args,
owner_id=None, nested_depth=0, user_creds_id=None,
stack_user_project_id=None, parent_resource_name=None):
"""
The create_stack method creates a new stack using the template
provided.
"""Creates a new stack using the template provided.
Note that at this stage the template has already been fetched from the
heat-api process if using a template-url.
@ -723,8 +717,7 @@ class EngineService(service.Service):
def _prepare_stack_updates(self, cnxt, current_stack, tmpl, params,
files, args):
"""
Given a stack and update context, return the current and updated stack.
"""Return the current and updated stack.
Changes *will not* be persisted, this is a helper method for
update_stack and preview_update_stack.
@ -764,9 +757,8 @@ class EngineService(service.Service):
@context.request_context
def update_stack(self, cnxt, stack_identity, template, params,
files, args):
"""
The update_stack method updates an existing stack based on the
provided template and parameters.
"""Updates an existing stack based on the provided template and params.
Note that at this stage the template has already been fetched from the
heat-api process if using a template-url.
@ -861,7 +853,8 @@ class EngineService(service.Service):
@context.request_context
def preview_update_stack(self, cnxt, stack_identity, template, params,
files, args):
"""
"""Shows the resources that would be updated.
The preview_update_stack method shows the resources that would be
changed with an update to an existing stack based on the provided
template and parameters. See update_stack for description of
@ -959,9 +952,7 @@ class EngineService(service.Service):
@context.request_context
def validate_template(self, cnxt, template, params=None, files=None):
"""
The validate_template method uses the stack parser to check
the validity of a template.
"""Uses the stack parser to check the validity of a template.
:param cnxt: RPC context.
:param template: Template of stack you want to create.
@ -1032,7 +1023,8 @@ class EngineService(service.Service):
@context.request_context
def authenticated_to_backend(self, cnxt):
"""
"""Validate the credentials in the RPC context.
Verify that the credentials in the RPC context are valid for the
current cloud backend.
"""
@ -1040,8 +1032,7 @@ class EngineService(service.Service):
@context.request_context
def get_template(self, cnxt, stack_identity):
"""
Get the template.
"""Get the template.
:param cnxt: RPC context.
:param stack_identity: Name of the stack you want to see.
@ -1065,8 +1056,7 @@ class EngineService(service.Service):
@context.request_context
def delete_stack(self, cnxt, stack_identity):
"""
The delete_stack method deletes a given stack.
"""The delete_stack method deletes a given stack.
:param cnxt: RPC context.
:param stack_identity: Name of the stack you want to delete.
@ -1123,8 +1113,8 @@ class EngineService(service.Service):
@context.request_context
def abandon_stack(self, cnxt, stack_identity):
"""
The abandon_stack method abandons a given stack.
"""The abandon_stack method abandons a given stack.
:param cnxt: RPC context.
:param stack_identity: Name of the stack you want to abandon.
"""
@ -1190,8 +1180,7 @@ class EngineService(service.Service):
return functions
def resource_schema(self, cnxt, type_name):
"""
Return the schema of the specified type.
"""Return the schema of the specified type.
:param cnxt: RPC context.
:param type_name: Name of the resource type to obtain the schema of.
@ -1235,8 +1224,7 @@ class EngineService(service.Service):
}
def generate_template(self, cnxt, type_name, template_type='cfn'):
"""
Generate a template based on the specified type.
"""Generate a template based on the specified type.
:param cnxt: RPC context.
:param type_name: Name of the resource type to generate a template for.
@ -1257,8 +1245,8 @@ class EngineService(service.Service):
@context.request_context
def list_events(self, cnxt, stack_identity, filters=None, limit=None,
marker=None, sort_keys=None, sort_dir=None):
"""
The list_events method lists all events associated with a given stack.
"""Lists all events associated with a given stack.
It supports pagination (``limit`` and ``marker``),
sorting (``sort_keys`` and ``sort_dir``) and filtering(filters)
of the results.
@ -1304,11 +1292,11 @@ class EngineService(service.Service):
for e in events]
def _authorize_stack_user(self, cnxt, stack, resource_name):
'''
Filter access to describe_stack_resource for stack in-instance users
"""Filter access to describe_stack_resource for in-instance users.
- The user must map to a User resource defined in the requested stack
- The user resource must validate OK against any Policy specified
'''
"""
# first check whether access is allowed by context user_id
if stack.access_allowed(cnxt.user_id, resource_name):
return True
@ -1355,11 +1343,12 @@ class EngineService(service.Service):
@context.request_context
def resource_signal(self, cnxt, stack_identity, resource_name, details,
sync_call=False):
'''
"""Calls resource's signal for the specified resource.
:param sync_call: indicates whether a synchronized call behavior is
expected. This is reserved for CFN WaitCondition
implementation.
'''
"""
def _resource_signal(stack, rsrc, details, need_check):
LOG.debug("signaling resource %s:%s" % (stack.name, rsrc.name))
@ -1400,9 +1389,7 @@ class EngineService(service.Service):
@context.request_context
def find_physical_resource(self, cnxt, physical_resource_id):
"""
Return an identifier for the resource with the specified physical
resource ID.
"""Return an identifier for the specified resource.
:param cnxt: RPC context.
:param physical_resource_id: The physical resource ID to look up.
@ -1441,9 +1428,7 @@ class EngineService(service.Service):
@context.request_context
def stack_suspend(self, cnxt, stack_identity):
'''
Handle request to perform suspend action on a stack
'''
"""Handle request to perform suspend action on a stack."""
def _stack_suspend(stack):
LOG.debug("suspending stack %s" % stack.name)
stack.suspend()
@ -1457,9 +1442,7 @@ class EngineService(service.Service):
@context.request_context
def stack_resume(self, cnxt, stack_identity):
'''
Handle request to perform a resume action on a stack
'''
"""Handle request to perform a resume action on a stack."""
def _stack_resume(stack):
LOG.debug("resuming stack %s" % stack.name)
stack.resume()
@ -1536,9 +1519,7 @@ class EngineService(service.Service):
@context.request_context
def stack_check(self, cnxt, stack_identity):
'''
Handle request to perform a check action on a stack
'''
"""Handle request to perform a check action on a stack."""
s = self._get_stack(cnxt, stack_identity)
stack = parser.Stack.load(cnxt, stack=s)
LOG.info(_LI("Checking stack %s"), stack.name)
@ -1571,10 +1552,11 @@ class EngineService(service.Service):
@context.request_context
def create_watch_data(self, cnxt, watch_name, stats_data):
'''
"""Creates data for CloudWatch and WaitConditions.
This could be used by CloudWatch and WaitConditions
and treat HA service events like any other CloudWatch data.
'''
"""
def get_matching_watches():
if watch_name:
yield watchrule.WatchRule.load(cnxt, watch_name)
@ -1597,12 +1579,11 @@ class EngineService(service.Service):
@context.request_context
def show_watch(self, cnxt, watch_name):
"""
The show_watch method returns the attributes of one watch/alarm
"""The show_watch method returns the attributes of one watch/alarm.
:param cnxt: RPC context.
:param watch_name: Name of the watch you want to see, or None to see
all
all.
"""
if watch_name:
wrn = [watch_name]
@ -1619,14 +1600,13 @@ class EngineService(service.Service):
@context.request_context
def show_watch_metric(self, cnxt, metric_namespace=None, metric_name=None):
"""
The show_watch method returns the datapoints for a metric
"""The show_watch method returns the datapoints for a metric.
:param cnxt: RPC context.
:param metric_namespace: Name of the namespace you want to see, or None
to see all
to see all.
:param metric_name: Name of the metric you want to see, or None to see
all
all.
"""
# DB API and schema does not yet allow us to easily query by
@ -1647,12 +1627,11 @@ class EngineService(service.Service):
@context.request_context
def set_watch_state(self, cnxt, watch_name, state):
"""
Temporarily set the state of a given watch
"""Temporarily set the state of a given watch.
:param cnxt: RPC context.
:param watch_name: Name of the watch
:param state: State (must be one defined in WatchRule class
:param watch_name: Name of the watch.
:param state: State (must be one defined in WatchRule class).
"""
wr = watchrule.WatchRule.load(cnxt, watch_name)
if wr.state == rpc_api.WATCH_STATE_CEILOMETER_CONTROLLED:

View File

@ -106,9 +106,9 @@ class StackWatch(object):
actions, rule.get_details())
def periodic_watcher_task(self, sid):
"""
Periodic task, created for each stack, triggers watch-rule
evaluation for all rules defined for the stack
sid = stack ID
"""Triggers watch-rule evaluation for all rules defined for stack ID.
Periodic task, created for each stack, triggers watch-rule evaluation
for all rules defined for the stack (sid is the stack ID).
"""
self.check_stack_watches(sid)

View File

@ -98,7 +98,8 @@ class Stack(collections.Mapping):
current_traversal=None, tags=None, prev_raw_template_id=None,
current_deps=None, cache_data=None, resource_validate=True):
'''
"""Initialisation of stack.
Initialise from a context, name, Template object and (optionally)
Environment object. The database ID may also be initialised, if the
stack is already in the database.
@ -106,7 +107,7 @@ class Stack(collections.Mapping):
Creating a stack with cache_data creates a lightweight stack which
will not load any resources from the database and resolve the
functions from the cache_data specified.
'''
"""
def _validate_stack_name(name):
if not re.match("[a-zA-Z][a-zA-Z0-9_.-]*$", name):
@ -188,7 +189,7 @@ class Stack(collections.Mapping):
@property
def worker_client(self):
'''Return a client for making engine RPC calls.'''
"""Return a client for making engine RPC calls."""
if not self._worker_client:
self._worker_client = rpc_worker_client.WorkerClient()
return self._worker_client
@ -245,10 +246,10 @@ class Stack(collections.Mapping):
return self._resources
def iter_resources(self, nested_depth=0):
'''
Iterates over all the resources in a stack, including nested stacks up
to `nested_depth` levels below.
'''
"""Iterates over all the resources in a stack.
Iterating includes nested stacks up to `nested_depth` levels below.
"""
for res in six.itervalues(self):
yield res
@ -290,12 +291,13 @@ class Stack(collections.Mapping):
return stack_object.Stack.get_root_id(self.context, self.owner_id)
def object_path_in_stack(self):
'''
If this is not nested return (None, self), else return stack resources
and stacks in path from the root stack and including this stack
"""Return stack resources and stacks in path from the root stack.
:returns: a list of (stack_resource, stack) tuples
'''
If this is not nested return (None, self), else return stack resources
and stacks in path from the root stack and including this stack.
:returns: a list of (stack_resource, stack) tuples.
"""
if self.parent_resource and self.parent_resource.stack:
path = self.parent_resource.stack.object_path_in_stack()
path.extend([(self.parent_resource, self)])
@ -303,41 +305,44 @@ class Stack(collections.Mapping):
return [(None, self)]
def path_in_stack(self):
'''
"""Return tuples of names in path from the root stack.
If this is not nested return (None, self.name), else return tuples of
names (stack_resource.name, stack.name) in path from the root stack and
including this stack.
:returns: a list of (string, string) tuples.
'''
"""
opis = self.object_path_in_stack()
return [(stckres.name if stckres else None,
stck.name if stck else None) for stckres, stck in opis]
def total_resources(self, stack_id=None):
'''
Return the total number of resources in a stack, including nested
stacks below.
'''
"""Return the total number of resources in a stack.
Includes nested stacks below.
"""
if not stack_id:
stack_id = self.id
return stack_object.Stack.count_total_resources(self.context, stack_id)
def _set_param_stackid(self):
'''
Update self.parameters with the current ARN which is then provided
via the Parameters class as the StackId pseudo parameter
'''
"""Update self.parameters with the current ARN.
self.parameters is then provided via the Parameters class as
the StackId pseudo parameter.
"""
if not self.parameters.set_stack_id(self.identifier()):
LOG.warn(_LW("Unable to set parameters StackId identifier"))
@staticmethod
def get_dep_attrs(resources, outputs, resource_name):
'''
"""Return the set of dependent attributes for specified resource name.
Return the set of dependent attributes for specified resource name by
inspecting all resources and outputs in template.
'''
"""
attr_lists = itertools.chain((res.dep_attrs(resource_name)
for res in resources),
(function.dep_attrs(out.get('Value', ''),
@ -347,7 +352,7 @@ class Stack(collections.Mapping):
@staticmethod
def _get_dependencies(resources):
'''Return the dependency graph for a list of resources.'''
"""Return the dependency graph for a list of resources."""
deps = dependencies.Dependencies()
for res in resources:
res.add_dependencies(deps)
@ -357,7 +362,7 @@ class Stack(collections.Mapping):
@classmethod
def load(cls, context, stack_id=None, stack=None, show_deleted=True,
use_stored_context=False, force_reload=False, cache_data=None):
'''Retrieve a Stack from the database.'''
"""Retrieve a Stack from the database."""
if stack is None:
stack = stack_object.Stack.get_by_id(
context,
@ -472,10 +477,10 @@ class Stack(collections.Mapping):
@profiler.trace('Stack.store', hide_args=False)
def store(self, backup=False):
'''
Store the stack in the database and return its ID
"""Store the stack in the database and return its ID.
If self.id is set, we update the existing stack.
'''
"""
s = self.get_kwargs_for_cloning(keep_status=True, only_db=True)
s['name'] = self._backup_name() if backup else self.name
s['backup'] = backup
@ -516,27 +521,23 @@ class Stack(collections.Mapping):
return '%s*' % self.name
def identifier(self):
'''
Return an identifier for this stack.
'''
"""Return an identifier for this stack."""
return identifier.HeatIdentifier(self.tenant_id, self.name, self.id)
def __iter__(self):
'''
Return an iterator over the resource names.
'''
"""Return an iterator over the resource names."""
return iter(self.resources)
def __len__(self):
'''Return the number of resources.'''
"""Return the number of resources."""
return len(self.resources)
def __getitem__(self, key):
'''Get the resource with the specified name.'''
"""Get the resource with the specified name."""
return self.resources[key]
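Taken together, these methods give Stack a read-only mapping interface over its resources. An illustrative use, assuming an already-loaded Stack object named stack:

for name in stack:              # iterate over resource names
    res = stack[name]           # the Resource object for that name
    print(name, res.state)

print(len(stack))               # number of resources
print('my_server' in stack)     # membership test by resource name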
def add_resource(self, resource):
'''Insert the given resource into the stack.'''
"""Insert the given resource into the stack."""
template = resource.stack.t
resource.stack = self
definition = resource.t.reparse(self, template)
@ -550,37 +551,37 @@ class Stack(collections.Mapping):
resource._store()
def remove_resource(self, resource_name):
'''Remove the resource with the specified name.'''
"""Remove the resource with the specified name."""
del self.resources[resource_name]
self.t.remove_resource(resource_name)
if self.t.id is not None:
self.t.store(self.context)
def __contains__(self, key):
'''Determine whether the stack contains the specified resource.'''
"""Determine whether the stack contains the specified resource."""
if self._resources is not None:
return key in self.resources
else:
return key in self.t[self.t.RESOURCES]
def __eq__(self, other):
'''
Compare two Stacks for equality.
"""Compare two Stacks for equality.
Stacks are considered equal only if they are identical.
'''
"""
return self is other
def __str__(self):
'''Return a human-readable string representation of the stack.'''
"""Return a human-readable string representation of the stack."""
text = 'Stack "%s" [%s]' % (self.name, self.id)
return six.text_type(text)
def resource_by_refid(self, refid):
'''
Return the resource in this stack with the specified
refid, or None if not found
'''
"""Return the resource in this stack with the specified refid.
:returns: resource in this stack with the specified refid, or None if
not found.
"""
for r in six.itervalues(self):
if r.state in (
(r.INIT, r.COMPLETE),
@ -593,18 +594,16 @@ class Stack(collections.Mapping):
return r
def register_access_allowed_handler(self, credential_id, handler):
'''
Register a function which determines whether the credentials with
a give ID can have access to a named resource.
'''
"""Register a specific function.
Register a function which determines whether the credentials with a
given ID can have access to a named resource.
"""
assert callable(handler), 'Handler is not callable'
self._access_allowed_handlers[credential_id] = handler
def access_allowed(self, credential_id, resource_name):
'''
Returns True if the credential_id is authorised to access the
resource with the specified resource_name.
'''
"""Is credential_id authorised to access resource by resource_name."""
if not self.resources:
# this also triggers lazy-loading of resources
# so is required for register_access_allowed_handler
@ -616,9 +615,7 @@ class Stack(collections.Mapping):
@profiler.trace('Stack.validate', hide_args=False)
def validate(self):
'''
Validates the stack.
'''
"""Validates the stack."""
# TODO(sdake) Should return line number of invalid reference
# validate overall template (top-level structure)
@ -691,15 +688,16 @@ class Stack(collections.Mapping):
message=six.text_type(ex))
def requires_deferred_auth(self):
'''
"""Determine whether to perform API requests with deferred auth.
Returns whether this stack may need to perform API requests
during its lifecycle using the configured deferred authentication
method.
'''
"""
return any(res.requires_deferred_auth for res in six.itervalues(self))
def _add_event(self, action, status, reason):
'''Add a state change event to the database.'''
"""Add a state change event to the database."""
ev = event.Event(self.context, self, action, status, reason,
self.id, {},
self.name, 'OS::Heat::Stack')
@ -708,7 +706,7 @@ class Stack(collections.Mapping):
@profiler.trace('Stack.state_set', hide_args=False)
def state_set(self, action, status, reason):
'''Update the stack state in the database.'''
"""Update the stack state in the database."""
if action not in self.ACTIONS:
raise ValueError(_("Invalid action %s") % action)
@ -738,22 +736,18 @@ class Stack(collections.Mapping):
@property
def state(self):
'''Returns state, tuple of action, status.'''
"""Returns state, tuple of action, status."""
return (self.action, self.status)
def timeout_secs(self):
'''
Return the stack action timeout in seconds.
'''
"""Return the stack action timeout in seconds."""
if self.timeout_mins is None:
return cfg.CONF.stack_action_timeout
return self.timeout_mins * 60
def preview_resources(self):
'''
Preview the stack with all of the resources.
'''
"""Preview the stack with all of the resources."""
return [resource.preview()
for resource in six.itervalues(self.resources)]
@ -764,9 +758,7 @@ class Stack(collections.Mapping):
@profiler.trace('Stack.create', hide_args=False)
def create(self):
'''
Create the stack and all of the resources.
'''
"""Create the stack and all of the resources."""
def rollback():
if not self.disable_rollback and self.state == (self.CREATE,
self.FAILED):
@ -791,9 +783,10 @@ class Stack(collections.Mapping):
def stack_task(self, action, reverse=False, post_func=None,
error_wait_time=None,
aggregate_exceptions=False, pre_completion_func=None):
'''
A task to perform an action on the stack and all of the resources
in forward or reverse dependency order as specified by reverse
"""A task to perform an action on the stack.
The action is applied to all of the resources in forward or reverse
dependency order, as specified by reverse.
:param action: action that should be executed with stack resources
:param reverse: defines if action on the resources needs to be executed
@ -806,7 +799,7 @@ class Stack(collections.Mapping):
:param pre_completion_func: function that needs to be executed right
before action completion. Uses stack, action, status and reason as
input parameters
'''
"""
try:
lifecycle_plugin_utils.do_pre_ops(self.context, self,
None, action)
@ -893,10 +886,11 @@ class Stack(collections.Mapping):
@profiler.trace('Stack._backup_stack', hide_args=False)
def _backup_stack(self, create_if_missing=True):
'''
"""Backup the stack.
Get a Stack containing any in-progress resources from the previous
stack state prior to an update.
'''
"""
s = stack_object.Stack.get_by_name_and_owner_id(
self.context,
self._backup_name(),
@ -918,9 +912,7 @@ class Stack(collections.Mapping):
@profiler.trace('Stack.adopt', hide_args=False)
def adopt(self):
'''
Adopt a stack (create stack with all the existing resources).
'''
"""Adopt the stack (create stack with all the existing resources)."""
def rollback():
if not self.disable_rollback and self.state == (self.ADOPT,
self.FAILED):
@ -939,7 +931,8 @@ class Stack(collections.Mapping):
@profiler.trace('Stack.update', hide_args=False)
def update(self, newstack, event=None):
'''
"""Update the stack.
Compare the current stack with newstack,
and where necessary create/update/delete the resources until
this stack aligns with newstack.
@ -949,7 +942,7 @@ class Stack(collections.Mapping):
Update will fail if it exceeds the specified timeout. The default is
60 minutes, set in the constructor
'''
"""
self.updated_time = datetime.datetime.utcnow()
updater = scheduler.TaskRunner(self.update_task, newstack,
event=event)
@ -957,9 +950,7 @@ class Stack(collections.Mapping):
@profiler.trace('Stack.converge_stack', hide_args=False)
def converge_stack(self, template, action=UPDATE, new_stack=None):
"""
Updates the stack and triggers convergence for resources
"""
"""Updates the stack and triggers convergence for resources."""
if action not in [self.CREATE, self.ADOPT]:
# no back-up template for create action
self.prev_raw_template_id = getattr(self.t, 'id', None)
@ -1241,12 +1232,14 @@ class Stack(collections.Mapping):
(self.status == self.FAILED))
def _update_exception_handler(self, exc, action, update_task):
'''
Handle exceptions in update_task. Decide if we should cancel tasks or
not. Also decide if we should rollback or not, depend on disable
rollback flag if force rollback flag not trigered.
:returns: a boolean for require rollback flag
'''
"""Handle exceptions in update_task.
Decide if we should cancel tasks or not. Also decide if we should
rollback or not, depending on the disable rollback flag if the force
rollback flag is not triggered.
:returns: a boolean indicating whether rollback is required.
"""
self.status_reason = six.text_type(exc)
self.status = self.FAILED
if action != self.UPDATE:
@ -1382,8 +1375,8 @@ class Stack(collections.Mapping):
@profiler.trace('Stack.delete', hide_args=False)
def delete(self, action=DELETE, backup=False, abandon=False):
'''
Delete all of the resources, and then the stack itself.
"""Delete all of the resources, and then the stack itself.
The action parameter is used to differentiate between a user
initiated delete and an automatic stack rollback after a failed
create, which amount to the same thing, but the states are recorded
@ -1392,7 +1385,7 @@ class Stack(collections.Mapping):
Note abandon is a delete where all resources have been set to a
RETAIN deletion policy, but we also don't want to delete anything
required for those resources, e.g the stack_user_project.
'''
"""
if action not in (self.DELETE, self.ROLLBACK):
LOG.error(_LE("Unexpected action %s passed to delete!"), action)
self.state_set(self.DELETE, self.FAILED,
@ -1473,14 +1466,16 @@ class Stack(collections.Mapping):
@profiler.trace('Stack.suspend', hide_args=False)
def suspend(self):
'''
Suspend the stack, which invokes handle_suspend for all stack resources
waits for all resources to become SUSPEND_COMPLETE then declares the
"""Suspend the stack.
Invokes handle_suspend for all stack resources.
Waits for all resources to become SUSPEND_COMPLETE then declares the
stack SUSPEND_COMPLETE.
Note the default implementation for all resources is to do nothing
other than move to SUSPEND_COMPLETE, so the resources must implement
handle_suspend for this to have any effect.
'''
"""
# No need to suspend if the stack has been suspended
if self.state == (self.SUSPEND, self.COMPLETE):
LOG.info(_LI('%s is already suspended'), six.text_type(self))
@ -1496,14 +1491,16 @@ class Stack(collections.Mapping):
@profiler.trace('Stack.resume', hide_args=False)
def resume(self):
'''
Resume the stack, which invokes handle_resume for all stack resources
waits for all resources to become RESUME_COMPLETE then declares the
"""Resume the stack.
Invokes handle_resume for all stack resources.
Waits for all resources to become RESUME_COMPLETE then declares the
stack RESUME_COMPLETE.
Note the default implementation for all resources is to do nothing
other than move to RESUME_COMPLETE, so the resources must implement
handle_resume for this to have any effect.
'''
"""
# No need to resume if the stack has been resumed
if self.state == (self.RESUME, self.COMPLETE):
LOG.info(_LI('%s is already resumed'), six.text_type(self))
@ -1519,7 +1516,7 @@ class Stack(collections.Mapping):
@profiler.trace('Stack.snapshot', hide_args=False)
def snapshot(self, save_snapshot_func):
'''Snapshot the stack, invoking handle_snapshot on all resources.'''
"""Snapshot the stack, invoking handle_snapshot on all resources."""
self.updated_time = datetime.datetime.utcnow()
sus_task = scheduler.TaskRunner(
self.stack_task,
@ -1531,7 +1528,7 @@ class Stack(collections.Mapping):
@profiler.trace('Stack.delete_snapshot', hide_args=False)
def delete_snapshot(self, snapshot):
'''Remove a snapshot from the backends.'''
"""Remove a snapshot from the backends."""
for name, rsrc in six.iteritems(self.resources):
snapshot_data = snapshot.data
if snapshot_data:
@ -1540,9 +1537,10 @@ class Stack(collections.Mapping):
@profiler.trace('Stack.restore', hide_args=False)
def restore(self, snapshot):
'''
Restore the given snapshot, invoking handle_restore on all resources.
'''
"""Restore the given snapshot.
Invokes handle_restore on all resources.
"""
self.updated_time = datetime.datetime.utcnow()
env = environment.Environment(snapshot.data['environment'])
files = snapshot.data['files']
@ -1569,9 +1567,7 @@ class Stack(collections.Mapping):
@profiler.trace('Stack.output', hide_args=False)
def output(self, key):
'''
Get the value of the specified stack output.
'''
"""Get the value of the specified stack output."""
value = self.outputs[key].get('Value', '')
try:
return function.resolve(value)
@ -1580,10 +1576,11 @@ class Stack(collections.Mapping):
return None
def restart_resource(self, resource_name):
'''
"""Restart the resource specified by resource_name.
Stop resource_name and all resources that depend on it, then start
them again.
'''
"""
deps = self.dependencies[self[resource_name]]
failed = False
@ -1679,13 +1676,12 @@ class Stack(collections.Mapping):
return attrs
def mark_complete(self, traversal_id):
'''
Mark the update as complete.
"""Mark the update as complete.
This currently occurs when all resources have been updated; there may
still be resources being cleaned up, but the Stack should now be in
service.
'''
"""
if traversal_id != self.current_traversal:
return
@ -1697,13 +1693,13 @@ class Stack(collections.Mapping):
self.purge_db()
def purge_db(self):
'''Cleanup database after stack has completed/failed.
"""Cleanup database after stack has completed/failed.
1. Delete previous raw template if stack completes successfully.
2. Deletes all sync points. They are no longer needed after stack
has completed/failed.
3. Delete the stack if the action is DELETE.
'''
"""
if (self.prev_raw_template_id is not None and
self.status != self.FAILED):
prev_tmpl_id = self.prev_raw_template_id
@ -1720,24 +1716,18 @@ class Stack(collections.Mapping):
pass
def time_elapsed(self):
'''
Time elapsed in seconds since the stack operation started.
'''
"""Time elapsed in seconds since the stack operation started."""
start_time = timeutils.round_to_seconds(self.updated_time or
self.created_time)
nowish = timeutils.round_to_seconds(datetime.datetime.utcnow())
return (nowish - start_time).seconds
def time_remaining(self):
'''
Time left before stack times out.
'''
"""Time left before stack times out."""
return self.timeout_secs() - self.time_elapsed()
def has_timed_out(self):
'''
Returns True if this stack has timed-out.
'''
"""Returns True if this stack has timed-out."""
if self.status == self.IN_PROGRESS:
return self.time_elapsed() > self.timeout_secs()

View File

@ -47,16 +47,15 @@ class StackLock(object):
return stack_lock_object.StackLock.get_engine_id(self.stack_id)
def try_acquire(self):
"""
Try to acquire a stack lock, but don't raise an ActionInProgress
exception or try to steal lock.
"""Try to acquire a stack lock.
Don't raise an ActionInProgress exception or try to steal lock.
"""
return stack_lock_object.StackLock.create(self.stack_id,
self.engine_id)
def acquire(self, retry=True):
"""
Acquire a lock on the stack.
"""Acquire a lock on the stack.
:param retry: When True, retry if lock was released while stealing.
:type retry: boolean
@ -112,6 +111,7 @@ class StackLock(object):
def release(self):
"""Release a stack lock."""
# Only the engine that owns the lock will be releasing it.
result = stack_lock_object.StackLock.release(self.stack_id,
self.engine_id)
@ -125,9 +125,9 @@ class StackLock(object):
@contextlib.contextmanager
def thread_lock(self):
"""
Acquire a lock and release it only if there is an exception. The
release method still needs to be scheduled to be run at the
"""Acquire a lock and release it only if there is an exception.
The release method still needs to be scheduled to be run at the
end of the thread using the Thread.link method.
"""
try:
@ -141,10 +141,9 @@ class StackLock(object):
@contextlib.contextmanager
def try_thread_lock(self):
"""
Similar to thread_lock, but acquire the lock using try_acquire
and only release it upon any exception after a successful
acquisition.
"""Similar to thread_lock, but acquire the lock using try_acquire.
Only release it upon any exception after a successful acquisition.
"""
result = None
try:
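A hedged sketch of the intended pattern, mirroring ThreadGroupManager.start_with_lock earlier in this change; lock is an existing StackLock and thread_group_mgr an existing ThreadGroupManager, both assumed to be constructed elsewhere.

def run_with_lock(lock, thread_group_mgr, stack, func, *args, **kwargs):
    with lock.thread_lock():
        # An exception raised here (e.g. while spawning the thread) releases
        # the lock; on success, release() is linked to run via
        # GreenThread.link() once the thread finishes.
        th = thread_group_mgr.start(stack.id, func, *args, **kwargs)
        th.link(lambda gt: lock.release())
        return th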

View File

@ -35,9 +35,7 @@ def make_key(*components):
def create(context, entity_id, traversal_id, is_update, stack_id):
"""
Creates an sync point entry in DB.
"""
"""Creates an sync point entry in DB."""
values = {'entity_id': entity_id, 'traversal_id': traversal_id,
'is_update': is_update, 'atomic_key': 0,
'stack_id': stack_id, 'input_data': {}}
@ -45,9 +43,7 @@ def create(context, entity_id, traversal_id, is_update, stack_id):
def get(context, entity_id, traversal_id, is_update):
"""
Retrieves a sync point entry from DB.
"""
"""Retrieves a sync point entry from DB."""
sync_point = sync_point_object.SyncPoint.get_by_key(context, entity_id,
traversal_id,
is_update)
@ -59,9 +55,7 @@ def get(context, entity_id, traversal_id, is_update):
def delete_all(context, stack_id, traversal_id):
"""
Deletes all sync points of a stack associated with a particular traversal.
"""
"""Deletes all sync points of a stack associated with a traversal_id."""
return sync_point_object.SyncPoint.delete_all_by_stack_and_traversal(
context, stack_id, traversal_id
)
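A short, hedged sketch of the three helpers documented above, wrapped in functions so the RPC context and stack come from the caller; is_update=True is assumed to mark the update (rather than cleanup) flavour of the traversal node.

from heat.engine import sync_point

def track_resource(cnxt, stack, rsrc_id):
    # Create a sync point for one resource in the current traversal,
    # then read it back.
    sync_point.create(cnxt, rsrc_id, stack.current_traversal, True, stack.id)
    return sync_point.get(cnxt, rsrc_id, stack.current_traversal, True)

def purge_traversal(cnxt, stack):
    # Remove every sync point left over from the stack's current traversal.
    return sync_point.delete_all(cnxt, stack.id, stack.current_traversal)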
@ -145,7 +139,7 @@ def sync(cnxt, entity_id, current_traversal, is_update, propagate,
class SyncPointNotFound(Exception):
'''Raised when resource update requires replacement.'''
"""Raised when resource update requires replacement."""
def __init__(self, sync_point):
msg = _("Sync Point %s not found") % (sync_point, )
super(Exception, self).__init__(six.text_type(msg))

View File

@ -90,10 +90,10 @@ def get_template_class(template_data):
class Template(collections.Mapping):
'''A stack template.'''
"""A stack template."""
def __new__(cls, template, *args, **kwargs):
'''Create a new Template of the appropriate class.'''
"""Create a new Template of the appropriate class."""
global _template_classes
if _template_classes is None:
@ -109,9 +109,7 @@ class Template(collections.Mapping):
return super(Template, cls).__new__(TemplateClass)
def __init__(self, template, template_id=None, files=None, env=None):
'''
Initialise the template with a JSON object and a set of Parameters
'''
"""Initialise the template with JSON object and set of Parameters."""
self.id = template_id
self.t = template
self.files = files or {}
@ -128,14 +126,14 @@ class Template(collections.Mapping):
@classmethod
def load(cls, context, template_id, t=None):
'''Retrieve a Template with the given ID from the database.'''
"""Retrieve a Template with the given ID from the database."""
if t is None:
t = template_object.RawTemplate.get_by_id(context, template_id)
env = environment.Environment(t.environment)
return cls(t.template, template_id=template_id, files=t.files, env=env)
def store(self, context=None):
'''Store the Template in the database and return its ID.'''
"""Store the Template in the database and return its ID."""
rt = {
'template': self.t,
'files': self.files,
@ -149,17 +147,17 @@ class Template(collections.Mapping):
return self.id
def __iter__(self):
'''Return an iterator over the section names.'''
"""Return an iterator over the section names."""
return (s for s in self.SECTIONS
if s not in self.SECTIONS_NO_DIRECT_ACCESS)
def __len__(self):
'''Return the number of sections.'''
"""Return the number of sections."""
return len(self.SECTIONS) - len(self.SECTIONS_NO_DIRECT_ACCESS)
@abc.abstractmethod
def param_schemata(self, param_defaults=None):
'''Return a dict of parameters.Schema objects for the parameters.'''
"""Return a dict of parameters.Schema objects for the parameters."""
pass
@abc.abstractmethod
@ -169,7 +167,7 @@ class Template(collections.Mapping):
@abc.abstractmethod
def parameters(self, stack_identifier, user_params, param_defaults=None):
'''Return a parameters.Parameters object for the stack.'''
"""Return a parameters.Parameters object for the stack."""
pass
@classmethod
@ -202,34 +200,33 @@ class Template(collections.Mapping):
@abc.abstractmethod
def resource_definitions(self, stack):
'''Return a dictionary of ResourceDefinition objects.'''
"""Return a dictionary of ResourceDefinition objects."""
pass
@abc.abstractmethod
def add_resource(self, definition, name=None):
'''Add a resource to the template.
"""Add a resource to the template.
The resource is passed as a ResourceDefinition object. If no name is
specified, the name from the ResourceDefinition should be used.
'''
"""
pass
def remove_resource(self, name):
'''Remove a resource from the template.'''
"""Remove a resource from the template."""
self.t.get(self.RESOURCES, {}).pop(name)
def parse(self, stack, snippet):
return parse(self.functions, stack, snippet)
def validate(self):
'''Validate the template.
"""Validate the template.
Validates the top-level sections of the template as well as syntax
inside select sections. Some sections are not checked here but in
code parts that are responsible for working with the respective
sections (e.g. parameters are checked by the parameters schema class).
'''
"""
t_digest = hashlib.sha256(
six.text_type(self.t).encode('utf-8')).hexdigest()
@ -265,7 +262,7 @@ class Template(collections.Mapping):
@classmethod
def create_empty_template(cls,
version=('heat_template_version', '2015-04-30')):
'''Creates an empty template.
"""Creates an empty template.
Creates a new empty template with given version. If version is
not provided, a new empty HOT template of version "2015-04-30"
@ -275,7 +272,7 @@ class Template(collections.Mapping):
template: version key and value. E.g. ("heat_template_version",
"2015-04-30")
:returns: A new empty template.
'''
"""
tmpl = {version[0]: version[1]}
return cls(tmpl)
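A hedged sketch combining the Template and ResourceDefinition APIs documented in this change: create an empty HOT template of the default version and add a hypothetical resource definition to it; it assumes the heat template plugins are installed and loadable.

from heat.engine import rsrc_defn, template

tmpl = template.Template.create_empty_template()
defn = rsrc_defn.ResourceDefinition('random', 'OS::Heat::RandomString')
tmpl.add_resource(defn)   # with no name given, the definition's name is used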

View File

@ -15,23 +15,20 @@ from heat.common import exception
class Timestamp(object):
'''
A descriptor for writing a timestamp to the database.
'''
"""A descriptor for writing a timestamp to the database."""
def __init__(self, db_fetch, attribute):
'''
"""Initialisation of timestamp.
Initialise with a function to fetch the database representation of an
object (given a context and ID) and the name of the attribute to
retrieve.
'''
"""
self.db_fetch = db_fetch
self.attribute = attribute
def __get__(self, obj, obj_class):
'''
Get timestamp for the given object and class.
'''
"""Get timestamp for the given object and class."""
if obj is None or obj.id is None:
return None
@ -39,7 +36,7 @@ class Timestamp(object):
return getattr(o, self.attribute)
def __set__(self, obj, timestamp):
'''Update the timestamp for the given object.'''
"""Update the timestamp for the given object."""
if obj.id is None:
raise exception.ResourceNotAvailable(resource_name=obj.name)
o = self.db_fetch(obj.context, obj.id)
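A self-contained sketch of the descriptor pattern described above; the in-memory store and the names fake_fetch and Thing are made up and stand in for the real database fetch functions:

_FAKE_DB = {1: {'updated_at': '2015-09-17T18:16:53'}}

class _Row(object):
    def __init__(self, values):
        for key, value in values.items():
            setattr(self, key, value)

def fake_fetch(context, obj_id):
    # Stands in for a db_api fetch returning an object with attributes.
    return _Row(_FAKE_DB[obj_id])

class Timestamp(object):
    def __init__(self, db_fetch, attribute):
        self.db_fetch = db_fetch
        self.attribute = attribute

    def __get__(self, obj, obj_class):
        if obj is None or obj.id is None:
            return None
        return getattr(self.db_fetch(obj.context, obj.id), self.attribute)

class Thing(object):
    updated_time = Timestamp(fake_fetch, 'updated_at')

    def __init__(self, context, obj_id):
        self.context = context
        self.id = obj_id

print(Thing(None, 1).updated_time)  # '2015-09-17T18:16:53'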

View File

@ -24,9 +24,7 @@ LOG = logging.getLogger(__name__)
class StackUpdate(object):
"""
A Task to perform the update of an existing stack to a new template.
"""
"""A Task to perform the update of an existing stack to a new template."""
def __init__(self, existing_stack, new_stack, previous_stack,
rollback=False, error_wait_time=None):
@ -198,11 +196,11 @@ class StackUpdate(object):
self.existing_stack.remove_resource(res_name)
def dependencies(self):
'''
Return a Dependencies object representing the dependencies between
update operations to move from an existing stack definition to a new
one.
'''
"""Return a Dependencies object.
The object represents the dependencies between the update operations
needed to move from the existing stack definition to the new one.
"""
existing_deps = self.existing_stack.dependencies
new_deps = self.new_stack.dependencies
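A very rough, self-contained toy of the idea (not the real algorithm): the update graph has to respect ordering constraints from both the existing and the new template, so edges from both dependency graphs are combined:

# requirer -> set of requirements (toy dicts, not heat Dependencies objects)
existing_edges = {'server': {'port'}, 'port': {'net'}}
new_edges = {'server': {'volume'}, 'volume': set()}

merged = {}
for graph in (existing_edges, new_edges):
    for requirer, requirements in graph.items():
        merged.setdefault(requirer, set()).update(requirements)

print(sorted(merged['server']))  # ['port', 'volume']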

View File

@ -75,9 +75,10 @@ class WatchRule(object):
@classmethod
def load(cls, context, watch_name=None, watch=None):
'''
Load the watchrule object, either by name or via an existing DB object
'''
"""Load the watchrule object.
The object is loaded either by name or from an existing DB object.
"""
if watch is None:
try:
watch = watch_rule_objects.WatchRule.get_by_name(context,
@ -98,10 +99,10 @@ class WatchRule(object):
last_evaluated=watch.last_evaluated)
def store(self):
'''
Store the watchrule in the database and return its ID
If self.id is set, we update the existing rule
'''
"""Store the watchrule in the database and return its ID.
If self.id is set, we update the existing rule.
"""
wr_values = {
'name': self.name,
@ -118,9 +119,7 @@ class WatchRule(object):
wr_values)
def destroy(self):
'''
Delete the watchrule from the database.
'''
"""Delete the watchrule from the database."""
if self.id:
watch_rule_objects.WatchRule.delete(self.context, self.id)
@ -180,9 +179,7 @@ class WatchRule(object):
return self.NORMAL
def do_SampleCount(self):
'''
count all samples within the specified period
'''
"""Count all samples within the specified period."""
data = 0
for d in self.watch_data:
if d.created_at < self.now - self.timeperiod:
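A minimal sketch of that sample-count check, with plain datetimes standing in for WatchData rows (names here are illustrative only):

import datetime

def sample_count(samples, now, period):
    # Count samples whose timestamp falls inside the evaluation period.
    return sum(1 for created_at in samples if created_at >= now - period)

now = datetime.datetime(2015, 9, 17, 18, 0, 0)
samples = [now - datetime.timedelta(seconds=s) for s in (10, 50, 400)]
print(sample_count(samples, now, datetime.timedelta(seconds=300)))  # 2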
@ -330,9 +327,7 @@ class WatchRule(object):
% {'name': self.name, 'data': str(wd.data)})
def state_set(self, state):
'''
Persistently store the watch state
'''
"""Persistently store the watch state."""
if state not in self.WATCH_STATES:
raise ValueError(_("Invalid watch state %s") % state)
@ -340,10 +335,11 @@ class WatchRule(object):
self.store()
def set_watch_state(self, state):
'''
Temporarily set the watch state, returns list of functions to be
scheduled in the stack ThreadGroup for the specified state
'''
"""Temporarily set the watch state.
:returns: list of functions to be scheduled in the stack ThreadGroup
for the specified state.
"""
if state not in self.WATCH_STATES:
raise ValueError(_('Unknown watch state %s') % state)
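A toy version of the state-to-actions lookup described above; the action callables and the actions_by_state mapping are placeholders for the real alarm actions:

WATCH_STATES = ('NODATA', 'NORMAL', 'ALARM')

def rule_actions(state, actions_by_state):
    if state not in WATCH_STATES:
        raise ValueError('Unknown watch state %s' % state)
    return list(actions_by_state.get(state, []))

def scale_up():
    print('scaling up')

for action in rule_actions('ALARM', {'ALARM': [scale_up]}):
    action()  # in Heat these would be scheduled in the stack ThreadGroup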

View File

@ -37,7 +37,8 @@ LOG = logging.getLogger(__name__)
@profiler.trace_cls("rpc")
class WorkerService(service.Service):
"""
"""Service that has 'worker' actor in convergence.
This service is dedicated to handle internal messages to the 'worker'
(a.k.a. 'converger') actor in convergence. Messages on this bus will
use the 'cast' rather than 'call' method to anycast the message to
@ -265,12 +266,11 @@ class WorkerService(service.Service):
@context.request_context
def check_resource(self, cnxt, resource_id, current_traversal, data,
is_update, adopt_stack_data):
'''
Process a node in the dependency graph.
"""Process a node in the dependency graph.
The node may be associated with either an update or a cleanup of its
associated resource.
'''
"""
resource_data = dict(sync_point.deserialize_input_data(data))
rsrc, stack = self._load_resource(cnxt, resource_id, resource_data,
is_update)
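Endpoints like check_resource are reached over the bus described in the class docstring via 'cast', so no reply is awaited. A hedged sketch of such a cast with oslo.messaging; the topic, version and argument values are illustrative, not Heat's real configuration:

from oslo_config import cfg
import oslo_messaging as messaging

transport = messaging.get_transport(cfg.CONF)
target = messaging.Target(topic='engine-worker', version='1.0')
client = messaging.RPCClient(transport, target)
# cast() is fire-and-forget: the caller does not wait for a result.
client.cast({}, 'check_resource',
            resource_id='42', current_traversal='trav-1', data={},
            is_update=True, adopt_stack_data=None)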
@ -334,12 +334,11 @@ def construct_input_data(rsrc):
def check_stack_complete(cnxt, stack, current_traversal, sender_id, deps,
is_update):
'''
Mark the stack complete if the update is complete.
"""Mark the stack complete if the update is complete.
'Complete' currently means that all desired resources are in service,
not that superfluous ones have been cleaned up.
'''
"""
roots = set(deps.roots())
if (sender_id, is_update) not in roots:
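A toy illustration of the root check above, with made-up edge data; a stack can only be marked complete when the sender is a root of the traversal graph:

# Map each node to the nodes that depend on it (its requirers).
required_by = {
    'server': [],           # nothing depends on the server -> it is a root
    'port': ['server'],
    'net': ['port'],
}
roots = set(node for node, requirers in required_by.items() if not requirers)
sender = 'server'
print(sender in roots)  # True -> this sender may mark the stack complete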
@ -356,9 +355,7 @@ def check_stack_complete(cnxt, stack, current_traversal, sender_id, deps,
def propagate_check_resource(cnxt, rpc_client, next_res_id,
current_traversal, predecessors, sender_key,
sender_data, is_update, adopt_stack_data):
'''
Trigger processing of a node if all of its dependencies are satisfied.
'''
"""Trigger processing of node if all of its dependencies are satisfied."""
def do_check(entity_id, data):
rpc_client.check_resource(cnxt, entity_id, current_traversal,
data, is_update, adopt_stack_data)
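A self-contained sketch of the "wait for all predecessors" idea behind this propagation, with an in-memory dict standing in for the sync point storage:

def propagate(received, predecessors, trigger):
    # Only fire once input from every predecessor has been recorded.
    if set(predecessors) <= set(received):
        trigger(dict(received))

def trigger(data):
    print('check_resource fired with %s' % data)

inputs = {}
for sender, value in (('net', 1), ('subnet', 2)):
    inputs[sender] = value
    propagate(inputs, ('net', 'subnet'), trigger)  # fires only on the 2nd pass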
@ -370,9 +367,7 @@ def propagate_check_resource(cnxt, rpc_client, next_res_id,
def check_resource_update(rsrc, template_id, resource_data, engine_id,
timeout):
'''
Create or update the Resource if appropriate.
'''
"""Create or update the Resource if appropriate."""
if rsrc.action == resource.Resource.INIT:
rsrc.create_convergence(template_id, resource_data, engine_id, timeout)
else:
@ -381,7 +376,5 @@ def check_resource_update(rsrc, template_id, resource_data, engine_id,
def check_resource_cleanup(rsrc, template_id, resource_data, engine_id,
timeout):
'''
Delete the Resource if appropriate.
'''
"""Delete the Resource if appropriate."""
rsrc.delete_convergence(template_id, resource_data, engine_id, timeout)