General naming and strings clean up

* Rename ironic_discoverd.{discover -> introspection}
* Prefer term 'introspection' over 'discovery'
* Rename DiscoveryFailed -> Error and make it log error message
* Change hook names to appropriate
* Various small fixes

Note that this is a breaking change!

Change-Id: I97842c7686251393dc9c7a40c92beb8a9484a0fa
Implements: blueprint v1-api-reform
This commit is contained in:
Dmitry Tantsur 2015-01-15 10:44:42 +01:00
parent 2c40f1a9ec
commit e29f66adf6
16 changed files with 234 additions and 237 deletions

View File

@ -27,26 +27,26 @@
; Amount of time in seconds, after which repeat periodic update of firewall.
;firewall_update_period = 15
;; Discovery process settings
;; Introspection process settings
; If set to false, discoverd will create ports only for those interfaces, that
; received IP address during ramdisk boot. Otherwise ports will be created
; for all interfaces. You should leave it as false, unless you encounter any
; bugs with this behavior.
;ports_for_inactive_interfaces = false
; Timeout after which discovery is considered failed, set to 0 to disable.
; Timeout after which introspection is considered failed, set to 0 to disable.
;timeout = 3600
; For how much time (in seconds) to keep status information about nodes after
; discovery was finished for them. Default value is 1 week.
; introspection was finished for them. Default value is 1 week.
;node_status_keep_time = 604800
; Amount of time in seconds, after which repeat clean up of timed out nodes
; and old nodes status information.
;clean_up_period = 60
; Whether to overwrite existing values in node database. In the future
; non-matching ports will be deleted as well. Setting this to true makes
; discovery a destructive operation, use with cautious.
; introspection a destructive operation, use with caution.
;overwrite_existing = false
; Whether to enable setting IPMI credentials during discovery. This is an
; Whether to enable setting IPMI credentials during introspection. This is an
; experimental and not well tested feature, use at your own risk.
;enable_setting_ipmi_credentials = false
@ -56,13 +56,13 @@
;listen_address = 0.0.0.0
; Port to listen on.
;listen_port = 5050
; Whether to authenticate with Keystone on discovery initialization endpoint.
; Note that discovery postback endpoint is never authenticated.
; Whether to authenticate with Keystone on public HTTP endpoints.
; Note that introspection ramdisk postback endpoint is never authenticated.
;authenticate = true
;; General service settings
; SQLite3 database to store nodes under discovery, required.
; SQLite3 database to store nodes under introspection, required.
; Do not use :memory: here, it won't work.
;database =
; Comma-separated list of enabled hooks for processing pipeline.

View File

@ -18,6 +18,8 @@ from six.moves import configparser
# TODO(dtantsur): switch to oslo.db
DEFAULTS = {
# Keystone credentials
'admin_tenant_name': 'admin',
# Ironic and Keystone connection settings
'ironic_retry_attempts': '5',
'ironic_retry_period': '5',
@ -25,7 +27,7 @@ DEFAULTS = {
'manage_firewall': 'true',
'dnsmasq_interface': 'br-ctlplane',
'firewall_update_period': '15',
# Discovery process settings
# Introspection process settings
'ports_for_inactive_interfaces': 'false',
'timeout': '3600',
'node_status_keep_time': '604800',
@ -39,7 +41,6 @@ DEFAULTS = {
# General service settings
'processing_hooks': 'scheduler,validate_interfaces',
'debug': 'false',
'admin_tenant_name': 'admin',
}

View File

@ -67,13 +67,13 @@ def _clean_up(chain):
def update_filters(ironic=None):
"""Update firewall filter rules for discovery.
"""Update firewall filter rules for introspection.
Gives access to PXE boot port for any machine, except for those,
whose MAC is registered in Ironic and is not on discovery right now.
whose MAC is registered in Ironic and is not on introspection right now.
This function is called from both discovery initialization code and from
periodic task. This function is supposed to be resistant to unexpected
This function is called from both introspection initialization code and
from periodic task. This function is supposed to be resistant to unexpected
iptables state.
``init()`` function must be called once before any call to this function.
@ -92,7 +92,7 @@ def update_filters(ironic=None):
with LOCK:
macs_active = set(p.address for p in ironic.port.list(limit=0))
to_blacklist = macs_active - node_cache.macs_on_discovery()
to_blacklist = macs_active - node_cache.active_macs()
LOG.debug('Blacklisting active MAC\'s %s', to_blacklist)
# Clean up a bit to account for possible troubles on previous run

View File

@ -11,7 +11,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handling discovery request."""
"""Handling introspection request."""
import logging
@ -24,7 +24,7 @@ from ironic_discoverd import node_cache
from ironic_discoverd import utils
LOG = logging.getLogger("ironic_discoverd.discover")
LOG = logging.getLogger("ironic_discoverd.introspect")
# See http://specs.openstack.org/openstack/ironic-specs/specs/kilo/new-ironic-state-machine.html # noqa
VALID_STATES = {'enroll', 'managed', 'inspecting'}
VALID_POWER_STATES = {'power off'}
@ -34,39 +34,34 @@ def introspect(uuid, setup_ipmi_credentials=False):
"""Initiate hardware properties introspection for a given node.
:param uuid: node uuid
:raises: DiscoveryFailed
:raises: Error
"""
ironic = utils.get_client()
try:
node = ironic.node.get(uuid)
except exceptions.NotFound:
LOG.error('Node %s cannot be found', uuid)
raise utils.DiscoveryFailed("Cannot find node %s" % uuid, code=404)
raise utils.Error("Cannot find node %s" % uuid, code=404)
except exceptions.HttpError as exc:
LOG.exception('Cannot get node %s', uuid)
raise utils.DiscoveryFailed("Cannot get node %s: %s" % (uuid, exc))
raise utils.Error("Cannot get node %s: %s" % (uuid, exc))
if (setup_ipmi_credentials and not
conf.getboolean('discoverd', 'enable_setting_ipmi_credentials')):
msg = 'IPMI credentials setup is disabled in configuration'
LOG.error(msg)
raise utils.DiscoveryFailed(msg)
raise utils.Error(
'IPMI credentials setup is disabled in configuration')
if not node.maintenance:
provision_state = node.provision_state
if provision_state and provision_state.lower() not in VALID_STATES:
msg = ('Refusing to discoverd node %s with provision state "%s" '
msg = ('Refusing to introspect node %s with provision state "%s" '
'and maintenance mode off')
LOG.error(msg, node.uuid, provision_state)
raise utils.DiscoveryFailed(msg % (node.uuid, provision_state))
raise utils.Error(msg % (node.uuid, provision_state))
power_state = node.power_state
if power_state and power_state.lower() not in VALID_POWER_STATES:
msg = ('Refusing to discover node %s with power state "%s" '
msg = ('Refusing to introspect node %s with power state "%s" '
'and maintenance mode off')
LOG.error(msg, node.uuid, power_state)
raise utils.DiscoveryFailed(msg % (node.uuid, power_state))
raise utils.Error(msg % (node.uuid, power_state))
else:
LOG.info('Node %s is in maintenance mode, skipping power and provision'
' states check')
@ -74,10 +69,9 @@ def introspect(uuid, setup_ipmi_credentials=False):
if not setup_ipmi_credentials:
validation = utils.retry_on_conflict(ironic.node.validate, node.uuid)
if not validation.power['result']:
LOG.error('Failed validation of power interface for node %s, '
'reason: %s', node.uuid, validation.power['reason'])
raise utils.DiscoveryFailed(
'Failed validation of power interface for node %s' % node.uuid)
msg = ('Failed validation of power interface for node %s, '
'reason: %s')
raise utils.Error(msg % (node.uuid, validation.power['reason']))
eventlet.greenthread.spawn_n(_background_start_discover, ironic, node,
setup_ipmi_credentials=setup_ipmi_credentials)
@ -115,6 +109,6 @@ def _background_start_discover(ironic, node, setup_ipmi_credentials):
LOG.error('Failed to power on node %s, check it\'s power '
'management configuration:\n%s', node.uuid, exc)
else:
LOG.info('Discovery environment is ready for node %s, '
LOG.info('Introspection environment is ready for node %s, '
'manual power on is required within %d seconds',
node.uuid, conf.getint('discoverd', 'timeout'))

View File

@ -19,34 +19,34 @@ import json
import logging
import sys
from flask import Flask, request, json as flask_json # noqa
import flask
from keystoneclient import exceptions
from ironic_discoverd import conf
from ironic_discoverd import discover
from ironic_discoverd import firewall
from ironic_discoverd import introspect
from ironic_discoverd import node_cache
from ironic_discoverd import process
from ironic_discoverd import utils
app = Flask(__name__)
app = flask.Flask(__name__)
LOG = logging.getLogger('ironic_discoverd.main')
def check_auth():
"""Check whether request is properly authenticated."""
if not conf.getboolean('discoverd', 'authenticate'):
return
if not request.headers.get('X-Auth-Token'):
if not flask.request.headers.get('X-Auth-Token'):
LOG.error("No X-Auth-Token header, rejecting request")
raise utils.DiscoveryFailed('Authentication required', code=401)
raise utils.Error('Authentication required', code=401)
try:
utils.check_is_admin(token=request.headers['X-Auth-Token'])
except exceptions.Unauthorized:
LOG.error("Keystone denied access, rejecting request")
raise utils.DiscoveryFailed('Access denied', code=403)
utils.check_is_admin(token=flask.request.headers['X-Auth-Token'])
except exceptions.Unauthorized as exc:
LOG.error("Keystone denied access: %s, rejecting request", exc)
raise utils.Error('Access denied', code=403)
def convert_exceptions(func):
@ -54,7 +54,7 @@ def convert_exceptions(func):
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except utils.DiscoveryFailed as exc:
except utils.Error as exc:
return str(exc), exc.http_code
return wrapper
@ -62,8 +62,8 @@ def convert_exceptions(func):
@app.route('/v1/continue', methods=['POST'])
@convert_exceptions
def post_continue():
data = request.get_json(force=True)
def api_continue():
data = flask.request.get_json(force=True)
LOG.debug("/v1/continue got JSON %s", data)
res = process.process(data)
@ -72,31 +72,32 @@ def post_continue():
@app.route('/v1/introspection/<uuid>', methods=['GET', 'POST'])
@convert_exceptions
def introspection(uuid):
def api_introspection(uuid):
check_auth()
if request.method == 'POST':
setup_ipmi_credentials = request.args.get('setup_ipmi_credentials',
type=bool,
default=False)
discover.introspect(uuid,
setup_ipmi_credentials=setup_ipmi_credentials)
if flask.request.method == 'POST':
setup_ipmi_credentials = flask.request.args.get(
'setup_ipmi_credentials',
type=bool,
default=False)
introspect.introspect(uuid,
setup_ipmi_credentials=setup_ipmi_credentials)
return '', 202
else:
node_info = node_cache.get_node(uuid)
return flask_json.jsonify(finished=bool(node_info.finished_at),
return flask.json.jsonify(finished=bool(node_info.finished_at),
error=node_info.error or None)
@app.route('/v1/discover', methods=['POST'])
@convert_exceptions
def post_discover():
def api_discover():
check_auth()
data = request.get_json(force=True)
data = flask.request.get_json(force=True)
LOG.debug("/v1/discover got JSON %s", data)
for uuid in data:
discover.introspect(uuid)
introspect.introspect(uuid)
return "", 202

View File

@ -11,7 +11,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cache for nodes currently under discovery."""
"""Cache for nodes currently under introspection."""
import json
import logging
@ -53,7 +53,7 @@ class NodeInfo(object):
@property
def options(self):
"""Node discovery options as a dict."""
"""Node introspection options as a dict."""
if self._options is None:
rows = _db().execute('select name, value from options '
'where uuid=?', (self.uuid,))
@ -71,18 +71,15 @@ class NodeInfo(object):
db.execute('insert into options(uuid, name, value) values(?,?,?)',
(self.uuid, name, encoded))
def finished(self, error=None, log=True):
def finished(self, error=None):
"""Record status for this node.
Also deletes look up attributes from the cache.
:param error: error message
:param log: whether to log the error message
"""
self.finished_at = time.time()
self.error = error
if error and log:
LOG.error(error)
with _db() as db:
db.execute('update nodes set finished_at=?, error=? where uuid=?',
@ -119,7 +116,7 @@ def _db():
def add_node(uuid, **attributes):
"""Store information about a node under discovery.
"""Store information about a node under introspection.
All existing information about this node is dropped.
Empty values are skipped.
@ -147,19 +144,18 @@ def add_node(uuid, **attributes):
"values(?, ?, ?)",
[(name, v, uuid) for v in value])
except sqlite3.IntegrityError as exc:
LOG.error('Database integrity error %s, some or all of '
'%s\'s %s seem to be on discovery already',
exc, name, value)
raise utils.DiscoveryFailed(
LOG.error('Database integrity error %s during '
'adding attributes', exc)
raise utils.Error(
'Some or all of %(name)s\'s %(value)s are already '
'on discovery' %
'on introspection' %
{'name': name, 'value': value})
return NodeInfo(uuid=uuid, started_at=started_at)
def macs_on_discovery():
"""List all MAC's that are on discovery right now."""
def active_macs():
"""List all MAC's that are on introspection right now."""
return {x[0] for x in _db().execute("select value from attributes "
"where name='mac'")}
@ -172,8 +168,7 @@ def get_node(uuid):
"""
row = _db().execute('select * from nodes where uuid=?', (uuid,)).fetchone()
if row is None:
raise utils.DiscoveryFailed('Could not find node %s in cache' % uuid,
code=404)
raise utils.Error('Could not find node %s in cache' % uuid, code=404)
return NodeInfo.from_row(row)
@ -182,7 +177,7 @@ def find_node(**attributes):
:param attributes: attributes known about this node (like macs, BMC etc)
:returns: structure NodeInfo with attributes ``uuid`` and ``created_at``
:raises: DiscoveryFailed if node is not found
:raises: Error if node is not found
"""
# NOTE(dtantsur): sorting is not required, but gives us predictability
found = set()
@ -203,28 +198,25 @@ def find_node(**attributes):
found.update(item[0] for item in rows)
if not found:
LOG.error('Could not find a node based on attributes %s',
list(attributes))
raise utils.DiscoveryFailed('Could not find a node', code=404)
raise utils.Error(
'Could not find a node for attributes %s' % attributes, code=404)
elif len(found) > 1:
LOG.error('Multiple nodes were matched based on attributes %(keys)s: '
'%(uuids)s',
{'keys': list(attributes),
'uuids': list(found)})
raise utils.DiscoveryFailed('Multiple matching nodes found', code=404)
raise utils.Error(
'Multiple matching nodes found for attributes %s: %s'
% (attributes, list(found)), code=404)
uuid = found.pop()
row = db.execute('select started_at, finished_at from nodes where uuid=?',
(uuid,)).fetchone()
if not row:
LOG.error('Inconsistent database: %s is in attributes table, '
'but not in nodes table', uuid)
raise utils.DiscoveryFailed('Could not find a node', code=404)
raise utils.Error(
'Could not find node %s in introspection cache, '
'probably it\'s not on introspection now' % uuid, code=404)
if row['finished_at']:
LOG.error('Discovery for node %s finished on %s already',
uuid, row['finished_at'])
raise utils.DiscoveryFailed('Discovery for node %s already finished')
raise utils.Error(
'Introspection for node %s already finished on %s' %
(uuid, row['finished_at']))
return NodeInfo(uuid=uuid, started_at=row['started_at'])
@ -232,7 +224,7 @@ def find_node(**attributes):
def clean_up():
"""Clean up the cache.
* Finish discovery for timed out nodes.
* Finish introspection for timed out nodes.
* Drop outdated node status information.
:return: list of timed out node UUID's
@ -256,10 +248,10 @@ def clean_up():
if not uuids:
return []
LOG.error('Discovery for nodes %s has timed out', uuids)
LOG.error('Introspection for nodes %s has timed out', uuids)
db.execute('update nodes set finished_at=?, error=? '
'where started_at < ?',
(time.time(), 'Discovery timed out', threshold))
(time.time(), 'Introspection timeout', threshold))
db.executemany('delete from attributes where uuid=?',
[(u,) for u in uuids])
db.executemany('delete from options where uuid=?',

View File

@ -23,30 +23,29 @@ from ironic_discoverd import conf
@six.add_metaclass(abc.ABCMeta)
class ProcessingHook(object): # pragma: no cover
"""Abstract base class for discovery data processing hooks."""
"""Abstract base class for introspection data processing hooks."""
def pre_discover(self, node_info):
"""Pre-discovery hook.
def before_processing(self, node_info):
"""Hook to run before any other data processing.
This hook is run before any processing is done on data, even sanity
checks.
This hook is run even before sanity checks.
:param node_info: raw information sent by the ramdisk, may be modified
by the hook.
:returns: nothing.
"""
def post_discover(self, node, ports, discovered_data):
"""Post-discovery hook.
def before_update(self, node, ports, node_info):
"""Hook to run before Ironic node update.
This hook is run after node is found, just before it's updated with the
data.
This hook is run after node is found and ports are created,
just before the node is updated with the data.
:param node: Ironic node as returned by the Ironic client, should not
be modified directly by the hook.
:param ports: Ironic ports created by discoverd, also should not be
updated directly.
:param discovered_data: processed data from the ramdisk.
:param node_info: processed data from the ramdisk.
:returns: tuple (node patches, port patches) where
*node_patches* is a list of JSON patches [RFC 6902] to apply
to the node, *port_patches* is a dict where keys are

View File

@ -22,8 +22,8 @@ LOG = logging.getLogger('ironic_discoverd.plugins.example')
class ExampleProcessingHook(base.ProcessingHook): # pragma: no cover
def pre_discover(self, node_info):
LOG.info('pre-discover: %s', node_info)
def before_processing(self, node_info):
LOG.debug('before_processing: %s', node_info)
def post_discover(self, node, ports, discovered_data):
LOG.info('post-discover: %s (node %s)', discovered_data, node.uuid)
def before_update(self, node, ports, node_info):
LOG.debug('before_update: %s (node %s)', node_info, node.uuid)

View File

@ -28,13 +28,11 @@ class SchedulerHook(base.ProcessingHook):
KEYS = ('cpus', 'cpu_arch', 'memory_mb', 'local_gb')
def pre_discover(self, node_info):
def before_processing(self, node_info):
"""Validate that required properties are provided by the ramdisk."""
missing = [key for key in self.KEYS if not node_info.get(key)]
if missing:
LOG.error('The following required parameters are missing: %s',
missing)
raise utils.DiscoveryFailed(
raise utils.Error(
'The following required parameters are missing: %s' %
missing)
@ -42,11 +40,11 @@ class SchedulerHook(base.ProcessingHook):
'memory %(memory_mb)s MiB, disk %(local_gb)s GiB',
{key: node_info.get(key) for key in self.KEYS})
def post_discover(self, node, ports, discovered_data):
def before_update(self, node, ports, node_info):
"""Update node with scheduler properties."""
overwrite = conf.getboolean('discoverd', 'overwrite_existing')
patch = [{'op': 'add', 'path': '/properties/%s' % key,
'value': str(discovered_data[key])}
'value': str(node_info[key])}
for key in self.KEYS
if overwrite or not node.properties.get(key)]
return patch, {}
@ -55,7 +53,8 @@ class SchedulerHook(base.ProcessingHook):
class ValidateInterfacesHook(base.ProcessingHook):
"""Hook to validate network interfaces."""
def pre_discover(self, node_info):
def before_processing(self, node_info):
"""Validate information about network interfaces."""
bmc_address = node_info.get('ipmi_address')
compat = conf.getboolean('discoverd', 'ports_for_inactive_interfaces')
@ -76,8 +75,8 @@ class ValidateInterfacesHook(base.ProcessingHook):
if valid_interfaces != node_info['interfaces']:
LOG.warning(
'The following interfaces were invalid or not eligible in '
'discovery data for node with BMC %(ipmi_address)s and were '
'excluded: %(invalid)s',
'introspection data for node with BMC %(ipmi_address)s and '
'were excluded: %(invalid)s',
{'invalid': {n: iface
for n, iface in node_info['interfaces'].items()
if n not in valid_interfaces},
@ -91,9 +90,8 @@ class ValidateInterfacesHook(base.ProcessingHook):
class RamdiskErrorHook(base.ProcessingHook):
"""Hook to process error send from the ramdisk."""
def pre_discover(self, node_info):
def before_processing(self, node_info):
if not node_info.get('error'):
return
LOG.error('Error happened during discovery: %s', node_info['error'])
raise utils.DiscoveryFailed(node_info['error'])
raise utils.Error('Ramdisk reported error: %s' % node_info['error'])

View File

@ -11,7 +11,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handling discovery data from the ramdisk."""
"""Handling introspection data from the ramdisk."""
import logging
import time
@ -33,13 +33,13 @@ _POWER_OFF_CHECK_PERIOD = 5
def process(node_info):
"""Process data from discovery ramdisk.
"""Process data from the discovery ramdisk.
This function heavily relies on the hooks to do the actual data processing.
"""
hooks = plugins_base.processing_hooks_manager()
for hook_ext in hooks:
hook_ext.obj.pre_discover(node_info)
hook_ext.obj.before_processing(node_info)
cached_node = node_cache.find_node(
bmc_address=node_info.get('ipmi_address'),
@ -52,18 +52,18 @@ def process(node_info):
msg = ('Node UUID %s was found in cache, but is not found in Ironic'
% cached_node.uuid)
cached_node.finished(error=msg)
raise utils.DiscoveryFailed(msg, code=404)
raise utils.Error(msg, code=404)
try:
return _process_node(ironic, node, node_info, cached_node)
except utils.DiscoveryFailed as exc:
except utils.Error as exc:
cached_node.finished(error=str(exc))
raise
except Exception as exc:
msg = 'Unexpected exception during processing'
LOG.exception(msg)
cached_node.finished(error=msg, log=False)
raise utils.DiscoveryFailed(msg)
cached_node.finished(error=msg)
raise utils.Error(msg)
def _run_post_hooks(node, ports, node_info):
@ -73,7 +73,7 @@ def _run_post_hooks(node, ports, node_info):
node_patches = []
port_patches = {}
for hook_ext in hooks:
hook_patch = hook_ext.obj.post_discover(node, port_instances,
hook_patch = hook_ext.obj.before_update(node, port_instances,
node_info)
if not hook_patch:
continue
@ -94,7 +94,7 @@ def _process_node(ironic, node, node_info, cached_node):
port = ironic.port.create(node_uuid=node.uuid, address=mac)
ports[mac] = port
except exceptions.Conflict:
LOG.warning('MAC %(mac)s appeared in discovery data for '
LOG.warning('MAC %(mac)s appeared in introspection data for '
'node %(node)s, but already exists in '
'database - skipping',
{'mac': mac, 'node': node.uuid})
@ -104,7 +104,7 @@ def _process_node(ironic, node, node_info, cached_node):
for mac, patches in port_patches.items():
utils.retry_on_conflict(ironic.port.update, ports[mac].uuid, patches)
LOG.debug('Node %s was updated with data from discovery process, '
LOG.debug('Node %s was updated with data from introspection process, '
'patches %s, port patches %s',
node.uuid, node_patches, port_patches)
@ -117,7 +117,7 @@ def _process_node(ironic, node, node_info, cached_node):
'ipmi_username': node.driver_info.get('ipmi_username'),
'ipmi_password': node.driver_info.get('ipmi_password')}
else:
eventlet.greenthread.spawn_n(_finish_discovery, ironic, cached_node)
eventlet.greenthread.spawn_n(_finish, ironic, cached_node)
return {}
@ -128,14 +128,15 @@ def _wait_for_power_management(ironic, cached_node):
validation = utils.retry_on_conflict(ironic.node.validate,
cached_node.uuid)
if validation.power['result']:
_finish_discovery(ironic, cached_node)
_finish(ironic, cached_node)
return
LOG.debug('Waiting for management credentials on node %s '
'to be updated, current error: %s',
cached_node.uuid, validation.power['reason'])
msg = ('Timeout waiting for power credentials update of node %s '
'after discovery' % cached_node.uuid)
'after introspection' % cached_node.uuid)
LOG.error(msg)
cached_node.finished(error=msg)
@ -148,7 +149,7 @@ def _force_power_off(ironic, cached_node):
msg = ('Failed to power off node %s, check it\'s power '
'management configuration: %s' % (cached_node.uuid, exc))
cached_node.finished(error=msg)
raise utils.DiscoveryFailed(msg)
raise utils.Error(msg)
deadline = cached_node.started_at + conf.getint('discoverd', 'timeout')
while time.time() < deadline:
@ -159,13 +160,13 @@ def _force_power_off(ironic, cached_node):
cached_node.uuid, node.power_state)
eventlet.greenthread.sleep(_POWER_OFF_CHECK_PERIOD)
msg = ('Timeout waiting for node %s to power off after discovery' %
msg = ('Timeout waiting for node %s to power off after introspection' %
cached_node.uuid)
cached_node.finished(error=msg)
raise utils.DiscoveryFailed(msg)
raise utils.Error(msg)
def _finish_discovery(ironic, cached_node):
def _finish(ironic, cached_node):
_force_power_off(ironic, cached_node)
patch = [{'op': 'add', 'path': '/extra/newly_discovered', 'value': 'true'},
@ -173,4 +174,5 @@ def _finish_discovery(ironic, cached_node):
utils.retry_on_conflict(ironic.node.update, cached_node.uuid, patch)
cached_node.finished()
LOG.info('Discovery finished successfully for node %s', cached_node.uuid)
LOG.info('Introspection finished successfully for node %s',
cached_node.uuid)

View File

@ -16,8 +16,8 @@ from ironicclient import exceptions
import mock
from ironic_discoverd import conf
from ironic_discoverd import discover
from ironic_discoverd import firewall
from ironic_discoverd import introspect
from ironic_discoverd import node_cache
from ironic_discoverd.test import base as test_base
from ironic_discoverd import utils
@ -51,7 +51,7 @@ class TestDiscover(test_base.NodeTest):
cli.node.validate.return_value = mock.Mock(power={'result': True})
cli.node.list_ports.return_value = self.ports
discover.introspect(self.node.uuid)
introspect.introspect(self.node.uuid)
cli.node.get.assert_called_once_with(self.uuid)
cli.node.validate.assert_called_once_with(self.uuid)
@ -84,7 +84,7 @@ class TestDiscover(test_base.NodeTest):
cli.node.set_power_state.side_effect = [exceptions.Conflict,
None]
discover.introspect(self.node.uuid)
introspect.introspect(self.node.uuid)
cli.node.get.assert_called_once_with(self.uuid)
cli.node.validate.assert_called_with(self.uuid)
@ -107,7 +107,7 @@ class TestDiscover(test_base.NodeTest):
cli.node.validate.return_value = mock.Mock(power={'result': True})
cli.node.list_ports.return_value = self.ports
discover.introspect(self.node_compat.uuid)
introspect.introspect(self.node_compat.uuid)
cli.node.get.assert_called_once_with(self.node_compat.uuid)
cli.node.validate.assert_called_once_with(self.node_compat.uuid)
@ -131,7 +131,7 @@ class TestDiscover(test_base.NodeTest):
cli.node.get.return_value = self.node
cli.node.list_ports.return_value = []
discover.introspect(self.node.uuid)
introspect.introspect(self.node.uuid)
cli.node.list_ports.assert_called_once_with(self.uuid, limit=0)
@ -154,7 +154,7 @@ class TestDiscover(test_base.NodeTest):
cli.node.list_ports.return_value = self.ports
cli.node.validate.side_effect = Exception()
discover.introspect(self.uuid, setup_ipmi_credentials=True)
introspect.introspect(self.uuid, setup_ipmi_credentials=True)
cli.node.update.assert_called_once_with(self.uuid, self.patch)
add_mock.assert_called_once_with(self.uuid,
@ -171,21 +171,21 @@ class TestDiscover(test_base.NodeTest):
cli.node.list_ports.return_value = []
cli.node.validate.side_effect = Exception()
self.assertRaisesRegexp(utils.DiscoveryFailed, 'disabled',
discover.introspect, self.uuid,
self.assertRaisesRegexp(utils.Error, 'disabled',
introspect.introspect, self.uuid,
setup_ipmi_credentials=True)
def test_failed_to_get_node(self, client_mock, add_mock, filters_mock):
cli = client_mock.return_value
cli.node.get.side_effect = exceptions.NotFound()
self.assertRaisesRegexp(utils.DiscoveryFailed,
self.assertRaisesRegexp(utils.Error,
'Cannot find node',
discover.introspect, self.uuid)
introspect.introspect, self.uuid)
cli.node.get.side_effect = exceptions.BadRequest()
self.assertRaisesRegexp(utils.DiscoveryFailed,
self.assertRaisesRegexp(utils.Error,
'Cannot get node',
discover.introspect, self.uuid)
introspect.introspect, self.uuid)
self.assertEqual(0, cli.node.list_ports.call_count)
self.assertEqual(0, filters_mock.call_count)
@ -201,9 +201,9 @@ class TestDiscover(test_base.NodeTest):
'reason': 'oops'})
self.assertRaisesRegexp(
utils.DiscoveryFailed,
utils.Error,
'Failed validation of power interface for node',
discover.introspect, self.uuid)
introspect.introspect, self.uuid)
cli.node.validate.assert_called_once_with(self.uuid)
self.assertEqual(0, cli.node.list_ports.call_count)
@ -218,9 +218,9 @@ class TestDiscover(test_base.NodeTest):
cli.node.get.return_value = self.node
self.assertRaisesRegexp(
utils.DiscoveryFailed,
utils.Error,
'node uuid with provision state "active"',
discover.introspect, self.uuid)
introspect.introspect, self.uuid)
self.assertEqual(0, cli.node.list_ports.call_count)
self.assertEqual(0, filters_mock.call_count)
@ -234,9 +234,9 @@ class TestDiscover(test_base.NodeTest):
cli.node.get.return_value = self.node
self.assertRaisesRegexp(
utils.DiscoveryFailed,
utils.Error,
'node uuid with power state "power on"',
discover.introspect, self.uuid)
introspect.introspect, self.uuid)
self.assertEqual(0, cli.node.list_ports.call_count)
self.assertEqual(0, filters_mock.call_count)

View File

@ -15,12 +15,11 @@ import json
import unittest
import eventlet
from ironicclient import exceptions
from keystoneclient import exceptions as keystone_exc
import mock
from ironic_discoverd import conf
from ironic_discoverd import discover
from ironic_discoverd import introspect
from ironic_discoverd import main
from ironic_discoverd import node_cache
from ironic_discoverd.plugins import base as plugins_base
@ -37,51 +36,51 @@ class TestApi(test_base.BaseTest):
self.app = main.app.test_client()
conf.CONF.set('discoverd', 'authenticate', 'false')
@mock.patch.object(discover, 'introspect', autospec=True)
def test_introspect_no_authentication(self, discover_mock):
@mock.patch.object(introspect, 'introspect', autospec=True)
def test_introspect_no_authentication(self, introspect_mock):
conf.CONF.set('discoverd', 'authenticate', 'false')
res = self.app.post('/v1/introspection/uuid1')
self.assertEqual(202, res.status_code)
discover_mock.assert_called_once_with("uuid1",
setup_ipmi_credentials=False)
introspect_mock.assert_called_once_with("uuid1",
setup_ipmi_credentials=False)
@mock.patch.object(discover, 'introspect', autospec=True)
def test_introspect_setup_ipmi_credentials(self, discover_mock):
@mock.patch.object(introspect, 'introspect', autospec=True)
def test_introspect_setup_ipmi_credentials(self, introspect_mock):
conf.CONF.set('discoverd', 'authenticate', 'false')
res = self.app.post('/v1/introspection/uuid1?setup_ipmi_credentials=1')
self.assertEqual(202, res.status_code)
discover_mock.assert_called_once_with("uuid1",
setup_ipmi_credentials=True)
introspect_mock.assert_called_once_with("uuid1",
setup_ipmi_credentials=True)
@mock.patch.object(discover, 'introspect', autospec=True)
def test_intospect_failed(self, discover_mock):
discover_mock.side_effect = utils.DiscoveryFailed("boom")
@mock.patch.object(introspect, 'introspect', autospec=True)
def test_intospect_failed(self, introspect_mock):
introspect_mock.side_effect = utils.Error("boom")
res = self.app.post('/v1/introspection/uuid1')
self.assertEqual(400, res.status_code)
self.assertEqual(b"boom", res.data)
discover_mock.assert_called_once_with("uuid1",
setup_ipmi_credentials=False)
introspect_mock.assert_called_once_with("uuid1",
setup_ipmi_credentials=False)
@mock.patch.object(discover, 'introspect', autospec=True)
def test_discover_missing_authentication(self, discover_mock):
@mock.patch.object(introspect, 'introspect', autospec=True)
def test_introspect_missing_authentication(self, introspect_mock):
conf.CONF.set('discoverd', 'authenticate', 'true')
res = self.app.post('/v1/introspection/uuid1')
self.assertEqual(401, res.status_code)
self.assertFalse(discover_mock.called)
self.assertFalse(introspect_mock.called)
@mock.patch.object(utils, 'check_is_admin', autospec=True)
@mock.patch.object(discover, 'introspect', autospec=True)
def test_discover_failed_authentication(self, discover_mock,
keystone_mock):
@mock.patch.object(introspect, 'introspect', autospec=True)
def test_introspect_failed_authentication(self, introspect_mock,
keystone_mock):
conf.CONF.set('discoverd', 'authenticate', 'true')
keystone_mock.side_effect = keystone_exc.Unauthorized()
res = self.app.post('/v1/introspection/uuid1',
headers={'X-Auth-Token': 'token'})
self.assertEqual(403, res.status_code)
self.assertFalse(discover_mock.called)
self.assertFalse(introspect_mock.called)
keystone_mock.assert_called_once_with(token='token')
@mock.patch.object(discover, 'introspect', autospec=True)
@mock.patch.object(introspect, 'introspect', autospec=True)
def test_discover(self, discover_mock):
res = self.app.post('/v1/discover', data='["uuid1"]')
self.assertEqual(202, res.status_code)
@ -98,7 +97,7 @@ class TestApi(test_base.BaseTest):
@mock.patch.object(process, 'process', autospec=True)
def test_continue_failed(self, process_mock):
process_mock.side_effect = utils.DiscoveryFailed("boom")
process_mock.side_effect = utils.Error("boom")
res = self.app.post('/v1/continue', data='"JSON"')
self.assertEqual(400, res.status_code)
process_mock.assert_called_once_with("JSON")
@ -151,41 +150,20 @@ class TestCheckIronicAvailable(test_base.BaseTest):
class TestPlugins(unittest.TestCase):
@mock.patch.object(example_plugin.ExampleProcessingHook, 'pre_discover',
autospec=True)
@mock.patch.object(example_plugin.ExampleProcessingHook, 'post_discover',
autospec=True)
@mock.patch.object(example_plugin.ExampleProcessingHook,
'before_processing', autospec=True)
@mock.patch.object(example_plugin.ExampleProcessingHook,
'before_update', autospec=True)
def test_hook(self, mock_post, mock_pre):
plugins_base._HOOKS_MGR = None
conf.CONF.set('discoverd', 'processing_hooks', 'example')
mgr = plugins_base.processing_hooks_manager()
mgr.map_method('pre_discover', 'node_info')
mgr.map_method('before_processing', 'node_info')
mock_pre.assert_called_once_with(mock.ANY, 'node_info')
mgr.map_method('post_discover', 'node', ['port'], 'node_info')
mgr.map_method('before_update', 'node', ['port'], 'node_info')
mock_post.assert_called_once_with(mock.ANY, 'node', ['port'],
'node_info')
def test_manager_is_cached(self):
self.assertIs(plugins_base.processing_hooks_manager(),
plugins_base.processing_hooks_manager())
@mock.patch.object(eventlet.greenthread, 'sleep', lambda _: None)
class TestUtils(unittest.TestCase):
def test_retry_on_conflict(self):
call = mock.Mock()
call.side_effect = ([exceptions.Conflict()] * (utils.RETRY_COUNT - 1)
+ [mock.sentinel.result])
res = utils.retry_on_conflict(call, 1, 2, x=3)
self.assertEqual(mock.sentinel.result, res)
call.assert_called_with(1, 2, x=3)
self.assertEqual(utils.RETRY_COUNT, call.call_count)
def test_retry_on_conflict_fail(self):
call = mock.Mock()
call.side_effect = ([exceptions.Conflict()] * (utils.RETRY_COUNT + 1)
+ [mock.sentinel.result])
self.assertRaises(exceptions.Conflict, utils.retry_on_conflict,
call, 1, 2, x=3)
call.assert_called_with(1, 2, x=3)
self.assertEqual(utils.RETRY_COUNT, call.call_count)

View File

@ -59,11 +59,11 @@ class TestNodeCache(test_base.NodeTest):
"values(?, ?, ?)",
('mac', '11:22:11:22:11:22', 'another-uuid'))
self.assertRaises(utils.DiscoveryFailed,
self.assertRaises(utils.Error,
node_cache.add_node,
self.node.uuid, mac=['11:22:11:22:11:22'])
def test_macs_on_discovery(self):
def test_active_macs(self):
with self.db:
self.db.execute("insert into nodes(uuid) values(?)",
(self.node.uuid,))
@ -72,7 +72,7 @@ class TestNodeCache(test_base.NodeTest):
[('mac', '11:22:11:22:11:22', self.uuid),
('mac', '22:11:22:11:22:11', self.uuid)])
self.assertEqual({'11:22:11:22:11:22', '22:11:22:11:22:11'},
node_cache.macs_on_discovery())
node_cache.active_macs())
class TestNodeCacheFind(test_base.NodeTest):
@ -84,8 +84,8 @@ class TestNodeCacheFind(test_base.NodeTest):
mac=self.macs)
def test_no_data(self):
self.assertRaises(utils.DiscoveryFailed, node_cache.find_node)
self.assertRaises(utils.DiscoveryFailed, node_cache.find_node, mac=[])
self.assertRaises(utils.Error, node_cache.find_node)
self.assertRaises(utils.Error, node_cache.find_node, mac=[])
def test_bmc(self):
res = node_cache.find_node(bmc_address='1.2.3.4')
@ -98,13 +98,13 @@ class TestNodeCacheFind(test_base.NodeTest):
self.assertTrue(time.time() - 60 < res.started_at < time.time() + 1)
def test_macs_not_found(self):
self.assertRaises(utils.DiscoveryFailed, node_cache.find_node,
self.assertRaises(utils.Error, node_cache.find_node,
mac=['11:22:33:33:33:33',
'66:66:44:33:22:11'])
def test_macs_multiple_found(self):
node_cache.add_node('uuid2', mac=self.macs2)
self.assertRaises(utils.DiscoveryFailed, node_cache.find_node,
self.assertRaises(utils.Error, node_cache.find_node,
mac=[self.macs[0], self.macs2[0]])
def test_both(self):
@ -116,14 +116,14 @@ class TestNodeCacheFind(test_base.NodeTest):
def test_inconsistency(self):
with self.db:
self.db.execute('delete from nodes where uuid=?', (self.uuid,))
self.assertRaises(utils.DiscoveryFailed, node_cache.find_node,
self.assertRaises(utils.Error, node_cache.find_node,
bmc_address='1.2.3.4')
def test_already_finished(self):
with self.db:
self.db.execute('update nodes set finished_at=42.0 where uuid=?',
(self.uuid,))
self.assertRaises(utils.DiscoveryFailed, node_cache.find_node,
self.assertRaises(utils.Error, node_cache.find_node,
bmc_address='1.2.3.4')
@ -176,7 +176,8 @@ class TestNodeCacheCleanUp(test_base.NodeTest):
res = [tuple(row) for row in self.db.execute(
'select finished_at, error from nodes').fetchall()]
self.assertEqual([(self.started_at + 100, 'Discovery timed out')], res)
self.assertEqual([(self.started_at + 100, 'Introspection timeout')],
res)
self.assertEqual([], self.db.execute(
'select * from attributes').fetchall())
self.assertEqual([], self.db.execute(
@ -208,7 +209,7 @@ class TestNodeCacheGetNode(test_base.NodeTest):
self.assertIsNone(info.error)
def test_not_found(self):
self.assertRaises(utils.DiscoveryFailed, node_cache.get_node, 'foo')
self.assertRaises(utils.Error, node_cache.get_node, 'foo')
@mock.patch.object(time, 'time', lambda: 42.0)

View File

@ -156,7 +156,7 @@ class TestProcess(BaseTest):
def test_error(self, cli, pop_mock, process_mock):
self.data['error'] = 'BOOM'
self.assertRaisesRegexp(utils.DiscoveryFailed,
self.assertRaisesRegexp(utils.Error,
'BOOM',
process.process, self.data)
self.assertFalse(process_mock.called)
@ -165,16 +165,16 @@ class TestProcess(BaseTest):
def test_missing_required(self, cli, pop_mock, process_mock):
del self.data['cpus']
self.assertRaisesRegexp(utils.DiscoveryFailed,
self.assertRaisesRegexp(utils.Error,
'missing',
process.process, self.data)
self.assertFalse(process_mock.called)
@prepare_mocks
def test_not_found_in_cache(self, cli, pop_mock, process_mock):
pop_mock.side_effect = utils.DiscoveryFailed('not found')
pop_mock.side_effect = utils.Error('not found')
self.assertRaisesRegexp(utils.DiscoveryFailed,
self.assertRaisesRegexp(utils.Error,
'not found',
process.process, self.data)
self.assertFalse(cli.node.get.called)
@ -184,7 +184,7 @@ class TestProcess(BaseTest):
def test_not_found_in_ironic(self, cli, pop_mock, process_mock):
cli.node.get.side_effect = exceptions.NotFound()
self.assertRaisesRegexp(utils.DiscoveryFailed,
self.assertRaisesRegexp(utils.Error,
'not found',
process.process, self.data)
cli.node.get.assert_called_once_with(self.uuid)
@ -196,9 +196,9 @@ class TestProcess(BaseTest):
pop_mock.return_value = node_cache.NodeInfo(
uuid=self.node.uuid,
started_at=self.started_at)
process_mock.side_effect = utils.DiscoveryFailed('boom')
process_mock.side_effect = utils.Error('boom')
self.assertRaisesRegexp(utils.DiscoveryFailed, 'boom',
self.assertRaisesRegexp(utils.Error, 'boom',
process.process, self.data)
finished_mock.assert_called_once_with(mock.ANY, error='boom')
@ -211,18 +211,18 @@ class TestProcess(BaseTest):
started_at=self.started_at)
process_mock.side_effect = RuntimeError('boom')
self.assertRaisesRegexp(utils.DiscoveryFailed, 'Unexpected exception',
self.assertRaisesRegexp(utils.Error, 'Unexpected exception',
process.process, self.data)
finished_mock.assert_called_once_with(
mock.ANY, log=False,
mock.ANY,
error='Unexpected exception during processing')
@mock.patch.object(eventlet.greenthread, 'spawn_n',
lambda f, *a: f(*a) and None)
@mock.patch.object(eventlet.greenthread, 'sleep', lambda _: None)
@mock.patch.object(example_plugin.ExampleProcessingHook, 'post_discover')
@mock.patch.object(example_plugin.ExampleProcessingHook, 'before_update')
@mock.patch.object(firewall, 'update_filters', autospec=True)
class TestProcessNode(BaseTest):
def setUp(self):
@ -334,13 +334,14 @@ class TestProcessNode(BaseTest):
time_mock.return_value = self.started_at + 1000
self.cli.node.get.return_value = self.node
self.assertRaisesRegexp(utils.DiscoveryFailed, 'power off', self.call)
self.assertRaisesRegexp(utils.Error, 'power off', self.call)
self.cli.node.update.assert_called_once_with(self.uuid,
self.patch_before)
finished_mock.assert_called_once_with(
mock.ANY,
error='Timeout waiting for node uuid to power off after discovery')
error='Timeout waiting for node uuid to power off '
'after introspection')
def test_port_failed(self, filters_mock, post_hook_mock):
self.ports[0] = exceptions.Conflict()
@ -396,15 +397,15 @@ class TestProcessNode(BaseTest):
self.assertFalse(self.cli.node.set_power_state.called)
finished_mock.assert_called_once_with(
mock.ANY,
error='Timeout waiting for power credentials update of node '
'uuid after discovery')
error='Timeout waiting for power credentials update of node uuid '
'after introspection')
@mock.patch.object(node_cache.NodeInfo, 'finished', autospec=True)
def test_power_off_failed(self, finished_mock, filters_mock,
post_hook_mock):
self.cli.node.set_power_state.side_effect = RuntimeError('boom')
self.assertRaisesRegexp(utils.DiscoveryFailed, 'Failed to power off',
self.assertRaisesRegexp(utils.Error, 'Failed to power off',
self.call)
self.cli.node.set_power_state.assert_called_once_with(self.uuid, 'off')

View File

@ -11,6 +11,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import eventlet
from ironicclient import exceptions
from keystoneclient import exceptions as keystone_exc
import mock
@ -19,8 +23,7 @@ from ironic_discoverd.test import base
from ironic_discoverd import utils
class TestUtils(base.BaseTest):
class TestCheckIsAdmin(base.BaseTest):
@mock.patch('keystoneclient.v2_0.client.Client')
def test_admin_token(self, mock_ks):
conf.CONF.set('discoverd', 'os_auth_url', '127.0.0.1')
@ -39,3 +42,24 @@ class TestUtils(base.BaseTest):
fake_client.roles.roles_for_user.return_value = [mockMember]
self.assertRaises(keystone_exc.Unauthorized,
utils.check_is_admin, 'token')
@mock.patch.object(eventlet.greenthread, 'sleep', lambda _: None)
class TestRetryOnConflict(unittest.TestCase):
def test_retry_on_conflict(self):
call = mock.Mock()
call.side_effect = ([exceptions.Conflict()] * (utils.RETRY_COUNT - 1)
+ [mock.sentinel.result])
res = utils.retry_on_conflict(call, 1, 2, x=3)
self.assertEqual(mock.sentinel.result, res)
call.assert_called_with(1, 2, x=3)
self.assertEqual(utils.RETRY_COUNT, call.call_count)
def test_retry_on_conflict_fail(self):
call = mock.Mock()
call.side_effect = ([exceptions.Conflict()] * (utils.RETRY_COUNT + 1)
+ [mock.sentinel.result])
self.assertRaises(exceptions.Conflict, utils.retry_on_conflict,
call, 1, 2, x=3)
call.assert_called_with(1, 2, x=3)
self.assertEqual(utils.RETRY_COUNT, call.call_count)

View File

@ -30,13 +30,17 @@ RETRY_COUNT = 10
RETRY_DELAY = 2
class DiscoveryFailed(Exception):
class Error(Exception):
"""Discoverd exception."""
def __init__(self, msg, code=400):
super(DiscoveryFailed, self).__init__(msg)
super(Error, self).__init__(msg)
LOG.error(msg)
self.http_code = code
def get_client(): # pragma: no cover
"""Get Ironic client instance."""
args = dict((k, conf.get('discoverd', k)) for k in OS_ARGS)
return client.get_client(1, **args)
@ -60,12 +64,14 @@ def check_is_admin(token):
def is_valid_mac(address):
"""Return whether given value is a valid MAC."""
m = "[0-9a-f]{2}(:[0-9a-f]{2}){5}$"
return (isinstance(address, six.string_types)
and re.match(m, address.lower()))
def retry_on_conflict(call, *args, **kwargs):
"""Wrapper to retry 409 CONFLICT exceptions."""
for i in range(RETRY_COUNT):
try:
return call(*args, **kwargs)