Fixed compatibility with stable/liberty

Change-Id: I2e79db9816e7cd7c55318dcb20c72bd7671c00d2
Mark Goddard 2016-04-04 13:23:39 +03:00 committed by Oleksandr Berezovskyi
parent de9c84dc80
commit 210a6383e4
10 changed files with 693 additions and 652 deletions

View File

@ -14,7 +14,7 @@
# limitations under the License.
from ironic.drivers import base
from ironic.drivers.modules import discoverd
from ironic.drivers.modules import inspector
from ironic.drivers.modules import ipmitool
from ironic.drivers.modules import ssh
@ -39,7 +39,7 @@ class BareonSwiftAndIPMIToolDriver(base.BaseDriver):
self.deploy = bareon_swift.BareonSwiftDeploy()
self.management = ipmitool.IPMIManagement()
self.vendor = bareon_swift.BareonSwiftVendor()
self.inspect = discoverd.DiscoverdInspect.create_if_enabled(
self.inspect = inspector.Inspector.create_if_enabled(
'BareonSwiftAndIPMIToolDriver')
@ -61,7 +61,7 @@ class BareonSwiftAndSSHDriver(base.BaseDriver):
self.deploy = bareon_swift.BareonSwiftDeploy()
self.management = ssh.SSHManagement()
self.vendor = bareon_swift.BareonSwiftVendor()
self.inspect = discoverd.DiscoverdInspect.create_if_enabled(
self.inspect = inspector.Inspector.create_if_enabled(
'BareonSwiftAndSSHDriver')
@ -82,7 +82,7 @@ class BareonRsyncAndIPMIToolDriver(base.BaseDriver):
self.deploy = bareon_rsync.BareonRsyncDeploy()
self.management = ipmitool.IPMIManagement()
self.vendor = bareon_rsync.BareonRsyncVendor()
self.inspect = discoverd.DiscoverdInspect.create_if_enabled(
self.inspect = inspector.Inspector.create_if_enabled(
'BareonRsyncAndIPMIToolDriver')
@ -104,5 +104,5 @@ class BareonRsyncAndSSHDriver(base.BaseDriver):
self.deploy = bareon_rsync.BareonRsyncDeploy()
self.management = ssh.SSHManagement()
self.vendor = bareon_rsync.BareonRsyncVendor()
self.inspect = discoverd.DiscoverdInspect.create_if_enabled(
self.inspect = inspector.Inspector.create_if_enabled(
'BareonRsyncAndSSHDriver')
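
For context: the hunks above track the Kilo-to-Liberty rename of ironic's in-band inspection module, where ironic.drivers.modules.discoverd.DiscoverdInspect became ironic.drivers.modules.inspector.Inspector. Below is a minimal, illustrative sketch of the new wiring; ExampleBareonDriver is a hypothetical name, and a real driver also defines deploy and vendor interfaces.

from ironic.drivers import base
from ironic.drivers.modules import inspector
from ironic.drivers.modules import ipmitool


class ExampleBareonDriver(base.BaseDriver):
    """Hypothetical driver skeleton (deploy/vendor interfaces omitted)."""

    def __init__(self):
        self.power = ipmitool.IPMIPower()
        self.management = ipmitool.IPMIManagement()
        # create_if_enabled() returns an Inspector instance only when
        # in-band inspection is enabled in the [inspector] config section,
        # otherwise None, keeping inspection optional per deployment.
        self.inspect = inspector.Inspector.create_if_enabled(
            'ExampleBareonDriver')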

View File

@ -27,6 +27,9 @@ import six
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_utils import excutils
from oslo_utils import fileutils
from oslo_log import log
from oslo_service import loopingcall
from ironic.common import boot_devices
from ironic.common import dhcp_factory
@ -45,9 +48,6 @@ from ironic.drivers import base
from ironic.drivers.modules import deploy_utils
from ironic.drivers.modules import image_cache
from ironic.objects import node as db_node
from ironic.openstack.common import fileutils
from ironic.openstack.common import log
from ironic.openstack.common import loopingcall
from bareon_ironic.modules import bareon_exception
from bareon_ironic.modules import bareon_utils
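
The import swap above follows the oslo graduation that landed in Liberty: the bundled ironic.openstack.common copies of fileutils, log, and loopingcall were dropped in favour of the released oslo_utils, oslo_log, and oslo_service libraries. A short sketch of equivalent usage under the new imports (the helper names here are illustrative, not taken from this commit):

from oslo_log import log
from oslo_service import loopingcall
from oslo_utils import fileutils

LOG = log.getLogger(__name__)


def ensure_cache_dir(path):
    # oslo_utils.fileutils.ensure_tree is a drop-in replacement for the old
    # incubator helper: it creates the directory tree if it is missing.
    fileutils.ensure_tree(path)
    LOG.debug("Cache directory %s is ready", path)


def poll(check, interval=1.0):
    # FixedIntervalLoopingCall re-runs `check` every `interval` seconds
    # until it raises loopingcall.LoopingCallDone(result).
    timer = loopingcall.FixedIntervalLoopingCall(check)
    return timer.start(interval=interval).wait()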

View File

@ -23,6 +23,7 @@ import tempfile
import six
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import strutils
from ironic.common import dhcp_factory
@ -30,7 +31,6 @@ from ironic.common import exception
from ironic.common import keystone
from ironic.common import utils
from ironic.common.i18n import _, _LW
from ironic.openstack.common import log as logging
LOG = logging.getLogger(__name__)
CONF = cfg.CONF

View File

@ -23,9 +23,9 @@ import tempfile
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_log import log
from ironic.common import exception
from ironic.openstack.common import log
from bareon_ironic.modules import bareon_utils
from bareon_ironic.modules.resources import resources

View File

@ -22,6 +22,7 @@ import uuid
from oslo_config import cfg
from oslo_concurrency import processutils
from oslo_log import log as logging
from oslo_utils import uuidutils
import requests
import six
@ -29,7 +30,6 @@ import six.moves.urllib.parse as urlparse
from ironic.common import exception
from ironic.common.i18n import _
from ironic.openstack.common import log as logging
from ironic.common import image_service
from ironic.common import keystone
from ironic.common import utils

View File

@ -19,14 +19,14 @@ import os
from oslo_config import cfg
from oslo_serialization import jsonutils
from oslo_utils import fileutils
from oslo_log import log
from six.moves.urllib import parse
from ironic.common import exception
from ironic.common import utils
from ironic.common.i18n import _
from ironic.drivers.modules import image_cache
from ironic.openstack.common import fileutils
from ironic.openstack.common import log
from bareon_ironic.modules import bareon_exception
from bareon_ironic.modules import bareon_utils

View File

@ -17,8 +17,7 @@
import os
from oslo_config import cfg
from ironic.openstack.common import log
from oslo_log import log
rsync_opts = [
cfg.StrOpt('rsync_server',
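
The same log-import swap applies to the rsync resource module, which registers its options through oslo.config. A minimal, hypothetical sketch of that registration pattern (the 'rsync' group name and the default value are assumptions for illustration, not values from this commit):

from oslo_config import cfg

CONF = cfg.CONF

rsync_opts = [
    cfg.StrOpt('rsync_server',
               default='127.0.0.1',
               help='IP address or hostname of the rsync server.'),
]
# After registration the value is available as CONF.rsync.rsync_server.
CONF.register_opts(rsync_opts, group='rsync')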

View File

@ -1,311 +0,0 @@
diff --git a/ironic/api/config.py b/ironic/api/config.py
index 38938c1..18c82fd 100644
--- a/ironic/api/config.py
+++ b/ironic/api/config.py
@@ -31,7 +31,8 @@ app = {
'/',
'/v1',
'/v1/drivers/[a-z_]*/vendor_passthru/lookup',
- '/v1/nodes/[a-z0-9\-]+/vendor_passthru/heartbeat'
+ '/v1/nodes/[a-z0-9\-]+/vendor_passthru/heartbeat',
+ '/v1/nodes/[a-z0-9\-]+/vendor_passthru/pass_deploy_info',
],
}
diff --git a/ironic/api/controllers/v1/node.py b/ironic/api/controllers/v1/node.py
index ce48e09..0df9a3f 100644
--- a/ironic/api/controllers/v1/node.py
+++ b/ironic/api/controllers/v1/node.py
@@ -381,13 +381,17 @@ class NodeStatesController(rest.RestController):
rpc_node = api_utils.get_rpc_node(node_ident)
topic = pecan.request.rpcapi.get_topic_for(rpc_node)
+ driver = api_utils.get_driver_by_name(rpc_node.driver)
+ driver_can_terminate = (driver and
+ driver.deploy.can_terminate_deployment)
# Normally, we let the task manager recognize and deal with
# NodeLocked exceptions. However, that isn't done until the RPC calls
# below. In order to main backward compatibility with our API HTTP
# response codes, we have this check here to deal with cases where
# a node is already being operated on (DEPLOYING or such) and we
# want to continue returning 409. Without it, we'd return 400.
- if rpc_node.reservation:
+ if (not (target == ir_states.DELETED and driver_can_terminate) and
+ rpc_node.reservation):
raise exception.NodeLocked(node=rpc_node.uuid,
host=rpc_node.reservation)
diff --git a/ironic/api/controllers/v1/utils.py b/ironic/api/controllers/v1/utils.py
index 6132e12..91ca0f2 100644
--- a/ironic/api/controllers/v1/utils.py
+++ b/ironic/api/controllers/v1/utils.py
@@ -19,6 +19,7 @@ from oslo_utils import uuidutils
import pecan
import wsme
+from ironic.common import driver_factory
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import utils
@@ -102,3 +103,12 @@ def is_valid_node_name(name):
:returns: True if the name is valid, False otherwise.
"""
return utils.is_hostname_safe(name) and (not uuidutils.is_uuid_like(name))
+
+
+def get_driver_by_name(driver_name):
+ _driver_factory = driver_factory.DriverFactory()
+ try:
+ driver = _driver_factory[driver_name]
+ return driver.obj
+ except Exception:
+ return None
diff --git a/ironic/common/context.py b/ironic/common/context.py
index aaeffb3..d167e26 100644
--- a/ironic/common/context.py
+++ b/ironic/common/context.py
@@ -63,5 +63,4 @@ class RequestContext(context.RequestContext):
@classmethod
def from_dict(cls, values):
values.pop('user', None)
- values.pop('tenant', None)
return cls(**values)
diff --git a/ironic/common/states.py b/ironic/common/states.py
index 7ebd052..df30c2f 100644
--- a/ironic/common/states.py
+++ b/ironic/common/states.py
@@ -218,6 +218,9 @@ machine.add_state(INSPECTFAIL, target=MANAGEABLE, **watchers)
# A deployment may fail
machine.add_transition(DEPLOYING, DEPLOYFAIL, 'fail')
+# A deployment may be terminated
+machine.add_transition(DEPLOYING, DELETING, 'delete')
+
# A failed deployment may be retried
# ironic/conductor/manager.py:do_node_deploy()
machine.add_transition(DEPLOYFAIL, DEPLOYING, 'rebuild')
diff --git a/ironic/common/swift.py b/ironic/common/swift.py
index a4444e2..4cc36c4 100644
--- a/ironic/common/swift.py
+++ b/ironic/common/swift.py
@@ -23,6 +23,7 @@ from swiftclient import utils as swift_utils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import keystone
+from ironic.common import utils
from ironic.openstack.common import log as logging
swift_opts = [
@@ -36,6 +37,13 @@ swift_opts = [
CONF = cfg.CONF
CONF.register_opts(swift_opts, group='swift')
+CONF.import_opt('swift_endpoint_url',
+ 'ironic.common.glance_service.v2.image_service',
+ group='glance')
+CONF.import_opt('swift_api_version',
+ 'ironic.common.glance_service.v2.image_service',
+ group='glance')
+
CONF.import_opt('admin_user', 'keystonemiddleware.auth_token',
group='keystone_authtoken')
CONF.import_opt('admin_tenant_name', 'keystonemiddleware.auth_token',
@@ -60,7 +68,9 @@ class SwiftAPI(object):
tenant_name=CONF.keystone_authtoken.admin_tenant_name,
key=CONF.keystone_authtoken.admin_password,
auth_url=CONF.keystone_authtoken.auth_uri,
- auth_version=CONF.keystone_authtoken.auth_version):
+ auth_version=CONF.keystone_authtoken.auth_version,
+ preauthtoken=None,
+ preauthtenant=None):
"""Constructor for creating a SwiftAPI object.
:param user: the name of the user for Swift account
@@ -68,15 +78,40 @@ class SwiftAPI(object):
:param key: the 'password' or key to authenticate with
:param auth_url: the url for authentication
:param auth_version: the version of api to use for authentication
+ :param preauthtoken: authentication token (if you have already
+ authenticated) note authurl/user/key/tenant_name
+ are not required when specifying preauthtoken
+ :param preauthtenant: a tenant that will be accessed using the
+ preauthtoken
"""
- auth_url = keystone.get_keystone_url(auth_url, auth_version)
- params = {'retries': CONF.swift.swift_max_retries,
- 'insecure': CONF.keystone_authtoken.insecure,
- 'user': user,
- 'tenant_name': tenant_name,
- 'key': key,
- 'authurl': auth_url,
- 'auth_version': auth_version}
+ params = {
+ 'retries': CONF.swift.swift_max_retries,
+ 'insecure': CONF.keystone_authtoken.insecure
+ }
+
+ if preauthtoken:
+ # Determining swift url for the user's tenant account.
+ tenant_id = utils.get_tenant_id(tenant_name=preauthtenant)
+ url = "{endpoint}/{api_ver}/AUTH_{tenant}".format(
+ endpoint=CONF.glance.swift_endpoint_url,
+ api_ver=CONF.glance.swift_api_version,
+ tenant=tenant_id
+ )
+ # authurl/user/key/tenant_name are not required when specifying
+ # preauthtoken
+ params.update({
+ 'preauthtoken': preauthtoken,
+ 'preauthurl': url
+ })
+ else:
+ auth_url = keystone.get_keystone_url(auth_url, auth_version)
+ params.update({
+ 'user': user,
+ 'tenant_name': tenant_name,
+ 'key': key,
+ 'authurl': auth_url,
+ 'auth_version': auth_version
+ })
self.connection = swift_client.Connection(**params)
@@ -128,8 +163,8 @@ class SwiftAPI(object):
operation = _("head account")
raise exception.SwiftOperationError(operation=operation,
error=e)
-
- storage_url, token = self.connection.get_auth()
+ storage_url = (self.connection.os_options.get('object_storage_url') or
+ self.connection.get_auth()[0])
parse_result = parse.urlparse(storage_url)
swift_object_path = '/'.join((parse_result.path, container, object))
temp_url_key = account_info['x-account-meta-temp-url-key']
@@ -186,3 +221,23 @@ class SwiftAPI(object):
except swift_exceptions.ClientException as e:
operation = _("post object")
raise exception.SwiftOperationError(operation=operation, error=e)
+
+ def get_object(self, container, object, object_headers=None,
+ chunk_size=None):
+ """Get Swift object.
+
+ :param container: The name of the container in which Swift object
+ is placed.
+ :param object: The name of the object in Swift
+ :param object_headers: the headers for the object to pass to Swift
+ :param chunk_size: size of the chunk used to read from the response
+ :returns: Tuple (body, headers)
+ :raises: SwiftOperationError, if operation with Swift fails.
+ """
+ try:
+ return self.connection.get_object(container, object,
+ headers=object_headers,
+ resp_chunk_size=chunk_size)
+ except swift_exceptions.ClientException as e:
+ operation = _("get object")
+ raise exception.SwiftOperationError(operation=operation, error=e)
diff --git a/ironic/common/utils.py b/ironic/common/utils.py
index 3633f82..4d1ca28 100644
--- a/ironic/common/utils.py
+++ b/ironic/common/utils.py
@@ -38,6 +38,7 @@ from ironic.common import exception
from ironic.common.i18n import _
from ironic.common.i18n import _LE
from ironic.common.i18n import _LW
+from ironic.common import keystone
from ironic.openstack.common import log as logging
utils_opts = [
@@ -536,3 +537,8 @@ def dd(src, dst, *args):
def is_http_url(url):
url = url.lower()
return url.startswith('http://') or url.startswith('https://')
+
+
+def get_tenant_id(tenant_name):
+ ksclient = keystone._get_ksclient()
+ return ksclient.tenants.find(name=tenant_name).to_dict()['id']
diff --git a/ironic/conductor/manager.py b/ironic/conductor/manager.py
index c2b75bc..53f516b 100644
--- a/ironic/conductor/manager.py
+++ b/ironic/conductor/manager.py
@@ -766,6 +766,11 @@ class ConductorManager(periodic_task.PeriodicTasks):
"""
LOG.debug("RPC do_node_tear_down called for node %s." % node_id)
+ with task_manager.acquire(context, node_id, shared=True) as task:
+ if (task.node.provision_state == states.DEPLOYING and
+ task.driver.deploy.can_terminate_deployment):
+ task.driver.deploy.terminate_deployment(task)
+
with task_manager.acquire(context, node_id, shared=False) as task:
try:
# NOTE(ghe): Valid power driver values are needed to perform
diff --git a/ironic/drivers/base.py b/ironic/drivers/base.py
index e0685d0..d1fa4bc 100644
--- a/ironic/drivers/base.py
+++ b/ironic/drivers/base.py
@@ -318,6 +318,13 @@ class DeployInterface(BaseInterface):
"""
pass
+ def terminate_deployment(self, *args, **kwargs):
+ pass
+
+ @property
+ def can_terminate_deployment(self):
+ return False
+
@six.add_metaclass(abc.ABCMeta)
class PowerInterface(BaseInterface):
diff --git a/ironic/drivers/modules/image_cache.py b/ironic/drivers/modules/image_cache.py
index d7b27c0..eb3ec55 100644
--- a/ironic/drivers/modules/image_cache.py
+++ b/ironic/drivers/modules/image_cache.py
@@ -25,9 +25,9 @@ import uuid
from oslo_concurrency import lockutils
from oslo_config import cfg
+from oslo_utils import uuidutils
from ironic.common import exception
-from ironic.common.glance_service import service_utils
from ironic.common.i18n import _LI
from ironic.common.i18n import _LW
from ironic.common import images
@@ -100,15 +100,15 @@ class ImageCache(object):
# TODO(ghe): have hard links and counts the same behaviour in all fs
- # NOTE(vdrok): File name is converted to UUID if it's not UUID already,
- # so that two images with same file names do not collide
- if service_utils.is_glance_image(href):
- master_file_name = service_utils.parse_image_ref(href)[0]
+ if uuidutils.is_uuid_like(href):
+ master_file_name = href
+ elif (self._image_service and
+ hasattr(self._image_service, 'get_image_unique_id')):
+ master_file_name = self._image_service.get_image_unique_id(href)
else:
- # NOTE(vdrok): Doing conversion of href in case it's unicode
- # string, UUID cannot be generated for unicode strings on python 2.
master_file_name = str(uuid.uuid5(uuid.NAMESPACE_URL,
href.encode('utf-8')))
+
master_path = os.path.join(self.master_dir, master_file_name)
if CONF.parallel_image_downloads:
diff --git a/ironic/tests/test_swift.py b/ironic/tests/test_swift.py
index 9daa06e..aaa1b7c 100644
--- a/ironic/tests/test_swift.py
+++ b/ironic/tests/test_swift.py
@@ -113,6 +113,7 @@ class SwiftTestCase(base.TestCase):
connection_obj_mock.get_auth.return_value = auth
head_ret_val = {'x-account-meta-temp-url-key': 'secretkey'}
connection_obj_mock.head_account.return_value = head_ret_val
+ connection_obj_mock.os_options = {}
gen_temp_url_mock.return_value = 'temp-url-path'
temp_url_returned = swiftapi.get_temp_url('container', 'object', 10)
connection_obj_mock.get_auth.assert_called_once_with()
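
The Kilo-era ironic patch removed above (and its Liberty replacement added below) introduces an optional deploy-termination capability: a DEPLOYING to DELETING transition in the state machine, a can_terminate_deployment flag on DeployInterface, and a hook in do_node_tear_down that calls terminate_deployment() before the regular tear down. A hedged sketch of how a deploy interface could opt in; the class and method bodies are placeholders, not Bareon's actual implementation:

from ironic.drivers import base


class ExampleDeploy(base.DeployInterface):
    # The required abstract methods of DeployInterface (get_properties,
    # validate, deploy, tear_down, prepare, clean_up, take_over) are
    # omitted from this sketch.

    @property
    def can_terminate_deployment(self):
        # Advertise support so the API and conductor accept a 'deleted'
        # target while the node is still DEPLOYING.
        return True

    def terminate_deployment(self, task):
        # Abort the in-progress deployment for task.node here and release
        # any resources the deploy has claimed.
        pass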

View File

@ -0,0 +1,508 @@
diff --git a/ironic/api/controllers/v1/node.py b/ironic/api/controllers/v1/node.py
index d95298f..1b99c68 100644
--- a/ironic/api/controllers/v1/node.py
+++ b/ironic/api/controllers/v1/node.py
@@ -452,8 +452,13 @@ class NodeStatesController(rest.RestController):
raise exception.NodeInMaintenance(op=_('provisioning'),
node=rpc_node.uuid)
+ driver = api_utils.get_driver_by_name(rpc_node.driver)
+ driver_can_terminate = (driver and
+ driver.deploy.can_terminate_deployment)
+
m = ir_states.machine.copy()
m.initialize(rpc_node.provision_state)
+
if not m.is_actionable_event(ir_states.VERBS.get(target, target)):
# Normally, we let the task manager recognize and deal with
# NodeLocked exceptions. However, that isn't done until the RPC
@@ -470,6 +475,16 @@ class NodeStatesController(rest.RestController):
action=target, node=rpc_node.uuid,
state=rpc_node.provision_state)
+ # Note(obereozvskyi): we need to check whether the driver supports deploy
+ # termination
+ if (m.current_state == ir_states.DEPLOYING and
+ target == ir_states.DELETED and
+ not driver_can_terminate):
+
+ raise exception.InvalidStateRequested(
+ action=target, node=rpc_node.uuid,
+ state=rpc_node.provision_state)
+
if configdrive and target != ir_states.ACTIVE:
msg = (_('Adding a config drive is only supported when setting '
'provision state to %s') % ir_states.ACTIVE)
diff --git a/ironic/api/controllers/v1/utils.py b/ironic/api/controllers/v1/utils.py
index 538ca45..3715d41 100644
--- a/ironic/api/controllers/v1/utils.py
+++ b/ironic/api/controllers/v1/utils.py
@@ -23,6 +23,7 @@ from webob.static import FileIter
import wsme
from ironic.api.controllers.v1 import versions
+from ironic.common import driver_factory
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import states
@@ -109,7 +110,16 @@ def is_valid_node_name(name):
:param: name: the node name to check.
:returns: True if the name is valid, False otherwise.
"""
- return is_valid_logical_name(name) and not uuidutils.is_uuid_like(name)
+ return utils.is_hostname_safe(name) and (not uuidutils.is_uuid_like(name))
+
+
+def get_driver_by_name(driver_name):
+ _driver_factory = driver_factory.DriverFactory()
+ try:
+ driver = _driver_factory[driver_name]
+ return driver.obj
+ except Exception:
+ return None
def is_valid_logical_name(name):
diff --git a/ironic/common/context.py b/ironic/common/context.py
index ccd2222..b8186c9 100644
--- a/ironic/common/context.py
+++ b/ironic/common/context.py
@@ -65,5 +65,4 @@ class RequestContext(context.RequestContext):
@classmethod
def from_dict(cls, values):
values.pop('user', None)
- values.pop('tenant', None)
return cls(**values)
diff --git a/ironic/common/images.py b/ironic/common/images.py
index 5b00e65..28e6bd7 100644
--- a/ironic/common/images.py
+++ b/ironic/common/images.py
@@ -328,16 +328,17 @@ def convert_image(source, dest, out_format, run_as_root=False):
utils.execute(*cmd, run_as_root=run_as_root)
-def fetch(context, image_href, path, force_raw=False):
+def fetch(context, image_href, path, force_raw=False, image_service=None):
# TODO(vish): Improve context handling and add owner and auth data
# when it is added to glance. Right now there is no
# auth checking in glance, so we assume that access was
# checked before we got here.
- image_service = service.get_image_service(image_href,
- context=context)
- LOG.debug("Using %(image_service)s to download image %(image_href)s." %
- {'image_service': image_service.__class__,
- 'image_href': image_href})
+ if not image_service:
+ image_service = service.get_image_service(image_href,
+ context=context)
+ LOG.debug("Using %(image_service)s to download image %(image_href)s." %
+ {'image_service': image_service.__class__,
+ 'image_href': image_href})
with fileutils.remove_path_on_error(path):
with open(path, "wb") as image_file:
diff --git a/ironic/common/states.py b/ironic/common/states.py
index e61c807..2523a7f 100644
--- a/ironic/common/states.py
+++ b/ironic/common/states.py
@@ -245,6 +245,9 @@ machine.add_state(INSPECTFAIL, target=MANAGEABLE, **watchers)
# A deployment may fail
machine.add_transition(DEPLOYING, DEPLOYFAIL, 'fail')
+# A deployment may be terminated
+machine.add_transition(DEPLOYING, DELETING, 'delete')
+
# A failed deployment may be retried
# ironic/conductor/manager.py:do_node_deploy()
machine.add_transition(DEPLOYFAIL, DEPLOYING, 'rebuild')
diff --git a/ironic/common/swift.py b/ironic/common/swift.py
index 8fa2d65..14d6b55 100644
--- a/ironic/common/swift.py
+++ b/ironic/common/swift.py
@@ -24,6 +24,7 @@ from swiftclient import utils as swift_utils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import keystone
+from ironic.common import utils
swift_opts = [
cfg.IntOpt('swift_max_retries',
@@ -36,6 +37,13 @@ swift_opts = [
CONF = cfg.CONF
CONF.register_opts(swift_opts, group='swift')
+CONF.import_opt('swift_endpoint_url',
+ 'ironic.common.glance_service.v2.image_service',
+ group='glance')
+CONF.import_opt('swift_api_version',
+ 'ironic.common.glance_service.v2.image_service',
+ group='glance')
+
CONF.import_opt('admin_user', 'keystonemiddleware.auth_token',
group='keystone_authtoken')
CONF.import_opt('admin_tenant_name', 'keystonemiddleware.auth_token',
@@ -62,7 +70,9 @@ class SwiftAPI(object):
tenant_name=CONF.keystone_authtoken.admin_tenant_name,
key=CONF.keystone_authtoken.admin_password,
auth_url=CONF.keystone_authtoken.auth_uri,
- auth_version=CONF.keystone_authtoken.auth_version):
+ auth_version=CONF.keystone_authtoken.auth_version,
+ preauthtoken=None,
+ preauthtenant=None):
"""Constructor for creating a SwiftAPI object.
:param user: the name of the user for Swift account
@@ -70,16 +80,41 @@ class SwiftAPI(object):
:param key: the 'password' or key to authenticate with
:param auth_url: the url for authentication
:param auth_version: the version of api to use for authentication
+ :param preauthtoken: authentication token (if you have already
+ authenticated) note authurl/user/key/tenant_name
+ are not required when specifying preauthtoken
+ :param preauthtenant: a tenant that will be accessed using the
+ preauthtoken
"""
- auth_url = keystone.get_keystone_url(auth_url, auth_version)
- params = {'retries': CONF.swift.swift_max_retries,
- 'insecure': CONF.keystone_authtoken.insecure,
- 'cacert': CONF.keystone_authtoken.cafile,
- 'user': user,
- 'tenant_name': tenant_name,
- 'key': key,
- 'authurl': auth_url,
- 'auth_version': auth_version}
+ params = {
+ 'retries': CONF.swift.swift_max_retries,
+ 'insecure': CONF.keystone_authtoken.insecure,
+ 'cacert': CONF.keystone_authtoken.cafile
+ }
+
+ if preauthtoken:
+ # Determining swift url for the user's tenant account.
+ tenant_id = utils.get_tenant_id(tenant_name=preauthtenant)
+ url = "{endpoint}/{api_ver}/AUTH_{tenant}".format(
+ endpoint=CONF.glance.swift_endpoint_url,
+ api_ver=CONF.glance.swift_api_version,
+ tenant=tenant_id
+ )
+ # authurl/user/key/tenant_name are not required when specifying
+ # preauthtoken
+ params.update({
+ 'preauthtoken': preauthtoken,
+ 'preauthurl': url
+ })
+ else:
+ auth_url = keystone.get_keystone_url(auth_url, auth_version)
+ params.update({
+ 'user': user,
+ 'tenant_name': tenant_name,
+ 'key': key,
+ 'authurl': auth_url,
+ 'auth_version': auth_version
+ })
self.connection = swift_client.Connection(**params)
@@ -131,8 +166,8 @@ class SwiftAPI(object):
operation = _("head account")
raise exception.SwiftOperationError(operation=operation,
error=e)
-
- storage_url, token = self.connection.get_auth()
+ storage_url = (self.connection.os_options.get('object_storage_url') or
+ self.connection.get_auth()[0])
parse_result = parse.urlparse(storage_url)
swift_object_path = '/'.join((parse_result.path, container, object))
temp_url_key = account_info['x-account-meta-temp-url-key']
@@ -189,3 +224,23 @@ class SwiftAPI(object):
except swift_exceptions.ClientException as e:
operation = _("post object")
raise exception.SwiftOperationError(operation=operation, error=e)
+
+ def get_object(self, container, object, object_headers=None,
+ chunk_size=None):
+ """Get Swift object.
+
+ :param container: The name of the container in which Swift object
+ is placed.
+ :param object: The name of the object in Swift
+ :param object_headers: the headers for the object to pass to Swift
+ :param chunk_size: size of the chunk used to read from the response
+ :returns: Tuple (body, headers)
+ :raises: SwiftOperationError, if operation with Swift fails.
+ """
+ try:
+ return self.connection.get_object(container, object,
+ headers=object_headers,
+ resp_chunk_size=chunk_size)
+ except swift_exceptions.ClientException as e:
+ operation = _("get object")
+ raise exception.SwiftOperationError(operation=operation, error=e)
diff --git a/ironic/common/utils.py b/ironic/common/utils.py
index f863087..ed4398f 100644
--- a/ironic/common/utils.py
+++ b/ironic/common/utils.py
@@ -42,6 +42,7 @@ from ironic.common import exception
from ironic.common.i18n import _
from ironic.common.i18n import _LE
from ironic.common.i18n import _LW
+from ironic.common import keystone
utils_opts = [
cfg.StrOpt('rootwrap_config',
@@ -560,6 +561,11 @@ def is_http_url(url):
return url.startswith('http://') or url.startswith('https://')
+def get_tenant_id(tenant_name):
+ ksclient = keystone._get_ksclient()
+ return ksclient.tenants.find(name=tenant_name).to_dict()['id']
+
+
def check_dir(directory_to_check=None, required_space=1):
"""Check a directory is usable.
diff --git a/ironic/conductor/manager.py b/ironic/conductor/manager.py
index b4bee31..e5bb190 100644
--- a/ironic/conductor/manager.py
+++ b/ironic/conductor/manager.py
@@ -768,6 +768,10 @@ class ConductorManager(periodic_task.PeriodicTasks):
"""
LOG.debug("RPC do_node_tear_down called for node %s." % node_id)
+ with task_manager.acquire(context, node_id, shared=True) as task:
+ if (task.node.provision_state == states.DEPLOYING and
+ task.driver.deploy.can_terminate_deployment):
+ task.driver.deploy.terminate_deployment(task)
with task_manager.acquire(context, node_id, shared=False,
purpose='node tear down') as task:
try:
diff --git a/ironic/drivers/base.py b/ironic/drivers/base.py
index 098b7a0..6ebe05d 100644
--- a/ironic/drivers/base.py
+++ b/ironic/drivers/base.py
@@ -345,6 +345,13 @@ class DeployInterface(BaseInterface):
"""
pass
+ def terminate_deployment(self, *args, **kwargs):
+ pass
+
+ @property
+ def can_terminate_deployment(self):
+ return False
+
@six.add_metaclass(abc.ABCMeta)
class BootInterface(object):
diff --git a/ironic/drivers/modules/image_cache.py b/ironic/drivers/modules/image_cache.py
index 8bb1e23..8e1a921 100644
--- a/ironic/drivers/modules/image_cache.py
+++ b/ironic/drivers/modules/image_cache.py
@@ -27,6 +27,7 @@ from oslo_concurrency import lockutils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import fileutils
+from oslo_utils import uuidutils
import six
from ironic.common import exception
@@ -60,7 +61,8 @@ _cache_cleanup_list = []
class ImageCache(object):
"""Class handling access to cache for master images."""
- def __init__(self, master_dir, cache_size, cache_ttl):
+ def __init__(self, master_dir, cache_size, cache_ttl,
+ image_service=None):
"""Constructor.
:param master_dir: cache directory to work on
@@ -70,6 +72,7 @@ class ImageCache(object):
self.master_dir = master_dir
self._cache_size = cache_size
self._cache_ttl = cache_ttl
+ self._image_service = image_service
if master_dir is not None:
fileutils.ensure_tree(master_dir)
@@ -94,23 +97,28 @@ class ImageCache(object):
# NOTE(ghe): We don't share images between instances/hosts
if not CONF.parallel_image_downloads:
with lockutils.lock(img_download_lock_name, 'ironic-'):
- _fetch(ctx, href, dest_path, force_raw)
+ _fetch(ctx, href, dest_path,
+ image_service=self._image_service,
+ force_raw=force_raw)
else:
- _fetch(ctx, href, dest_path, force_raw)
+ _fetch(ctx, href, dest_path, image_service=self._image_service,
+ force_raw=force_raw)
return
# TODO(ghe): have hard links and counts the same behaviour in all fs
- # NOTE(vdrok): File name is converted to UUID if it's not UUID already,
- # so that two images with same file names do not collide
- if service_utils.is_glance_image(href):
- master_file_name = service_utils.parse_image_ref(href)[0]
+ if uuidutils.is_uuid_like(href):
+ master_file_name = href
+
+ elif (self._image_service and
+ hasattr(self._image_service, 'get_image_unique_id')):
+ master_file_name = self._image_service.get_image_unique_id(href)
+
else:
- # NOTE(vdrok): Doing conversion of href in case it's unicode
- # string, UUID cannot be generated for unicode strings on python 2.
href_encoded = href.encode('utf-8') if six.PY2 else href
master_file_name = str(uuid.uuid5(uuid.NAMESPACE_URL,
href_encoded))
+
master_path = os.path.join(self.master_dir, master_file_name)
if CONF.parallel_image_downloads:
@@ -121,8 +129,8 @@ class ImageCache(object):
# NOTE(vdrok): After rebuild requested image can change, so we
# should ensure that dest_path and master_path (if exists) are
# pointing to the same file and their content is up to date
- cache_up_to_date = _delete_master_path_if_stale(master_path, href,
- ctx)
+ cache_up_to_date = _delete_master_path_if_stale(
+ master_path, href, ctx, img_service=self._image_service)
dest_up_to_date = _delete_dest_path_if_stale(master_path,
dest_path)
@@ -168,7 +176,8 @@ class ImageCache(object):
tmp_path = os.path.join(tmp_dir, href.split('/')[-1])
try:
- _fetch(ctx, href, tmp_path, force_raw)
+ _fetch(ctx, href, tmp_path, force_raw,
+ image_service=self._image_service)
# NOTE(dtantsur): no need for global lock here - master_path
# will have link count >1 at any moment, so won't be cleaned up
os.link(tmp_path, master_path)
@@ -308,10 +317,11 @@ def _free_disk_space_for(path):
return stat.f_frsize * stat.f_bavail
-def _fetch(context, image_href, path, force_raw=False):
+def _fetch(context, image_href, path, force_raw=False, image_service=None):
"""Fetch image and convert to raw format if needed."""
path_tmp = "%s.part" % path
- images.fetch(context, image_href, path_tmp, force_raw=False)
+ images.fetch(context, image_href, path_tmp, force_raw=False,
+ image_service=image_service)
# Notes(yjiang5): If glance can provide the virtual size information,
# then we can firstly clean cache and then invoke images.fetch().
if force_raw:
@@ -384,7 +394,7 @@ def cleanup(priority):
return _add_property_to_class_func
-def _delete_master_path_if_stale(master_path, href, ctx):
+def _delete_master_path_if_stale(master_path, href, ctx, img_service=None):
"""Delete image from cache if it is not up to date with href contents.
:param master_path: path to an image in master cache
@@ -397,7 +407,8 @@ def _delete_master_path_if_stale(master_path, href, ctx):
# Glance image contents cannot be updated without changing image's UUID
return os.path.exists(master_path)
if os.path.exists(master_path):
- img_service = image_service.get_image_service(href, context=ctx)
+ if not img_service:
+ img_service = image_service.get_image_service(href, context=ctx)
img_mtime = img_service.show(href).get('updated_at')
if not img_mtime:
# This means that href is not a glance image and doesn't have an
diff --git a/ironic/tests/common/test_swift.py b/ironic/tests/common/test_swift.py
index 43e3ef0..b2632c4 100644
--- a/ironic/tests/common/test_swift.py
+++ b/ironic/tests/common/test_swift.py
@@ -120,6 +120,7 @@ class SwiftTestCase(base.TestCase):
connection_obj_mock.get_auth.return_value = auth
head_ret_val = {'x-account-meta-temp-url-key': 'secretkey'}
connection_obj_mock.head_account.return_value = head_ret_val
+ connection_obj_mock.os_options = {}
gen_temp_url_mock.return_value = 'temp-url-path'
temp_url_returned = swiftapi.get_temp_url('container', 'object', 10)
connection_obj_mock.get_auth.assert_called_once_with()
diff --git a/ironic/tests/drivers/test_image_cache.py b/ironic/tests/drivers/test_image_cache.py
index 3d666cd..436aa49 100644
--- a/ironic/tests/drivers/test_image_cache.py
+++ b/ironic/tests/drivers/test_image_cache.py
@@ -59,7 +59,7 @@ class TestImageCacheFetch(base.TestCase):
self.cache.fetch_image(self.uuid, self.dest_path)
self.assertFalse(mock_download.called)
mock_fetch.assert_called_once_with(
- None, self.uuid, self.dest_path, True)
+ None, self.uuid, self.dest_path, True, image_service=None)
self.assertFalse(mock_clean_up.called)
@mock.patch.object(image_cache.ImageCache, 'clean_up', autospec=True)
@@ -75,7 +75,7 @@ class TestImageCacheFetch(base.TestCase):
mock_clean_up):
self.cache.fetch_image(self.uuid, self.dest_path)
mock_cache_upd.assert_called_once_with(self.master_path, self.uuid,
- None)
+ None, img_service=None)
mock_dest_upd.assert_called_once_with(self.master_path, self.dest_path)
self.assertFalse(mock_link.called)
self.assertFalse(mock_download.called)
@@ -94,7 +94,7 @@ class TestImageCacheFetch(base.TestCase):
mock_clean_up):
self.cache.fetch_image(self.uuid, self.dest_path)
mock_cache_upd.assert_called_once_with(self.master_path, self.uuid,
- None)
+ None, img_service=None)
mock_dest_upd.assert_called_once_with(self.master_path, self.dest_path)
mock_link.assert_called_once_with(self.master_path, self.dest_path)
self.assertFalse(mock_download.called)
@@ -113,7 +113,7 @@ class TestImageCacheFetch(base.TestCase):
mock_clean_up):
self.cache.fetch_image(self.uuid, self.dest_path)
mock_cache_upd.assert_called_once_with(self.master_path, self.uuid,
- None)
+ None, img_service=None)
mock_dest_upd.assert_called_once_with(self.master_path, self.dest_path)
self.assertFalse(mock_link.called)
mock_download.assert_called_once_with(
@@ -134,7 +134,7 @@ class TestImageCacheFetch(base.TestCase):
mock_clean_up):
self.cache.fetch_image(self.uuid, self.dest_path)
mock_cache_upd.assert_called_once_with(self.master_path, self.uuid,
- None)
+ None, img_service=None)
mock_dest_upd.assert_called_once_with(self.master_path, self.dest_path)
self.assertFalse(mock_link.called)
mock_download.assert_called_once_with(
@@ -158,7 +158,7 @@ class TestImageCacheFetch(base.TestCase):
@mock.patch.object(image_cache, '_fetch', autospec=True)
def test__download_image(self, mock_fetch):
- def _fake_fetch(ctx, uuid, tmp_path, *args):
+ def _fake_fetch(ctx, uuid, tmp_path, *args, **kwargs):
self.assertEqual(self.uuid, uuid)
self.assertNotEqual(self.dest_path, tmp_path)
self.assertNotEqual(os.path.dirname(tmp_path), self.master_dir)
@@ -430,7 +430,7 @@ class TestImageCacheCleanUp(base.TestCase):
@mock.patch.object(utils, 'rmtree_without_raise', autospec=True)
@mock.patch.object(image_cache, '_fetch', autospec=True)
def test_temp_images_not_cleaned(self, mock_fetch, mock_rmtree):
- def _fake_fetch(ctx, uuid, tmp_path, *args):
+ def _fake_fetch(ctx, uuid, tmp_path, *args, **kwargs):
with open(tmp_path, 'w') as fp:
fp.write("TEST" * 10)
@@ -675,7 +675,8 @@ class TestFetchCleanup(base.TestCase):
mock_size.return_value = 100
image_cache._fetch('fake', 'fake-uuid', '/foo/bar', force_raw=True)
mock_fetch.assert_called_once_with('fake', 'fake-uuid',
- '/foo/bar.part', force_raw=False)
+ '/foo/bar.part', image_service=None,
+ force_raw=False)
mock_clean.assert_called_once_with('/foo', 100)
mock_raw.assert_called_once_with('fake-uuid', '/foo/bar',
'/foo/bar.part')
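
The replacement patch above also extends ironic's SwiftAPI so it can act on behalf of the requesting tenant: passing preauthtoken/preauthtenant skips the service-credential authentication and points the client at the AUTH_<tenant> account derived from the configured Glance Swift endpoint, and a new get_object() helper proxies swiftclient's Connection.get_object. A usage sketch only, assuming the patched ironic.common.swift module and a task whose RequestContext carries the user's token and tenant:

from ironic.common import swift


def download_tenant_object(task, container, obj, dest_path):
    # With preauthtoken set, SwiftAPI talks to the user's own Swift account
    # instead of authenticating with the conductor's service credentials.
    api = swift.SwiftAPI(preauthtoken=task.context.auth_token,
                         preauthtenant=task.context.tenant)
    # swiftclient's Connection.get_object returns the response headers and
    # the object body.
    headers, body = api.get_object(container, obj)
    with open(dest_path, 'wb') as f:
        f.write(body)
    return headers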

View File

@ -1,98 +1,41 @@
diff --git a/nova/objects/image_meta.py b/nova/objects/image_meta.py
index 15be3f1..83fc2fb 100644
--- a/nova/objects/image_meta.py
+++ b/nova/objects/image_meta.py
@@ -346,6 +346,7 @@ class ImageMetaProps(base.NovaObject):
# is a fairly generic type. For a detailed type consider os_distro
# instead
'os_type': fields.OSTypeField(),
+ 'deploy_config': fields.StringField(),
}
# The keys are the legacy property names and
diff --git a/nova/tests/unit/objects/test_objects.py b/nova/tests/unit/objects/test_objects.py
index 031555f..e0368b8 100644
--- a/nova/tests/unit/objects/test_objects.py
+++ b/nova/tests/unit/objects/test_objects.py
@@ -1180,7 +1180,7 @@ object_data = {
'HostMapping': '1.0-1a3390a696792a552ab7bd31a77ba9ac',
'HVSpec': '1.1-6b4f7c0f688cbd03e24142a44eb9010d',
'ImageMeta': '1.7-642d1b2eb3e880a367f37d72dd76162d',
- 'ImageMetaProps': '1.7-f12fc4cf3e25d616f69a66fb9d2a7aa6',
+ 'ImageMetaProps': '1.7-716042e9e80ea16890f475200940d6f9',
'Instance': '2.0-ff56804dce87d81d9a04834d4bd1e3d2',
# NOTE(danms): Reviewers: do not approve changes to the Instance1
# object schema. It is frozen for Liberty and will be removed in
diff --git a/nova/tests/unit/virt/ironic/test_driver.py b/nova/tests/unit/virt/ironic/test_driver.py
index b19c6eb..6305ff7 100644
index a8c653a..940497c 100644
--- a/nova/tests/unit/virt/ironic/test_driver.py
+++ b/nova/tests/unit/virt/ironic/test_driver.py
@@ -24,6 +24,7 @@ from oslo_utils import uuidutils
from nova.api.metadata import base as instance_metadata
from nova.compute import power_state as nova_states
from nova.compute import task_states
+from nova.compute import vm_states
from nova import context as nova_context
from nova import exception
from nova import objects
@@ -143,8 +144,9 @@ class IronicDriverTestCase(test.NoDBTestCase):
ironic_driver._validate_instance_and_node,
ironicclient, instance)
+ @mock.patch.object(objects.Instance, 'refresh')
@mock.patch.object(ironic_driver, '_validate_instance_and_node')
- def test__wait_for_active_pass(self, fake_validate):
+ def test__wait_for_active_pass(self, fake_validate, fake_refresh):
instance = fake_instance.fake_instance_obj(self.ctx,
uuid=uuidutils.generate_uuid())
node = ironic_utils.get_test_node(
@@ -152,10 +154,12 @@ class IronicDriverTestCase(test.NoDBTestCase):
fake_validate.return_value = node
self.driver._wait_for_active(FAKE_CLIENT, instance)
- self.assertTrue(fake_validate.called)
+ fake_validate.assert_called_once_with(FAKE_CLIENT, instance)
+ fake_refresh.assert_called_once_with()
+ @mock.patch.object(objects.Instance, 'refresh')
@mock.patch.object(ironic_driver, '_validate_instance_and_node')
- def test__wait_for_active_done(self, fake_validate):
+ def test__wait_for_active_done(self, fake_validate, fake_refresh):
instance = fake_instance.fake_instance_obj(self.ctx,
uuid=uuidutils.generate_uuid())
node = ironic_utils.get_test_node(
@@ -165,10 +169,12 @@ class IronicDriverTestCase(test.NoDBTestCase):
self.assertRaises(loopingcall.LoopingCallDone,
self.driver._wait_for_active,
FAKE_CLIENT, instance)
- self.assertTrue(fake_validate.called)
+ fake_validate.assert_called_once_with(FAKE_CLIENT, instance)
+ fake_refresh.assert_called_once_with()
+ @mock.patch.object(objects.Instance, 'refresh')
@mock.patch.object(ironic_driver, '_validate_instance_and_node')
- def test__wait_for_active_fail(self, fake_validate):
+ def test__wait_for_active_fail(self, fake_validate, fake_refresh):
instance = fake_instance.fake_instance_obj(self.ctx,
uuid=uuidutils.generate_uuid())
node = ironic_utils.get_test_node(
@@ -178,7 +184,31 @@ class IronicDriverTestCase(test.NoDBTestCase):
self.assertRaises(exception.InstanceDeployFailure,
self.driver._wait_for_active,
FAKE_CLIENT, instance)
- self.assertTrue(fake_validate.called)
+ fake_validate.assert_called_once_with(FAKE_CLIENT, instance)
+ fake_refresh.assert_called_once_with()
+
+ @mock.patch.object(objects.Instance, 'refresh')
+ @mock.patch.object(ironic_driver, '_validate_instance_and_node')
+ def _wait_for_active_abort(self, instance_params, fake_validate,
+ fake_refresh):
+ instance = fake_instance.fake_instance_obj(self.ctx,
+ uuid=uuidutils.generate_uuid(),
+ **instance_params)
+ self.assertRaises(exception.InstanceDeployFailure,
+ self.driver._wait_for_active,
+ FAKE_CLIENT, instance)
+ # Assert _validate_instance_and_node wasn't called
+ self.assertFalse(fake_validate.called)
+ fake_refresh.assert_called_once_with()
+
+ def test__wait_for_active_abort_deleting(self):
+ self._wait_for_active_abort({'task_state': task_states.DELETING})
+
+ def test__wait_for_active_abort_deleted(self):
+ self._wait_for_active_abort({'vm_state': vm_states.DELETED})
+
+ def test__wait_for_active_abort_error(self):
+ self._wait_for_active_abort({'vm_state': vm_states.ERROR})
@mock.patch.object(ironic_driver, '_validate_instance_and_node')
def test__wait_for_power_state_pass(self, fake_validate):
@@ -626,6 +656,7 @@ class IronicDriverTestCase(test.NoDBTestCase):
@@ -799,6 +799,7 @@ class IronicDriverTestCase(test.NoDBTestCase):
result = self.driver.macs_for_instance(instance)
self.assertIsNone(result)
+ @mock.patch.object(ironic_driver.IronicDriver, '_get_switch_boot_options')
@mock.patch.object(objects.Instance, 'save')
@mock.patch.object(loopingcall, 'FixedIntervalLoopingCall')
@mock.patch.object(FAKE_CLIENT, 'node')
@@ -634,7 +665,7 @@ class IronicDriverTestCase(test.NoDBTestCase):
@@ -807,7 +808,7 @@ class IronicDriverTestCase(test.NoDBTestCase):
@mock.patch.object(ironic_driver.IronicDriver, '_plug_vifs')
@mock.patch.object(ironic_driver.IronicDriver, '_start_firewall')
def _test_spawn(self, mock_sf, mock_pvifs, mock_adf, mock_wait_active,
@ -101,18 +44,18 @@ index b19c6eb..6305ff7 100644
node_uuid = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'
node = ironic_utils.get_test_node(driver='fake', uuid=node_uuid)
instance = fake_instance.fake_instance_obj(self.ctx, node=node_uuid)
@@ -668,6 +699,7 @@ class IronicDriverTestCase(test.NoDBTestCase):
@@ -845,6 +846,7 @@ class IronicDriverTestCase(test.NoDBTestCase):
fake_looping_call.start.assert_called_once_with(
interval=CONF.ironic.api_retry_interval)
fake_looping_call.wait.assert_called_once_with()
+ mock_sb_options.assert_called_once_with(self.ctx, instance, node_uuid)
@mock.patch.object(ironic_driver.IronicDriver, '_generate_configdrive')
@mock.patch.object(configdrive, 'required_by')
@@ -720,14 +752,61 @@ class IronicDriverTestCase(test.NoDBTestCase):
@@ -897,14 +899,62 @@ class IronicDriverTestCase(test.NoDBTestCase):
self.driver.spawn, self.ctx, instance, None, [], None)
mock_destroy.assert_called_once_with(self.ctx, instance, None)
+ @mock.patch.object(FAKE_CLIENT, 'node')
+ def test__get_switch_boot_options(self, mock_node):
+ node_uuid = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'
@ -160,23 +103,25 @@ index b19c6eb..6305ff7 100644
node = ironic_utils.get_test_node(driver='fake')
- instance = fake_instance.fake_instance_obj(self.ctx,
- node=node.uuid)
- image_meta = ironic_utils.get_test_image_meta_object()
+ instance = fake_instance.fake_instance_obj(
+ self.ctx,
+ node=node.uuid,
+ expected_attrs=('metadata',))
image_meta = ironic_utils.get_test_image_meta()
+ mock_get_depl_conf_opts.return_value = {'foo': 'bar123'}
+ instance['metadata']['driver_actions'] = {'bar': 'foo123'}
flavor = ironic_utils.get_test_flavor()
+
+ image_meta = ironic_utils.get_test_image_meta_object()
+ mock_get_depl_conf_opts.return_value = {'foo': 'bar123'}
+ instance['metadata']['driver_actions'] = 'test_driver_actions'
+
self.driver._add_driver_fields(node, instance, image_meta, flavor)
+
expected_patch = [{'path': '/instance_info/image_source', 'op': 'add',
'value': image_meta['id']},
'value': image_meta.id},
{'path': '/instance_info/root_gb', 'op': 'add',
@@ -735,21 +814,96 @@ class IronicDriverTestCase(test.NoDBTestCase):
{'path': '/instance_info/swap_mb', 'op': 'add',
'value': str(flavor['swap'])},
@@ -920,21 +970,96 @@ class IronicDriverTestCase(test.NoDBTestCase):
{'path': '/instance_info/local_gb', 'op': 'add',
'value': str(node.properties.get('local_gb', 0))},
{'path': '/instance_uuid', 'op': 'add',
- 'value': instance.uuid}]
+ 'value': instance.uuid},
@ -185,10 +130,10 @@ index b19c6eb..6305ff7 100644
+ 'value': {'foo': 'bar123'}},
+ {'path': '/instance_info/driver_actions',
+ 'op': 'add',
+ 'value': {'bar': 'foo123'}},
+ 'value': 'test_driver_actions'},
+ ]
mock_update.assert_called_once_with(node.uuid, expected_patch)
@mock.patch.object(FAKE_CLIENT.node, 'update')
def test__add_driver_fields_fail(self, mock_update):
mock_update.side_effect = ironic_exception.BadRequest()
@ -199,16 +144,16 @@ index b19c6eb..6305ff7 100644
+ self.ctx,
+ node=node.uuid,
+ expected_attrs=('metadata',))
image_meta = ironic_utils.get_test_image_meta()
image_meta = ironic_utils.get_test_image_meta_object()
flavor = ironic_utils.get_test_flavor()
self.assertRaises(exception.InstanceDeployFailure,
self.driver._add_driver_fields,
node, instance, image_meta, flavor)
+ def test__get_deploy_config_options_all_present(self):
+ node = ironic_utils.get_test_node(
+ driver='fake', driver_info={'deploy_config': "node-conf"})
+ image_meta = ironic_utils.get_test_image_meta(
+ image_meta = ironic_utils.get_test_image_meta_object(
+ deploy_config="image-conf")
+ instance = fake_instance.fake_instance_obj(
+ self.ctx, node=node.uuid, expected_attrs=('metadata',),
@ -233,7 +178,7 @@ index b19c6eb..6305ff7 100644
+ "image": "previous_image_conf",
+ }}
+ )
+ image_meta = ironic_utils.get_test_image_meta(
+ image_meta = ironic_utils.get_test_image_meta_object(
+ deploy_config="image-conf")
+ instance = fake_instance.fake_instance_obj(
+ self.ctx, node=node.uuid, expected_attrs=('metadata',))
@ -248,7 +193,7 @@ index b19c6eb..6305ff7 100644
+
+ def test__get_deploy_config_options_some_present(self):
+ node = ironic_utils.get_test_node(driver='fake')
+ image_meta = ironic_utils.get_test_image_meta()
+ image_meta = ironic_utils.get_test_image_meta_object()
+ instance = fake_instance.fake_instance_obj(
+ self.ctx, node=node.uuid, expected_attrs=('metadata',),
+ metadata={'deploy_config': "instance-conf"})
@ -261,7 +206,7 @@ index b19c6eb..6305ff7 100644
+
+ def test__get_deploy_config_options_none_present(self):
+ node = ironic_utils.get_test_node(driver='fake')
+ image_meta = ironic_utils.get_test_image_meta()
+ image_meta = ironic_utils.get_test_image_meta_object()
+ instance = fake_instance.fake_instance_obj(
+ self.ctx, node=node.uuid, expected_attrs=('metadata',))
+
@ -274,7 +219,7 @@ index b19c6eb..6305ff7 100644
@mock.patch.object(FAKE_CLIENT.node, 'update')
def test__cleanup_deploy_good_with_flavor(self, mock_update):
node = ironic_utils.get_test_node(driver='fake',
@@ -781,8 +935,10 @@ class IronicDriverTestCase(test.NoDBTestCase):
@@ -983,8 +1108,10 @@ class IronicDriverTestCase(test.NoDBTestCase):
node = ironic_utils.get_test_node(driver='fake',
instance_uuid=self.instance_uuid)
flavor = ironic_utils.get_test_flavor(extra_specs={})
@ -287,7 +232,7 @@ index b19c6eb..6305ff7 100644
instance.flavor = flavor
self.assertRaises(exception.InstanceTerminationFailure,
self.driver._cleanup_deploy,
@@ -796,7 +952,8 @@ class IronicDriverTestCase(test.NoDBTestCase):
@@ -998,7 +1125,8 @@ class IronicDriverTestCase(test.NoDBTestCase):
node_uuid = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'
node = ironic_utils.get_test_node(driver='fake', uuid=node_uuid)
flavor = ironic_utils.get_test_flavor()
@ -295,9 +240,9 @@ index b19c6eb..6305ff7 100644
+ instance = fake_instance.fake_instance_obj(
+ self.ctx, node=node_uuid, expected_attrs=('metadata',))
instance.flavor = flavor
mock_node.validate.return_value = ironic_utils.get_test_validation(
@@ -821,7 +978,8 @@ class IronicDriverTestCase(test.NoDBTestCase):
@@ -1023,7 +1151,8 @@ class IronicDriverTestCase(test.NoDBTestCase):
node_uuid = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'
node = ironic_utils.get_test_node(driver='fake', uuid=node_uuid)
flavor = ironic_utils.get_test_flavor()
@ -307,7 +252,7 @@ index b19c6eb..6305ff7 100644
instance.flavor = flavor
mock_node.get.return_value = node
mock_node.validate.return_value = ironic_utils.get_test_validation()
@@ -851,7 +1009,8 @@ class IronicDriverTestCase(test.NoDBTestCase):
@@ -1053,7 +1182,8 @@ class IronicDriverTestCase(test.NoDBTestCase):
node_uuid = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'
node = ironic_utils.get_test_node(driver='fake', uuid=node_uuid)
flavor = ironic_utils.get_test_flavor()
@ -316,8 +261,8 @@ index b19c6eb..6305ff7 100644
+ self.ctx, node=node_uuid, expected_attrs=('metadata',))
instance.flavor = flavor
image_meta = ironic_utils.get_test_image_meta()
@@ -880,7 +1039,8 @@ class IronicDriverTestCase(test.NoDBTestCase):
@@ -1082,7 +1212,8 @@ class IronicDriverTestCase(test.NoDBTestCase):
node_uuid = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'
node = ironic_utils.get_test_node(driver='fake', uuid=node_uuid)
flavor = ironic_utils.get_test_flavor()
@ -326,9 +271,9 @@ index b19c6eb..6305ff7 100644
+ self.ctx, node=node_uuid, expected_attrs=('metadata',))
instance.flavor = flavor
image_meta = ironic_utils.get_test_image_meta()
@@ -912,7 +1072,8 @@ class IronicDriverTestCase(test.NoDBTestCase):
fake_net_info = utils.get_test_network_info()
@@ -1113,7 +1244,8 @@ class IronicDriverTestCase(test.NoDBTestCase):
node_uuid = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'
node = ironic_utils.get_test_node(driver='fake', uuid=node_uuid)
flavor = ironic_utils.get_test_flavor()
- instance = fake_instance.fake_instance_obj(self.ctx, node=node_uuid)
@ -336,8 +281,8 @@ index b19c6eb..6305ff7 100644
+ self.ctx, node=node_uuid, expected_attrs=('metadata',))
instance.flavor = flavor
image_meta = ironic_utils.get_test_image_meta()
@@ -945,7 +1106,8 @@ class IronicDriverTestCase(test.NoDBTestCase):
@@ -1146,7 +1278,8 @@ class IronicDriverTestCase(test.NoDBTestCase):
node_uuid = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'
node = ironic_utils.get_test_node(driver='fake', uuid=node_uuid)
flavor = ironic_utils.get_test_flavor(ephemeral_gb=1)
@ -347,71 +292,17 @@ index b19c6eb..6305ff7 100644
instance.flavor = flavor
mock_node.get_by_instance_uuid.return_value = node
mock_node.set_provision_state.return_value = mock.MagicMock()
@@ -957,12 +1119,12 @@ class IronicDriverTestCase(test.NoDBTestCase):
@mock.patch.object(FAKE_CLIENT, 'node')
@mock.patch.object(ironic_driver.IronicDriver, '_cleanup_deploy')
- def test_destroy(self, mock_cleanup_deploy, mock_node):
+ def _test_destroy(self, state, mock_cleanup_deploy, mock_node):
node_uuid = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'
network_info = 'foo'
node = ironic_utils.get_test_node(driver='fake', uuid=node_uuid,
- provision_state=ironic_states.ACTIVE)
+ provision_state=state)
instance = fake_instance.fake_instance_obj(self.ctx, node=node_uuid)
def fake_set_provision_state(*_):
@@ -971,29 +1133,22 @@ class IronicDriverTestCase(test.NoDBTestCase):
mock_node.get_by_instance_uuid.return_value = node
mock_node.set_provision_state.side_effect = fake_set_provision_state
self.driver.destroy(self.ctx, instance, network_info, None)
- mock_node.set_provision_state.assert_called_once_with(node_uuid,
- 'deleted')
+
mock_node.get_by_instance_uuid.assert_called_with(instance.uuid)
mock_cleanup_deploy.assert_called_with(self.ctx, node,
instance, network_info)
- @mock.patch.object(FAKE_CLIENT, 'node')
- @mock.patch.object(ironic_driver.IronicDriver, '_cleanup_deploy')
- def test_destroy_ignore_unexpected_state(self, mock_cleanup_deploy,
- mock_node):
- node_uuid = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'
- network_info = 'foo'
+ # For states that makes sense check if set_provision_state has
+ # been called
+ if state in ironic_driver._UNPROVISION_STATES:
+ mock_node.set_provision_state.assert_called_once_with(
+ node_uuid, 'deleted')
+ else:
+ self.assertFalse(mock_node.set_provision_state.called)
- node = ironic_utils.get_test_node(driver='fake', uuid=node_uuid,
- provision_state=ironic_states.DELETING)
- instance = fake_instance.fake_instance_obj(self.ctx, node=node_uuid)
-
- mock_node.get_by_instance_uuid.return_value = node
- self.driver.destroy(self.ctx, instance, network_info, None)
- self.assertFalse(mock_node.set_provision_state.called)
- mock_node.get_by_instance_uuid.assert_called_with(instance.uuid)
- mock_cleanup_deploy.assert_called_with(self.ctx, node, instance,
- network_info)
+ def test_destroy(self):
+ for state in ironic_states.PROVISION_STATE_LIST:
+ self._test_destroy(state)
@mock.patch.object(FAKE_CLIENT, 'node')
@mock.patch.object(ironic_driver.IronicDriver, '_cleanup_deploy')
@@ -1287,6 +1442,7 @@ class IronicDriverTestCase(test.NoDBTestCase):
@@ -1541,15 +1674,16 @@ class IronicDriverTestCase(test.NoDBTestCase):
self.driver.refresh_instance_security_rules(fake_group)
mock_risr.assert_called_once_with(fake_group)
+ @mock.patch.object(ironic_driver.IronicDriver, '_get_switch_boot_options')
@mock.patch.object(ironic_driver.IronicDriver, '_wait_for_active')
@mock.patch.object(loopingcall, 'FixedIntervalLoopingCall')
@mock.patch.object(FAKE_CLIENT.node, 'set_provision_state')
@@ -1295,7 +1451,7 @@ class IronicDriverTestCase(test.NoDBTestCase):
@mock.patch.object(ironic_driver.IronicDriver, '_add_driver_fields')
- @mock.patch.object(FAKE_CLIENT.node, 'get')
+ @mock.patch.object(FAKE_CLIENT.node, 'get_by_instance_uuid')
@mock.patch.object(objects.Instance, 'save')
def _test_rebuild(self, mock_save, mock_get, mock_driver_fields,
mock_set_pstate, mock_looping, mock_wait_active,
@ -420,10 +311,10 @@ index b19c6eb..6305ff7 100644
node_uuid = uuidutils.generate_uuid()
node = ironic_utils.get_test_node(uuid=node_uuid,
instance_uuid=self.instance_uuid,
@@ -1306,10 +1462,12 @@ class IronicDriverTestCase(test.NoDBTestCase):
@@ -1560,10 +1694,12 @@ class IronicDriverTestCase(test.NoDBTestCase):
flavor_id = 5
flavor = objects.Flavor(flavor_id=flavor_id, name='baremetal')
- instance = fake_instance.fake_instance_obj(self.ctx,
- uuid=self.instance_uuid,
- node=node_uuid,
@ -435,20 +326,29 @@ index b19c6eb..6305ff7 100644
+ instance_type_id=flavor_id,
+ expected_attrs=('metadata',))
instance.flavor = flavor
fake_looping_call = FakeLoopingCall()
@@ -1333,6 +1491,7 @@ class IronicDriverTestCase(test.NoDBTestCase):
@@ -1589,6 +1725,7 @@ class IronicDriverTestCase(test.NoDBTestCase):
fake_looping_call.start.assert_called_once_with(
interval=CONF.ironic.api_retry_interval)
fake_looping_call.wait.assert_called_once_with()
+ mock_sb_options.assert_called_once_with(self.ctx, instance, node_uuid)
def test_rebuild_preserve_ephemeral(self):
self._test_rebuild(preserve=True)
@@ -1356,10 +1515,12 @@ class IronicDriverTestCase(test.NoDBTestCase):
@@ -1598,7 +1735,7 @@ class IronicDriverTestCase(test.NoDBTestCase):
@mock.patch.object(FAKE_CLIENT.node, 'set_provision_state')
@mock.patch.object(ironic_driver.IronicDriver, '_add_driver_fields')
- @mock.patch.object(FAKE_CLIENT.node, 'get')
+ @mock.patch.object(FAKE_CLIENT.node, 'get_by_instance_uuid')
@mock.patch.object(objects.Instance, 'save')
def test_rebuild_failures(self, mock_save, mock_get, mock_driver_fields,
mock_set_pstate):
@@ -1612,10 +1749,12 @@ class IronicDriverTestCase(test.NoDBTestCase):
flavor_id = 5
flavor = objects.Flavor(flavor_id=flavor_id, name='baremetal')
- instance = fake_instance.fake_instance_obj(self.ctx,
- uuid=self.instance_uuid,
- node=node_uuid,
@ -460,19 +360,19 @@ index b19c6eb..6305ff7 100644
+ instance_type_id=flavor_id,
+ expected_attrs=('metadata',))
instance.flavor = flavor
exceptions = [
@@ -1375,6 +1536,305 @@ class IronicDriverTestCase(test.NoDBTestCase):
@@ -1631,6 +1770,316 @@ class IronicDriverTestCase(test.NoDBTestCase):
injected_files=None, admin_password=None, bdms=None,
detach_block_devices=None, attach_block_devices=None)
+ @mock.patch.object(ironic_driver.IronicDriver, '_do_rebuild')
+ @mock.patch.object(ironic_driver.IronicDriver, '_get_switch_boot_options')
+ @mock.patch.object(ironic_driver.IronicDriver, '_wait_for_active')
+ @mock.patch.object(loopingcall, 'FixedIntervalLoopingCall')
+ @mock.patch.object(FAKE_CLIENT.node, 'set_provision_state')
+ @mock.patch.object(ironic_driver.IronicDriver, '_add_driver_fields')
+ @mock.patch.object(FAKE_CLIENT.node, 'get')
+ @mock.patch.object(FAKE_CLIENT.node, 'get_by_instance_uuid')
+ @mock.patch.object(objects.Instance, 'save')
+ def test_rebuild_multiboot_force_rebuild(self, mock_save, mock_get,
+ mock_driver_fields,
@ -486,7 +386,8 @@ index b19c6eb..6305ff7 100644
+ instance_info={'multiboot': True})
+ mock_get.return_value = node
+
+ image_meta = ironic_utils.get_test_image_meta()
+ image_meta = ironic_utils.get_test_image_meta_object()
+
+ flavor_id = 5
+ flavor = objects.Flavor(flavor_id=flavor_id, name='baremetal')
+
@ -502,11 +403,14 @@ index b19c6eb..6305ff7 100644
+ fake_looping_call = FakeLoopingCall()
+ mock_looping.return_value = fake_looping_call
+
+ self.driver.rebuild(
+ context=self.ctx, instance=instance, image_meta=image_meta,
+ injected_files=None, admin_password=None, bdms=None,
+ detach_block_devices=None, attach_block_devices=None,
+ preserve_ephemeral=False)
+ with mock.patch.object(objects.ImageMeta,
+ 'from_dict') as mock_image_meta_from_dict:
+ mock_image_meta_from_dict.return_value = image_meta
+ self.driver.rebuild(
+ context=self.ctx, instance=instance, image_meta=image_meta,
+ injected_files=None, admin_password=None, bdms=None,
+ detach_block_devices=None, attach_block_devices=None,
+ preserve_ephemeral=False)
+
+ rebuild_mock.assert_called_once_with(
+ self.ctx, FAKE_CLIENT_WRAPPER, node, instance, image_meta,
@ -517,18 +421,25 @@ index b19c6eb..6305ff7 100644
+ block_device_info=None,
+ preserve_ephemeral=False)
+
+ @mock.patch.object(objects.ImageMeta, 'from_dict')
+ @mock.patch.object(FAKE_CLIENT.node, 'get_by_instance_uuid')
+ @mock.patch.object(FAKE_CLIENT.node, 'get')
+ @mock.patch.object(ironic_driver.IronicDriver, '_do_switch_boot_device')
+ @mock.patch.object(objects.Instance, 'save')
+ def test_rebuild_multiboot_switch_boot(self, mock_save,
+ mock_sb, mock_get):
+ mock_sb, mock_get,
+ mock_get_by_instance,
+ mock_image_meta_from_dict):
+ node_uuid = uuidutils.generate_uuid()
+ node = ironic_utils.get_test_node(uuid=node_uuid,
+ instance_uuid=self.instance_uuid,
+ instance_type_id=5,
+ instance_info={'multiboot': True})
+ mock_get.return_value = node
+ image_meta = ironic_utils.get_test_image_meta()
+ mock_get.return_value = mock_get_by_instance.return_value = node
+
+ image_meta = ironic_utils.get_test_image_meta_object()
+ mock_image_meta_from_dict.return_value = image_meta
+
+ flavor_id = 5
+ instance = fake_instance.fake_instance_obj(
+ self.ctx,
@@ -558,7 +469,7 @@ index b19c6eb..6305ff7 100644
+ instance_type_id=5,
+ instance_info={'multiboot': True})
+
+ image_meta = ironic_utils.get_test_image_meta()
+ image_meta = ironic_utils.get_test_image_meta_object()
+ flavor_id = 5
+ instance = fake_instance.fake_instance_obj(
+ self.ctx,
@@ -573,13 +484,13 @@ index b19c6eb..6305ff7 100644
+ node, instance, image_meta)
+
+ vp_mock.assert_called_once_with(node_uuid, 'switch_boot',
+ {'image': image_meta['id'],
+ {'image': image_meta.id,
+ 'ssh_user': 'usr1',
+ 'ssh_key': 'key1'})
+ sp_mock.assert_called_once_with(node_uuid, 'reboot')
+ upd_mock.assert_called_once_with(
+ node_uuid, [{'path': '/instance_info/image_source', 'op': 'add',
+ 'value': image_meta['id']}])
+ 'value': image_meta.id}])
+
+ @mock.patch.object(FAKE_CLIENT.node, 'vendor_passthru')
+ @mock.patch.object(FAKE_CLIENT.node, 'set_power_state')
@@ -633,7 +544,7 @@ index b19c6eb..6305ff7 100644
+ 'image_source': 'original_image',
+ })
+
+ image_meta = ironic_utils.get_test_image_meta()
+ image_meta = ironic_utils.get_test_image_meta_object()
+ flavor_id = 5
+ instance = fake_instance.fake_instance_obj(
+ self.ctx,
@@ -671,7 +582,7 @@ index b19c6eb..6305ff7 100644
+ 'image_source': 'original_image',
+ })
+
+ image_meta = ironic_utils.get_test_image_meta()
+ image_meta = ironic_utils.get_test_image_meta_object()
+ flavor_id = 5
+ instance = fake_instance.fake_instance_obj(
+ self.ctx,
@@ -707,7 +618,7 @@ index b19c6eb..6305ff7 100644
+ 'image_source': 'original_image',
+ })
+
+ image_meta = ironic_utils.get_test_image_meta()
+ image_meta = ironic_utils.get_test_image_meta_object()
+ flavor_id = 5
+ instance = fake_instance.fake_instance_obj(
+ self.ctx,
@@ -744,7 +655,7 @@ index b19c6eb..6305ff7 100644
+ 'image_source': 'original_image',
+ })
+
+ image_meta = ironic_utils.get_test_image_meta()
+ image_meta = ironic_utils.get_test_image_meta_object()
+ flavor_id = 5
+ instance = fake_instance.fake_instance_obj(
+ self.ctx,
@@ -765,24 +676,25 @@ index b19c6eb..6305ff7 100644
+ instance.metadata['switch_boot_error'])
+ mock_save.assert_called_once_with()
+
@mock.patch.object(instance_metadata, 'InstanceMetadata')
@mock.patch.object(configdrive, 'ConfigDriveBuilder')
diff --git a/nova/tests/unit/virt/ironic/utils.py b/nova/tests/unit/virt/ironic/utils.py
index d43f290..f3ec825 100644
index 0e67919..66eede3 100644
--- a/nova/tests/unit/virt/ironic/utils.py
+++ b/nova/tests/unit/virt/ironic/utils.py
@@ -42,6 +42,7 @@ def get_test_node(**kw):
@@ -39,7 +39,7 @@ def get_test_node(**kw):
ironic_states.NOSTATE),
'last_error': kw.get('last_error'),
'instance_uuid': kw.get('instance_uuid'),
- 'instance_info': kw.get('instance_info'),
+ 'instance_info': kw.get('instance_info', {}),
'driver': kw.get('driver', 'fake'),
'driver_info': kw.get('driver_info', {}),
'properties': kw.get('properties', {}),
+ 'instance_info': kw.get('instance_info', {}),
'reservation': kw.get('reservation'),
'maintenance': kw.get('maintenance', False),
'extra': kw.get('extra', {}),
@@ -72,7 +73,11 @@ def get_test_flavor(**kw):
@@ -91,7 +91,11 @@ def get_test_flavor(**kw):
def get_test_image_meta(**kw):
- return {'id': kw.get('id', 'cccccccc-cccc-cccc-cccc-cccccccccccc')}
+ return {'id': kw.get('id', 'cccccccc-cccc-cccc-cccc-cccccccccccc'),
@@ -790,43 +702,24 @@ index d43f290..f3ec825 100644
+ 'deploy_config': kw.get('deploy_config', ''),
+ 'driver_actions': kw.get('driver_actions', ''),
+ }}
class FakePortClient(object):
@@ -110,6 +115,9 @@ class FakeNodeClient(object):
def get_test_image_meta_object(**kw):
@@ -134,6 +138,9 @@ class FakeNodeClient(object):
def validate(self, node_uuid):
pass
+ def vendor_passthru(self, node_uuid, method, args):
+ pass
+
class FakeClient(object):
diff --git a/nova/virt/ironic/driver.py b/nova/virt/ironic/driver.py
index b21c782..81dcdba 100644
index 194221e..062f3d7 100644
--- a/nova/virt/ironic/driver.py
+++ b/nova/virt/ironic/driver.py
@@ -40,6 +40,7 @@ from nova.compute import hv_type
from nova.compute import power_state
from nova.compute import task_states
from nova.compute import vm_mode
+from nova.compute import vm_states
from nova import context as nova_context
from nova import exception
from nova.i18n import _
@@ -107,6 +108,10 @@ _POWER_STATE_MAP = {
ironic_states.POWER_OFF: power_state.SHUTDOWN,
}
+_UNPROVISION_STATES = (ironic_states.ACTIVE, ironic_states.DEPLOYFAIL,
+ ironic_states.ERROR, ironic_states.DEPLOYWAIT,
+ ironic_states.DEPLOYING)
+
def map_power_state(state):
try:
@@ -326,6 +331,17 @@ class IronicDriver(virt_driver.ComputeDriver):
@@ -391,6 +391,17 @@ class IronicDriver(virt_driver.ComputeDriver):
# Associate the node with an instance
patch.append({'path': '/instance_uuid', 'op': 'add',
'value': instance.uuid})
@@ -844,35 +737,21 @@ index b21c782..81dcdba 100644
try:
self.ironicclient.call('node.update', node.uuid, patch)
except ironic.exc.BadRequest:
@@ -335,6 +351,13 @@ class IronicDriver(virt_driver.ComputeDriver):
@@ -400,6 +411,12 @@ class IronicDriver(virt_driver.ComputeDriver):
LOG.error(msg)
raise exception.InstanceDeployFailure(msg)
+ def _update_driver_fields_after_switch_boot(self, context, node,
+ instance, image_meta):
+ patch = []
+ patch.append({'path': '/instance_info/image_source', 'op': 'add',
+ 'value': image_meta.get('id')})
+ patch = [{'path': '/instance_info/image_source', 'op': 'add',
+ 'value': image_meta.id}]
+ self.ironicclient.call('node.update', node.uuid, patch)
+
def _cleanup_deploy(self, context, node, instance, network_info,
flavor=None):
if flavor is None:
@@ -358,6 +381,12 @@ class IronicDriver(virt_driver.ComputeDriver):
def _wait_for_active(self, ironicclient, instance):
"""Wait for the node to be marked as ACTIVE in Ironic."""
+ instance.refresh()
+ if (instance.task_state == task_states.DELETING or
+ instance.vm_state in (vm_states.ERROR, vm_states.DELETED)):
+ raise exception.InstanceDeployFailure(
+ _("Instance %s provisioning was aborted") % instance.uuid)
+
node = _validate_instance_and_node(ironicclient, instance)
if node.provision_state == ironic_states.ACTIVE:
# job is done
@@ -714,9 +743,19 @@ class IronicDriver(virt_driver.ComputeDriver):
@@ -807,9 +824,19 @@ class IronicDriver(virt_driver.ComputeDriver):
# trigger the node deploy
try:
- self.ironicclient.call("node.set_provision_state", node_uuid,
@ -894,7 +773,7 @@ index b21c782..81dcdba 100644
except Exception as e:
with excutils.save_and_reraise_exception():
msg = (_LE("Failed to request Ironic to provision instance "
@@ -739,6 +778,17 @@ class IronicDriver(virt_driver.ComputeDriver):
@@ -834,6 +861,17 @@ class IronicDriver(virt_driver.ComputeDriver):
{'instance': instance.uuid,
'node': node_uuid})
self.destroy(context, instance, network_info)
@ -909,27 +788,21 @@ index b21c782..81dcdba 100644
+ available_images = [img['image_name'] for img in
+ multiboot_meta.get('elements', [])]
+ instance.metadata['available_images'] = str(available_images)
def _unprovision(self, ironicclient, instance, node):
"""This method is called from destroy() to unprovision
@@ -814,10 +864,7 @@ class IronicDriver(virt_driver.ComputeDriver):
# without raising any exceptions.
return
- if node.provision_state in (ironic_states.ACTIVE,
- ironic_states.DEPLOYFAIL,
- ironic_states.ERROR,
- ironic_states.DEPLOYWAIT):
+ if node.provision_state in _UNPROVISION_STATES:
self._unprovision(self.ironicclient, instance, node)
self._cleanup_deploy(context, node, instance, network_info)
@@ -1074,24 +1121,127 @@ class IronicDriver(virt_driver.ComputeDriver):
node_uuid = instance.node
node = self.ironicclient.call("node.get", node_uuid)
- self._add_driver_fields(node, instance, image_meta, instance.flavor,
- preserve_ephemeral)
@@ -1188,16 +1226,102 @@ class IronicDriver(virt_driver.ComputeDriver):
instance.task_state = task_states.REBUILD_SPAWNING
instance.save(expected_task_state=[task_states.REBUILDING])
- node_uuid = instance.node
- node = self.ironicclient.call("node.get", node_uuid)
+ # NOTE(oberezovskyi): Required to get real node uuid assigned to nova
+ # instance. Workaround after
+ # Change-Id: I0233f964d8f294f0ffd9edcb16b1aaf93486177f
+ node = self.ironicclient.call("node.get_by_instance_uuid",
+ instance.uuid)
+
+ # NOTE(lobur): set_provision_state to
+ # ACTIVE, REBUILD, and switch_boot_device are the only Ironic API
+ # calls where the user context needs to be passed to Ironic. This
@ -957,28 +830,32 @@ index b21c782..81dcdba 100644
+ recreate=recreate,
+ block_device_info=block_device_info,
+ preserve_ephemeral=preserve_ephemeral)
- self._add_driver_fields(node, instance, image_meta, instance.flavor,
- preserve_ephemeral)
+ self._get_switch_boot_options(context, instance, node.uuid)
- # Trigger the node rebuild/redeploy.
+ self._get_switch_boot_options(context, instance, node_uuid)
+
+ def _do_switch_boot_device(self, context, ironicclient, node, instance,
+ image_meta):
+ old_image_ref = node.instance_info.get("image_source", "")
+ try:
try:
- self.ironicclient.call("node.set_provision_state",
- node_uuid, ironic_states.REBUILD)
+ sb_user, sb_key = self._get_switch_boot_user_key(instance.metadata)
+ args = dict(ssh_user=sb_user,
+ ssh_key=sb_key,
+ image=image_meta['id'])
+ image=image_meta.id)
+ ironicclient.call("node.vendor_passthru",
+ node.uuid, "switch_boot",
+ args)
+ self.ironicclient.call("node.set_power_state", node.uuid, 'reboot')
+ self._update_driver_fields_after_switch_boot(
+ context, node, instance, image_meta)
+ except (exception.InvalidMetadata, # Bad Nova API call
+ exception.NovaException, # Retry failed
+ except (exception.InvalidMetadata, # Bad Nova API call
+ exception.NovaException, # Retry failed
+ ironic.exc.InternalServerError, # Validations
+ ironic.exc.BadRequest) as e: # Maintenance or no such API
+ ironic.exc.BadRequest) as e: # Maintenance or no such API
+ # Ironic Vendor API always return 200/400/500, so the only way
+ # to check the error is introspecting its message.
+ if "Already in desired boot device" in six.text_type(e):
@@ -1004,7 +881,7 @@ index b21c782..81dcdba 100644
+ else:
+ raise exception.InvalidMetadata(
+ reason="To trigger switch boot device flow, both 'sb_user' "
+                       "and 'sb_key' metadata params are required. To "
+ "and 'sb_key' metadata params are required. To "s
+ "trigger a standard rebuild flow, use "
+ "force_rebuild=True metadata flag.")
+
@@ -1017,31 +894,17 @@ index b21c782..81dcdba 100644
+
+ self._add_driver_fields(node, instance, image_meta,
+ instance.flavor, preserve_ephemeral)
try:
- self.ironicclient.call("node.set_provision_state",
- node_uuid, ironic_states.REBUILD)
+ try:
+
+ ironicclient.call("node.set_provision_state",
+ node.uuid, ironic_states.REBUILD)
except (exception.NovaException, # Retry failed
ironic.exc.InternalServerError, # Validations
ironic.exc.BadRequest) as e: # Maintenance
msg = (_("Failed to request Ironic to rebuild instance "
- "%(inst)s: %(reason)s") % {'inst': instance.uuid,
- 'reason': six.text_type(e)})
+ "%(inst)s: %(reason)s") %
+ {'inst': instance.uuid,
+ 'reason': six.text_type(e)})
raise exception.InstanceDeployFailure(msg)
- # Although the target provision state is REBUILD, it will actually go
- # to ACTIVE once the redeploy is finished.
+ # Although the target provision state is REBUILD, it will
+ # actually go to ACTIVE once the redeploy is finished.
timer = loopingcall.FixedIntervalLoopingCall(self._wait_for_active,
self.ironicclient,
@@ -1213,3 +1337,22 @@ class IronicDriver(virt_driver.ComputeDriver):
instance)
timer.start(interval=CONF.ironic.api_retry_interval).wait()
LOG.info(_LI('Instance was successfully rebuilt'), instance=instance)
+
+ def _get_deploy_config_options(self, node, instance, image_meta):
+ # Taking into account previous options, if any. This is to support
@@ -1051,7 +914,7 @@ index b21c782..81dcdba 100644
+ res = node.instance_info.get('deploy_config_options', {})
+
+ curr_options = {
+ 'image': image_meta.get('properties', {}).get('deploy_config', ''),
+ 'image': image_meta.properties.get('deploy_config', ''),
+ 'instance': instance.metadata.get('deploy_config', ''),
+ 'node': node.driver_info.get('deploy_config', ''),
+ }
@@ -1061,21 +924,3 @@ index b21c782..81dcdba 100644
+ # Override previous by current.
+ res.update(curr_options)
+ return res
diff --git a/nova/virt/ironic/ironic_states.py b/nova/virt/ironic/ironic_states.py
index e521f16..a02ddcf 100644
--- a/nova/virt/ironic/ironic_states.py
+++ b/nova/virt/ironic/ironic_states.py
@@ -138,3 +138,13 @@ POWER_OFF = 'power off'
REBOOT = 'rebooting'
""" Node is rebooting. """
+
+##################
+# Helper constants
+##################
+
+PROVISION_STATE_LIST = (NOSTATE, MANAGEABLE, AVAILABLE, ACTIVE, DEPLOYWAIT,
+ DEPLOYING, DEPLOYFAIL, DEPLOYDONE, DELETING, DELETED,
+ CLEANING, CLEANFAIL, ERROR, REBUILD,
+ INSPECTING, INSPECTFAIL)
+""" A list of all provision states. """