EC2 metadata implementation
Change-Id: I7d3ba06c420e4f70cc01fd0ec64398a4c3405244
This commit is contained in:
parent
d8c2508624
commit
b27f4feab5
|
@ -188,7 +188,8 @@ class InstanceDescriber(common.TaggableItemsDescriber):
|
|||
self.reservations[reservation_id] = reservation
|
||||
if not instance['vpc_id']:
|
||||
self.reservation_os_groups[reservation_id] = (
|
||||
os_instance.security_groups)
|
||||
os_instance.security_groups
|
||||
if hasattr(os_instance, 'security_groups') else [])
|
||||
|
||||
self.reservation_instances[
|
||||
reservation['id']].append(formatted_instance)
|
||||
|
@ -211,7 +212,12 @@ class InstanceDescriber(common.TaggableItemsDescriber):
|
|||
|
||||
def get_os_items(self):
|
||||
self.novadb_instances = {}
|
||||
return clients.nova(self.context).servers.list()
|
||||
return clients.nova(self.context).servers.list(
|
||||
# NOTE(ft): these filters are needed for metadata server
|
||||
# which calls describe_instances with an admin account
|
||||
# (but project_id is substituted to an instance's one).
|
||||
search_opts={'all_tenants': self.context.cross_tenants,
|
||||
'project_id': self.context.project_id})
|
||||
|
||||
def auto_update_db(self, instance, os_instance):
|
||||
# TODO(ft): import and use instance_get_all_by_filters to
|
||||
|
@ -602,9 +608,12 @@ def _parse_image_parameters(context, image_id, kernel_id, ramdisk_id):
|
|||
# kind smarter
|
||||
def get_os_image(kind, ec2_image_id):
|
||||
try:
|
||||
ids = db_api.get_item_ids(context, kind, (ec2_image_id,))
|
||||
_id, os_image_id = ids[0]
|
||||
os_image = glance.images.get(os_image_id)
|
||||
images = db_api.get_public_items(context, kind, (ec2_image_id,))
|
||||
if images:
|
||||
image = images[0]
|
||||
else:
|
||||
image = db_api.get_item_by_id(context, kind, ec2_image_id)
|
||||
os_image = glance.images.get(image['os_id'])
|
||||
except (IndexError, glance_exception.HTTPNotFound):
|
||||
raise exception.InvalidAMIIDNotFound(id=ec2_image_id)
|
||||
return os_image
|
||||
|
|
|
@ -43,7 +43,7 @@ class RequestContext(object):
|
|||
is_admin=None, roles=None, remote_address=None,
|
||||
auth_token=None, user_name=None, project_name=None,
|
||||
overwrite=True, service_catalog=None, api_version=None,
|
||||
**kwargs):
|
||||
cross_tenants=None, **kwargs):
|
||||
"""Parameters
|
||||
|
||||
:param overwrite: Set to False to ensure that the greenthread local
|
||||
|
@ -79,6 +79,7 @@ class RequestContext(object):
|
|||
self.project_name = project_name
|
||||
self.is_admin = is_admin
|
||||
# TODO(ft): call policy.check_is_admin if is_admin is None
|
||||
self.cross_tenants = cross_tenants
|
||||
self.api_version = api_version
|
||||
if overwrite or not hasattr(local.store, 'context'):
|
||||
self.update_store()
|
||||
|
|
|
@ -39,11 +39,16 @@ CONF = cfg.CONF
|
|||
CONF.register_opts(exc_log_opts)
|
||||
|
||||
|
||||
class EC2ServerError(Exception):
|
||||
class EC2MetadataException(Exception):
|
||||
pass
|
||||
|
||||
def __init__(self, response, content):
|
||||
self.response = response
|
||||
self.content = content
|
||||
|
||||
class EC2MetadataNotFound(EC2MetadataException):
|
||||
pass
|
||||
|
||||
|
||||
class EC2MetadataInvalidAddress(EC2MetadataException):
|
||||
pass
|
||||
|
||||
|
||||
class EC2Exception(Exception):
|
||||
|
|
|
@ -14,18 +14,22 @@
|
|||
|
||||
import hashlib
|
||||
import hmac
|
||||
import posixpath
|
||||
import urlparse
|
||||
|
||||
import httplib2
|
||||
from keystoneclient.v2_0 import client as keystone_client
|
||||
from oslo.config import cfg
|
||||
import six
|
||||
import webob
|
||||
|
||||
from ec2api import context as ec2context
|
||||
from ec2api import exception
|
||||
from ec2api.metadata import api
|
||||
from ec2api.openstack.common import gettextutils as textutils
|
||||
from ec2api.openstack.common.gettextutils import _
|
||||
from ec2api.openstack.common import log as logging
|
||||
from ec2api import utils
|
||||
from ec2api import wsgi
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
@ -77,10 +81,27 @@ class MetadataRequestHandler(wsgi.Application):
|
|||
|
||||
@webob.dec.wsgify(RequestClass=wsgi.Request)
|
||||
def __call__(self, req):
|
||||
try:
|
||||
LOG.debug("Request: %s", req)
|
||||
LOG.debug('Request: %s', req)
|
||||
|
||||
return self._proxy_request(req)
|
||||
path = req.path_info
|
||||
if path == '' or path[0] != '/':
|
||||
path = '/' + path
|
||||
path = posixpath.normpath(path)
|
||||
path_tokens = path.split('/')[1:]
|
||||
|
||||
if path_tokens in ([''], ['ec2']):
|
||||
resp = api.get_version_list()
|
||||
return self._add_response_data(req.response, resp)
|
||||
|
||||
try:
|
||||
if path_tokens[0] == 'openstack':
|
||||
return self._proxy_request(req)
|
||||
elif path_tokens[0] == 'ec2':
|
||||
path_tokens = path_tokens[1:]
|
||||
resp = self._get_metadata(req, path_tokens)
|
||||
return self._add_response_data(req.response, resp)
|
||||
except exception.EC2MetadataNotFound:
|
||||
return webob.exc.HTTPNotFound()
|
||||
except Exception:
|
||||
LOG.exception(textutils._LE("Unexpected error."))
|
||||
msg = _('An unknown error has occurred. '
|
||||
|
@ -89,8 +110,6 @@ class MetadataRequestHandler(wsgi.Application):
|
|||
|
||||
def _proxy_request(self, req):
|
||||
headers = self._build_proxy_request_headers(req)
|
||||
if not headers:
|
||||
return webob.exc.HTTPNotFound()
|
||||
nova_ip_port = '%s:%s' % (CONF.metadata.nova_metadata_ip,
|
||||
CONF.metadata.nova_metadata_port)
|
||||
url = urlparse.urlunsplit((
|
||||
|
@ -143,25 +162,24 @@ class MetadataRequestHandler(wsgi.Application):
|
|||
if req.headers.get('X-Instance-ID'):
|
||||
return req.headers
|
||||
|
||||
instance_ip = self._get_instance_ip(req)
|
||||
remote_ip = self._get_remote_ip(req)
|
||||
context = self._get_context()
|
||||
instance_id, project_id = api.get_instance_and_project_id(context,
|
||||
instance_ip)
|
||||
if not instance_id:
|
||||
return None
|
||||
|
||||
instance_id, project_id = (
|
||||
api.get_os_instance_and_project_id(context, remote_ip))
|
||||
return {
|
||||
'X-Forwarded-For': instance_ip,
|
||||
'X-Forwarded-For': remote_ip,
|
||||
'X-Instance-ID': instance_id,
|
||||
'X-Tenant-ID': project_id,
|
||||
'X-Instance-ID-Signature': self._sign_instance_id(instance_id),
|
||||
}
|
||||
|
||||
def _get_instance_ip(self, req):
|
||||
instance_ip = req.remote_addr
|
||||
def _get_remote_ip(self, req):
|
||||
remote_ip = req.remote_addr
|
||||
if CONF.use_forwarded_for:
|
||||
instance_ip = req.headers.get('X-Forwarded-For', instance_ip)
|
||||
return instance_ip
|
||||
remote_ip = req.headers.get('X-Forwarded-For', remote_ip)
|
||||
if not remote_ip:
|
||||
raise exception.EC2MetadataInvalidAddress()
|
||||
return remote_ip
|
||||
|
||||
def _get_context(self):
|
||||
# TODO(ft): make authentification token reusable
|
||||
|
@ -177,9 +195,81 @@ class MetadataRequestHandler(wsgi.Application):
|
|||
keystone.auth_tenant_id,
|
||||
None, None,
|
||||
auth_token=keystone.auth_token,
|
||||
service_catalog=service_catalog)
|
||||
service_catalog=service_catalog,
|
||||
is_admin=True,
|
||||
cross_tenants=True)
|
||||
|
||||
def _sign_instance_id(self, instance_id):
|
||||
return hmac.new(CONF.metadata.metadata_proxy_shared_secret,
|
||||
instance_id,
|
||||
hashlib.sha256).hexdigest()
|
||||
|
||||
def _get_metadata(self, req, path_tokens):
|
||||
context = self._get_context()
|
||||
if req.headers.get('X-Instance-ID'):
|
||||
os_instance_id, project_id, remote_ip = (
|
||||
self._unpack_request_attributes(req))
|
||||
else:
|
||||
remote_ip = self._get_remote_ip(req)
|
||||
os_instance_id, project_id = (
|
||||
api.get_os_instance_and_project_id(context, remote_ip))
|
||||
# NOTE(ft): substitute project_id for context to instance's one.
|
||||
# It's needed for correct describe and auto update DB operations.
|
||||
# It doesn't affect operations via OpenStack's clients because
|
||||
# these clients use auth_token field only
|
||||
context.project_id = project_id
|
||||
return api.get_metadata_item(context, path_tokens, os_instance_id,
|
||||
remote_ip)
|
||||
|
||||
def _unpack_request_attributes(self, req):
|
||||
os_instance_id = req.headers.get('X-Instance-ID')
|
||||
project_id = req.headers.get('X-Tenant-ID')
|
||||
signature = req.headers.get('X-Instance-ID-Signature')
|
||||
remote_ip = req.headers.get('X-Forwarded-For')
|
||||
|
||||
if not remote_ip:
|
||||
raise exception.EC2MetadataInvalidAddress()
|
||||
|
||||
if os_instance_id is None:
|
||||
msg = _('X-Instance-ID header is missing from request.')
|
||||
elif project_id is None:
|
||||
msg = _('X-Tenant-ID header is missing from request.')
|
||||
elif not isinstance(os_instance_id, six.string_types):
|
||||
msg = _('Multiple X-Instance-ID headers found within request.')
|
||||
elif not isinstance(project_id, six.string_types):
|
||||
msg = _('Multiple X-Tenant-ID headers found within request.')
|
||||
else:
|
||||
msg = None
|
||||
|
||||
if msg:
|
||||
raise webob.exc.HTTPBadRequest(explanation=msg)
|
||||
|
||||
expected_signature = hmac.new(
|
||||
CONF.metadata.metadata_proxy_shared_secret,
|
||||
os_instance_id,
|
||||
hashlib.sha256).hexdigest()
|
||||
|
||||
if not utils.constant_time_compare(expected_signature, signature):
|
||||
LOG.warning(textutils._LW(
|
||||
'X-Instance-ID-Signature: %(signature)s does '
|
||||
'not match the expected value: '
|
||||
'%(expected_signature)s for id: '
|
||||
'%(instance_id)s. Request From: '
|
||||
'%(remote_ip)s'),
|
||||
{'signature': signature,
|
||||
'expected_signature': expected_signature,
|
||||
'instance_id': os_instance_id,
|
||||
'remote_ip': remote_ip})
|
||||
|
||||
msg = _('Invalid proxy request signature.')
|
||||
raise webob.exc.HTTPForbidden(explanation=msg)
|
||||
|
||||
return os_instance_id, project_id, remote_ip
|
||||
|
||||
def _add_response_data(self, response, data):
|
||||
if isinstance(data, six.text_type):
|
||||
response.text = data
|
||||
else:
|
||||
response.body = data
|
||||
response.content_type = 'text/plain'
|
||||
return response
|
||||
|
|
|
@ -17,24 +17,260 @@ import itertools
|
|||
from novaclient import exceptions as nova_exception
|
||||
|
||||
from ec2api.api import clients
|
||||
from ec2api.api import ec2utils
|
||||
from ec2api.api import instance as instance_api
|
||||
from ec2api import exception
|
||||
from ec2api.novadb import api as novadb
|
||||
from ec2api.openstack.common.gettextutils import _
|
||||
from ec2api.openstack.common import log as logging
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
VERSIONS = [
|
||||
'1.0',
|
||||
'2007-01-19',
|
||||
'2007-03-01',
|
||||
'2007-08-29',
|
||||
'2007-10-10',
|
||||
'2007-12-15',
|
||||
'2008-02-01',
|
||||
'2008-09-01',
|
||||
'2009-04-04',
|
||||
]
|
||||
|
||||
VERSION_DATA = {
|
||||
'1.0': ['ami-id',
|
||||
'ami-launch-index',
|
||||
'ami-manifest-path',
|
||||
'hostname',
|
||||
'instance-id',
|
||||
'local-ipv4',
|
||||
'public-keys',
|
||||
'reservation-id',
|
||||
'security-groups'],
|
||||
'2007-01-19': ['local-hostname',
|
||||
'public-hostname',
|
||||
'public-ipv4'],
|
||||
'2007-03-01': ['product-codes'],
|
||||
'2007-08-29': ['instance-type'],
|
||||
'2007-10-10': ['ancestor-ami-ids',
|
||||
'ramdisk-id'],
|
||||
'2007-12-15': ['block-device-mapping'],
|
||||
'2008-02-01': ['kernel-id',
|
||||
'placement'],
|
||||
'2008-09-01': ['instance-action'],
|
||||
'2009-04-04': [],
|
||||
}
|
||||
|
||||
|
||||
def get_instance_and_project_id(context, fixed_ip):
|
||||
nova = clients.nova(context)
|
||||
def get_version_list():
|
||||
return _format_metadata_item(VERSIONS + ["latest"])
|
||||
|
||||
|
||||
def get_os_instance_and_project_id(context, fixed_ip):
|
||||
try:
|
||||
nova = clients.nova(context)
|
||||
os_address = nova.fixed_ips.get(fixed_ip)
|
||||
except nova_exception.NotFound:
|
||||
return None, None
|
||||
if not os_address.hostname:
|
||||
return None, None
|
||||
os_instances = nova.servers.list(
|
||||
search_opts={'hostname': os_address.hostname,
|
||||
'all_tenants': True})
|
||||
return next((os_instance.id, os_instance.tenant_id)
|
||||
for os_instance in os_instances
|
||||
if any((addr['addr'] == fixed_ip and
|
||||
addr['OS-EXT-IPS:type'] == 'fixed')
|
||||
for addr in itertools.chain(
|
||||
*os_instance.addresses.itervalues())))
|
||||
except (nova_exception.NotFound, StopIteration):
|
||||
raise exception.EC2MetadataNotFound()
|
||||
|
||||
os_instances = nova.servers.list(
|
||||
search_opts={'hostname': os_address.hostname,
|
||||
'all_tenants': True})
|
||||
for os_instance in os_instances:
|
||||
if any(addr['addr'] == fixed_ip and addr['OS-EXT-IPS:type'] == 'fixed'
|
||||
for addr in itertools.chain(
|
||||
*os_instance.addresses.itervalues())):
|
||||
return os_instance.id, os_instance.tenant_id
|
||||
|
||||
return None, None
|
||||
def get_metadata_item(context, path_tokens, os_instance_id, remote_ip):
|
||||
version = path_tokens[0]
|
||||
if version == "latest":
|
||||
version = VERSIONS[-1]
|
||||
elif version not in VERSIONS:
|
||||
raise exception.EC2MetadataNotFound()
|
||||
|
||||
ec2_instance, ec2_reservation = (
|
||||
_get_ec2_instance_and_reservation(context, os_instance_id))
|
||||
# NOTE(ft): check for case of Neutron metadata proxy.
|
||||
# It sends project_id as X-Tenant-ID HTTP header. We make sure it's correct
|
||||
if context.project_id != ec2_reservation['ownerId']:
|
||||
LOG.warning(_('Tenant_id %(tenant_id)s does not match tenant_id '
|
||||
'of instance %(instance_id)s.'),
|
||||
{'tenant_id': context.project_id,
|
||||
'instance_id': os_instance_id})
|
||||
raise exception.EC2MetadataNotFound()
|
||||
|
||||
metadata = _build_metadata(context, ec2_instance, ec2_reservation,
|
||||
os_instance_id, remote_ip)
|
||||
# TODO(ft): cache built metadata
|
||||
metadata = _cut_down_to_version(metadata, version)
|
||||
metadata_item = _find_path_in_tree(metadata, path_tokens[1:])
|
||||
return _format_metadata_item(metadata_item)
|
||||
|
||||
|
||||
def _get_ec2_instance_and_reservation(context, os_instance_id):
|
||||
instance_id = ec2utils.os_id_to_ec2_id(context, 'i', os_instance_id)
|
||||
try:
|
||||
ec2_reservations = instance_api.describe_instances(
|
||||
context, [instance_id])
|
||||
except exception.InvalidInstanceIDNotFound:
|
||||
ec2_reservations = instance_api.describe_instances(
|
||||
context, filter=[{'name': 'instance-id',
|
||||
'value': [instance_id]}])
|
||||
if (len(ec2_reservations['reservationSet']) != 1 or
|
||||
len(ec2_reservations['reservationSet'][0]['instancesSet']) != 1):
|
||||
LOG.error(_('Failed to get metadata for instance id: %s'),
|
||||
os_instance_id)
|
||||
raise exception.EC2MetadataNotFound()
|
||||
|
||||
ec2_reservation = ec2_reservations['reservationSet'][0]
|
||||
ec2_instance = ec2_reservation['instancesSet'][0]
|
||||
|
||||
return ec2_instance, ec2_reservation
|
||||
|
||||
|
||||
def _build_metadata(context, ec2_instance, ec2_reservation,
|
||||
os_instance_id, remote_ip):
|
||||
metadata = {
|
||||
'ami-id': ec2_instance['imageId'],
|
||||
'ami-launch-index': ec2_instance['amiLaunchIndex'],
|
||||
# NOTE (ft): the fake value as it is in Nova EC2 metadata
|
||||
'ami-manifest-path': 'FIXME',
|
||||
# NOTE (ft): empty value as it is in Nova EC2 metadata
|
||||
'ancestor-ami-ids': [],
|
||||
'block-device-mapping': _build_block_device_mappings(context,
|
||||
ec2_instance,
|
||||
os_instance_id),
|
||||
# NOTE(ft): Nova EC2 metadata returns instance's hostname with
|
||||
# dhcp_domain suffix if it's set in config.
|
||||
# But i don't see any reason to return a hostname differs from EC2
|
||||
# describe output one. If we need to consider dhcp_domain suffix
|
||||
# then we should do it in the describe operation
|
||||
'hostname': ec2_instance['privateDnsName'],
|
||||
# NOTE (ft): the fake value as it is in Nova EC2 metadata
|
||||
'instance-action': 'none',
|
||||
'instance-id': ec2_instance['instanceId'],
|
||||
'instance-type': ec2_instance['instanceType'],
|
||||
'local-hostname': ec2_instance['privateDnsName'],
|
||||
'local-ipv4': ec2_instance['privateIpAddress'] or remote_ip,
|
||||
'placement': {
|
||||
'availability-zone': ec2_instance['placement']['availabilityZone']
|
||||
},
|
||||
# NOTE (ft): empty value as it is in Nova EC2 metadata
|
||||
'product-codes': [],
|
||||
'public-hostname': ec2_instance['dnsName'],
|
||||
'public-ipv4': ec2_instance.get('ipAddress', ''),
|
||||
'reservation-id': ec2_reservation['reservationId'],
|
||||
'security-groups': [sg['groupName']
|
||||
for sg in ec2_reservation.get('groupSet', [])],
|
||||
}
|
||||
if 'kernelId' in ec2_instance:
|
||||
metadata['kernel-id'] = ec2_instance['kernelId']
|
||||
if 'ramdiskId' in ec2_instance:
|
||||
metadata['ramdisk-id'] = ec2_instance['ramdiskId']
|
||||
# public keys are strangely rendered in ec2 metadata service
|
||||
# meta-data/public-keys/ returns '0=keyname' (with no trailing /)
|
||||
# and only if there is a public key given.
|
||||
# '0=keyname' means there is a normally rendered dict at
|
||||
# meta-data/public-keys/0
|
||||
#
|
||||
# meta-data/public-keys/ : '0=%s' % keyname
|
||||
# meta-data/public-keys/0/ : 'openssh-key'
|
||||
# meta-data/public-keys/0/openssh-key : '%s' % publickey
|
||||
if ec2_instance['keyName']:
|
||||
novadb_instance = novadb.instance_get_by_uuid(context, os_instance_id)
|
||||
metadata['public-keys'] = {
|
||||
'0': {'_name': "0=" + ec2_instance['keyName'],
|
||||
'openssh-key': novadb_instance['key_data']}}
|
||||
|
||||
full_metadata = {'meta-data': metadata}
|
||||
|
||||
userdata = instance_api.describe_instance_attribute(
|
||||
context, ec2_instance['instanceId'], 'userData')
|
||||
if 'userData' in userdata:
|
||||
full_metadata['user-data'] = userdata['userData']['value']
|
||||
|
||||
return full_metadata
|
||||
|
||||
|
||||
def _build_block_device_mappings(context, ec2_instance, os_instance_id):
|
||||
mappings = {'ami': ec2_instance['rootDeviceName'],
|
||||
'root': instance_api._block_device_strip_dev(
|
||||
ec2_instance['rootDeviceName'])}
|
||||
if 'blockDeviceMapping' in ec2_instance:
|
||||
# NOTE(yamahata): I'm not sure how ebs device should be numbered.
|
||||
# Right now sort by device name for deterministic
|
||||
# result.
|
||||
ebs_devices = [ebs['deviceName']
|
||||
for ebs in ec2_instance['blockDeviceMapping']]
|
||||
ebs_devices.sort()
|
||||
ebs_devices = dict(('ebs%d' % num, ebs)
|
||||
for num, ebs in enumerate(ebs_devices))
|
||||
mappings.update(ebs_devices)
|
||||
|
||||
bdms = novadb.block_device_mapping_get_all_by_instance(context,
|
||||
os_instance_id)
|
||||
ephemerals = dict(('ephemeral%d' % num, eph['device_name'])
|
||||
for num, eph in enumerate(
|
||||
eph for eph in bdms
|
||||
if (eph['source_type'] == 'blank' and
|
||||
eph['quest_format'] != 'swap')))
|
||||
mappings.update(ephemerals)
|
||||
|
||||
swap = next((swap['device_name'] for swap in bdms
|
||||
if (swap['source_type'] == 'blank' and
|
||||
swap['quest_format'] == 'swap')), None)
|
||||
if swap:
|
||||
mappings['swap'] = swap
|
||||
|
||||
return mappings
|
||||
|
||||
|
||||
def _cut_down_to_version(metadata, version):
|
||||
version_number = VERSIONS.index(version) + 1
|
||||
if version_number == len(VERSIONS):
|
||||
return metadata
|
||||
return dict((attr, metadata[attr])
|
||||
for attr in itertools.chain(
|
||||
*(VERSION_DATA[ver]
|
||||
for ver in VERSIONS[:version_number]))
|
||||
if attr in metadata)
|
||||
|
||||
|
||||
def _format_metadata_item(data):
|
||||
if isinstance(data, dict):
|
||||
output = ''
|
||||
for key in sorted(data.keys()):
|
||||
if key == '_name':
|
||||
continue
|
||||
if isinstance(data[key], dict):
|
||||
if '_name' in data[key]:
|
||||
output += str(data[key]['_name'])
|
||||
else:
|
||||
output += key + '/'
|
||||
else:
|
||||
output += key
|
||||
|
||||
output += '\n'
|
||||
return output[:-1]
|
||||
elif isinstance(data, list):
|
||||
return '\n'.join(data)
|
||||
else:
|
||||
return str(data)
|
||||
|
||||
|
||||
def _find_path_in_tree(data, path_tokens):
|
||||
# given a dict/list tree, and a path in that tree, return data found there.
|
||||
for i in range(0, len(path_tokens)):
|
||||
if isinstance(data, dict) or isinstance(data, list):
|
||||
if path_tokens[i] in data:
|
||||
data = data[path_tokens[i]]
|
||||
else:
|
||||
raise exception.EC2MetadataNotFound()
|
||||
else:
|
||||
if i != len(path_tokens) - 1:
|
||||
raise exception.EC2MetadataNotFound()
|
||||
data = data[path_tokens[i]]
|
||||
return data
|
||||
|
|
|
@ -171,6 +171,9 @@ ID_EC2_SECURITY_GROUP_2 = random_ec2_id('sg')
|
|||
ID_OS_SECURITY_GROUP_1 = random_os_id()
|
||||
ID_OS_SECURITY_GROUP_2 = random_os_id()
|
||||
|
||||
NAME_DEFAULT_OS_SECURITY_GROUP = 'default'
|
||||
NAME_OTHER_OS_SECURITY_GROUP = 'other'
|
||||
|
||||
|
||||
# route table constants
|
||||
ID_EC2_ROUTE_TABLE_1 = random_ec2_id('rtb')
|
||||
|
@ -212,6 +215,46 @@ ID_OS_SNAPSHOT_1 = random_os_id()
|
|||
ID_OS_SNAPSHOT_2 = random_os_id()
|
||||
|
||||
|
||||
# key pair constans
|
||||
NAME_KEY_PAIR = 'keyname'
|
||||
PRIVATE_KEY_KEY_PAIR = (
|
||||
'-----BEGIN RSA PRIVATE KEY-----\n'
|
||||
'MIIEowIBAAKCAQEAgXvm1sZ9MDiAXvGraRFja0/WqyJ1gE6j/QPjreNryd34zBFcv2pQXLyvb'
|
||||
'gQG\nFxN4rMGNScgKgLSgHjE/TNywkT8N7aYOiRmGkzQciP5t+zf8ZdCyl+hqgoQig1uY8sV/'
|
||||
'fSxUWCB9\n8sF7Tpl0iGkWM6Wo0H/PvcwiS2+UPSzArj+b+Erb/JbBF4O8GgSmtLMeq60RuDM'
|
||||
'dJi5JYCP66HUw\njtYb/f9y1Q9nEGVcxY2v0RI1n0yOaZDKPInLKHeR/ole2QVwPZB69mBj11'
|
||||
'LErqb+jzCaSivnhy6g\nPzaSHdZaRmy1f+6ltFI1iKt+4y/iINOY0skYC1hc7IevE7j7dGQTD'
|
||||
'wIDAQABAoIBAEbD2Vfd6MM2\nzemVuHFWoHggjRjAX2k9EWCRBJifJuSPXI7imka+qqbUNCgz'
|
||||
'KMTpzlTT/wyouBy5Gp0Fmyu9nP30\ncP9FdsI04hiHLWUtcBwQ7+8RDNn6mmM0JcyWfdOIXnG'
|
||||
'hjYMQVuUaGvLM6SQ4EnsteUJh57451zBV\nDbYVRES2Fbq+j8tPQj1KuD0HhZBboNPOxo6E5n'
|
||||
'TxvMXnvuI+cb9D99lqATcb8c0zsLMl/5SKEBDc\nj72X4GPfE3Dc5/MO6L/89ms3TqF3lx8lh'
|
||||
'wFSMfFfA3Nf5xrX3gnorGe81odXBXFveqMCemvfJYxg\nS9KPkM8CMnwn6yPS3ftW5xH3nMkC'
|
||||
'gYEAvN4lQuOTy9RONCtfgZ6lhR00xfDiibOsE2jFXqXlXrZS\nunBx2WRwNuhAcYGbC4T71iC'
|
||||
'BR+LJHECpFjEFX9cKjd8xZPdIzJmwMBylPnli8IxK9UMroxF/MDNy\nnJfdPIWagIrk9VRsQH'
|
||||
'UOQW8Ab5dYJuP6c03L5xwmnFfeFnlz10MCgYEAr4Iu182bC2ppwr5AYD8T\n/QKVPZTmizbtG'
|
||||
'H/7a2+WnfNCz2u0MOo2h1rF7/SOYR8nalTTsN1z4D8cRX7YQ0P4yBtNRNiN7WH3\n+smTWztI'
|
||||
'VYvJA2RsOeP0zfGLJiFSMWLOjlqpJ7KbkEuPcxshGd+/w8upxgJeV8Dwz0ZWbY302kUC\ngYE'
|
||||
'AhneTB+CHpaNuWm5W/S46ol9850DtySSG6vq5Kv3qJFii5eKQ7Do6Op145FdmT/lKY9WYtdmd'
|
||||
'\nXeQbfpVAQlAUT5YM0NnOlv0FF/wNGkHKU4FPDPfZ5avbZjH688qb1S86JTK+eHy25d1xXNz'
|
||||
'u7oRO\nWsIN2nIVLmI4iy90C4RFGYkCgYBXpKPtwk/VkItF46nUJku+Agcy3GOQS5p0rJyJ1w'
|
||||
'yYzbykRf2S\nm7MlPpAvtqlPGLafI8MexEe0SO++SIyIcq4Oh4u7gITHcS/bfcPnQCBsD8UOu'
|
||||
'5xMAGjkWuWI4gTg\ngp3xepaUK14B3anB6l9KQ3DIvrCGH/Kq0b+vUkmgpc4LHQKBgBtul9bN'
|
||||
'KLF+LJf4JHYNFSurE8Y/\nn8FZ3dZo3T0Q3Sap9bP3ZHemoQ6QXbmpu3H4Mf+2kcNg6YKFW3p'
|
||||
'hxW3cuAcZOMHPCrpr3mCdyhF0\nKM74ANEwg8MekBJTcWZUNFv9HZDvTuhp6HSrbMnNEQogkd'
|
||||
'5PoubiusvAKpeb6NBGnLMq\n'
|
||||
'-----END RSA PRIVATE KEY-----'
|
||||
)
|
||||
PUBLIC_KEY_KEY_PAIR = (
|
||||
'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDIkYwwXm8UeQXx1c2eFrDIB6b'
|
||||
'6ApI0KTKs1wezDfFdSIs93vAt4Jx1MyaR/PwqwLk2CDyFoGJBWBI9YcodLAjoRg'
|
||||
'Ovr6JigEv5V3yp+eEkeAJO0cPA21vN/KQ8Vxml68ZvvqbdqKZXc/rpFZ1OgCmHt'
|
||||
'udo96uQiRB0FM3mdE8YOTswcfkJxTvCe3axX50pYXXfIb0dn9CzC1hyQWYPXvlv'
|
||||
'qFNvr/Li7sSBycTBAh4Ar/uEigs/uOjhvzd7GpzY7qDqBVJFAmP7HiiOxoXPkKu'
|
||||
'W62Ftd')
|
||||
FINGERPRINT_KEY_PAIR = (
|
||||
'2a:72:dd:aa:0d:a6:45:4d:27:4f:75:28:73:0d:a6:10:35:88:e1:ce')
|
||||
|
||||
|
||||
# Object constants section
|
||||
# Constant name notation:
|
||||
# [<subtype>]<object_name>
|
||||
|
@ -435,10 +478,11 @@ DB_INSTANCE_2 = {
|
|||
NOVADB_INSTANCE_1 = {
|
||||
'reservation_id': random_ec2_id('r'),
|
||||
'launch_index': 0,
|
||||
'kernel_id': None,
|
||||
'ramdisk_id': None,
|
||||
'kernel_id': ID_OS_IMAGE_AKI_1,
|
||||
'ramdisk_id': ID_OS_IMAGE_ARI_1,
|
||||
'root_device_name': '/dev/vda',
|
||||
'hostname': ID_EC2_INSTANCE_1,
|
||||
'key_data': PUBLIC_KEY_KEY_PAIR,
|
||||
}
|
||||
NOVADB_INSTANCE_2 = {
|
||||
'reservation_id': ID_EC2_RESERVATION_2,
|
||||
|
@ -515,9 +559,11 @@ EC2_INSTANCE_1 = {
|
|||
'dnsName': IP_ADDRESS_2,
|
||||
'instanceState': {'code': 0, 'name': 'pending'},
|
||||
'imageId': ID_EC2_IMAGE_1,
|
||||
'kernelId': ID_EC2_IMAGE_AKI_1,
|
||||
'ramdiskId': ID_EC2_IMAGE_ARI_1,
|
||||
'productCodesSet': [],
|
||||
'privateDnsName': ID_EC2_INSTANCE_1,
|
||||
'keyName': None,
|
||||
'keyName': NAME_KEY_PAIR,
|
||||
'launchTime': None,
|
||||
'rootDeviceType': 'instance-store',
|
||||
'instanceType': 'fake_flavor',
|
||||
|
@ -548,7 +594,8 @@ EC2_RESERVATION_1 = {
|
|||
EC2_RESERVATION_2 = {
|
||||
'reservationId': ID_EC2_RESERVATION_2,
|
||||
'ownerId': ID_OS_PROJECT,
|
||||
'groupSet': [],
|
||||
'groupSet': [{'groupName': NAME_DEFAULT_OS_SECURITY_GROUP},
|
||||
{'groupName': NAME_OTHER_OS_SECURITY_GROUP}],
|
||||
'instancesSet': [EC2_INSTANCE_2],
|
||||
}
|
||||
|
||||
|
@ -593,9 +640,12 @@ OS_INSTANCE_1 = OSInstance(
|
|||
{'addr': IP_ADDRESS_2,
|
||||
'version': 4,
|
||||
'OS-EXT-IPS:type': 'floating'}]},
|
||||
key_name=NAME_KEY_PAIR,
|
||||
)
|
||||
OS_INSTANCE_2 = OSInstance(
|
||||
ID_OS_INSTANCE_2, {'id': 'fakeFlavorId'})
|
||||
ID_OS_INSTANCE_2, {'id': 'fakeFlavorId'},
|
||||
security_groups=[{'name': NAME_DEFAULT_OS_SECURITY_GROUP},
|
||||
{'name': NAME_OTHER_OS_SECURITY_GROUP}])
|
||||
|
||||
# DHCP options objects
|
||||
DB_DHCP_OPTIONS_1 = {'id': ID_EC2_DHCP_OPTIONS_1,
|
||||
|
@ -1047,6 +1097,16 @@ DB_IMAGE_2 = {
|
|||
'os_id': ID_OS_IMAGE_2,
|
||||
'is_public': True,
|
||||
}
|
||||
DB_IMAGE_AKI_1 = {
|
||||
'id': ID_EC2_IMAGE_AKI_1,
|
||||
'os_id': ID_OS_IMAGE_AKI_1,
|
||||
'is_public': True,
|
||||
}
|
||||
DB_IMAGE_ARI_1 = {
|
||||
'id': ID_EC2_IMAGE_ARI_1,
|
||||
'os_id': ID_OS_IMAGE_ARI_1,
|
||||
'is_public': True,
|
||||
}
|
||||
|
||||
OS_IMAGE_1 = {
|
||||
'id': ID_OS_IMAGE_1,
|
||||
|
@ -1182,95 +1242,6 @@ OS_SNAPSHOT_2 = {
|
|||
}
|
||||
|
||||
|
||||
# availability zone objects
|
||||
|
||||
class NovaAvailabilityZone(object):
|
||||
|
||||
def __init__(self, nova_availability_zone_dict):
|
||||
self.zoneName = nova_availability_zone_dict['zoneName']
|
||||
self.zoneState = {'available':
|
||||
nova_availability_zone_dict['zoneState'] == 'available'}
|
||||
self.hosts = nova_availability_zone_dict['hosts']
|
||||
|
||||
OS_AVAILABILITY_ZONE = {'zoneName': 'nova',
|
||||
'zoneState': 'available',
|
||||
'hosts': {'host1': {'service1': {
|
||||
'active': 'True',
|
||||
'available': 'True',
|
||||
'updated_at': 'now'},
|
||||
'service2': {
|
||||
'active': 'False',
|
||||
'available': 'False',
|
||||
'updated_at': 'now'}},
|
||||
'host2': {'service1': {
|
||||
'active': 'True',
|
||||
'available': 'True',
|
||||
'updated_at': 'now'}}
|
||||
}}
|
||||
OS_AVAILABILITY_ZONE_INTERNAL = {'zoneName': 'internal',
|
||||
'zoneState': 'available',
|
||||
'hosts': {}}
|
||||
EC2_AVAILABILITY_ZONE = {'zoneName': 'nova',
|
||||
'zoneState': 'available'}
|
||||
|
||||
|
||||
# keypair objects
|
||||
|
||||
class NovaKeyPair(object):
|
||||
|
||||
def __init__(self, nova_keypair_dict):
|
||||
self.name = nova_keypair_dict['name']
|
||||
self.fingerprint = nova_keypair_dict['fingerprint']
|
||||
self.private_key = nova_keypair_dict['private_key']
|
||||
self.public_key = nova_keypair_dict['public_key']
|
||||
|
||||
PRIVATE_KEY = (
|
||||
'-----BEGIN RSA PRIVATE KEY-----\n'
|
||||
'MIIEowIBAAKCAQEAgXvm1sZ9MDiAXvGraRFja0/WqyJ1gE6j/QPjreNryd34zBFcv2pQXLyvb'
|
||||
'gQG\nFxN4rMGNScgKgLSgHjE/TNywkT8N7aYOiRmGkzQciP5t+zf8ZdCyl+hqgoQig1uY8sV/'
|
||||
'fSxUWCB9\n8sF7Tpl0iGkWM6Wo0H/PvcwiS2+UPSzArj+b+Erb/JbBF4O8GgSmtLMeq60RuDM'
|
||||
'dJi5JYCP66HUw\njtYb/f9y1Q9nEGVcxY2v0RI1n0yOaZDKPInLKHeR/ole2QVwPZB69mBj11'
|
||||
'LErqb+jzCaSivnhy6g\nPzaSHdZaRmy1f+6ltFI1iKt+4y/iINOY0skYC1hc7IevE7j7dGQTD'
|
||||
'wIDAQABAoIBAEbD2Vfd6MM2\nzemVuHFWoHggjRjAX2k9EWCRBJifJuSPXI7imka+qqbUNCgz'
|
||||
'KMTpzlTT/wyouBy5Gp0Fmyu9nP30\ncP9FdsI04hiHLWUtcBwQ7+8RDNn6mmM0JcyWfdOIXnG'
|
||||
'hjYMQVuUaGvLM6SQ4EnsteUJh57451zBV\nDbYVRES2Fbq+j8tPQj1KuD0HhZBboNPOxo6E5n'
|
||||
'TxvMXnvuI+cb9D99lqATcb8c0zsLMl/5SKEBDc\nj72X4GPfE3Dc5/MO6L/89ms3TqF3lx8lh'
|
||||
'wFSMfFfA3Nf5xrX3gnorGe81odXBXFveqMCemvfJYxg\nS9KPkM8CMnwn6yPS3ftW5xH3nMkC'
|
||||
'gYEAvN4lQuOTy9RONCtfgZ6lhR00xfDiibOsE2jFXqXlXrZS\nunBx2WRwNuhAcYGbC4T71iC'
|
||||
'BR+LJHECpFjEFX9cKjd8xZPdIzJmwMBylPnli8IxK9UMroxF/MDNy\nnJfdPIWagIrk9VRsQH'
|
||||
'UOQW8Ab5dYJuP6c03L5xwmnFfeFnlz10MCgYEAr4Iu182bC2ppwr5AYD8T\n/QKVPZTmizbtG'
|
||||
'H/7a2+WnfNCz2u0MOo2h1rF7/SOYR8nalTTsN1z4D8cRX7YQ0P4yBtNRNiN7WH3\n+smTWztI'
|
||||
'VYvJA2RsOeP0zfGLJiFSMWLOjlqpJ7KbkEuPcxshGd+/w8upxgJeV8Dwz0ZWbY302kUC\ngYE'
|
||||
'AhneTB+CHpaNuWm5W/S46ol9850DtySSG6vq5Kv3qJFii5eKQ7Do6Op145FdmT/lKY9WYtdmd'
|
||||
'\nXeQbfpVAQlAUT5YM0NnOlv0FF/wNGkHKU4FPDPfZ5avbZjH688qb1S86JTK+eHy25d1xXNz'
|
||||
'u7oRO\nWsIN2nIVLmI4iy90C4RFGYkCgYBXpKPtwk/VkItF46nUJku+Agcy3GOQS5p0rJyJ1w'
|
||||
'yYzbykRf2S\nm7MlPpAvtqlPGLafI8MexEe0SO++SIyIcq4Oh4u7gITHcS/bfcPnQCBsD8UOu'
|
||||
'5xMAGjkWuWI4gTg\ngp3xepaUK14B3anB6l9KQ3DIvrCGH/Kq0b+vUkmgpc4LHQKBgBtul9bN'
|
||||
'KLF+LJf4JHYNFSurE8Y/\nn8FZ3dZo3T0Q3Sap9bP3ZHemoQ6QXbmpu3H4Mf+2kcNg6YKFW3p'
|
||||
'hxW3cuAcZOMHPCrpr3mCdyhF0\nKM74ANEwg8MekBJTcWZUNFv9HZDvTuhp6HSrbMnNEQogkd'
|
||||
'5PoubiusvAKpeb6NBGnLMq\n'
|
||||
'-----END RSA PRIVATE KEY-----'
|
||||
)
|
||||
|
||||
PUBLIC_KEY = ('ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDIkYwwXm8UeQXx1c2eFrDIB6b'
|
||||
'6ApI0KTKs1wezDfFdSIs93vAt4Jx1MyaR/PwqwLk2CDyFoGJBWBI9YcodLAjoRg'
|
||||
'Ovr6JigEv5V3yp+eEkeAJO0cPA21vN/KQ8Vxml68ZvvqbdqKZXc/rpFZ1OgCmHt'
|
||||
'udo96uQiRB0FM3mdE8YOTswcfkJxTvCe3axX50pYXXfIb0dn9CzC1hyQWYPXvlv'
|
||||
'qFNvr/Li7sSBycTBAh4Ar/uEigs/uOjhvzd7GpzY7qDqBVJFAmP7HiiOxoXPkKu'
|
||||
'W62Ftd')
|
||||
|
||||
KEY_FINGERPRINT = '2a:72:dd:aa:0d:a6:45:4d:27:4f:75:28:73:0d:a6:10:35:88:e1:ce'
|
||||
|
||||
OS_KEY_PAIR = {'name': 'keyname',
|
||||
'private_key': PRIVATE_KEY,
|
||||
'public_key': PUBLIC_KEY,
|
||||
'fingerprint': KEY_FINGERPRINT}
|
||||
|
||||
EC2_KEY_PAIR = {'keyName': 'keyname',
|
||||
'keyFingerprint': KEY_FINGERPRINT,
|
||||
'keyMaterial': PRIVATE_KEY}
|
||||
|
||||
|
||||
# volume objects
|
||||
class CinderVolume(object):
|
||||
|
||||
|
@ -1373,6 +1344,59 @@ OS_VOLUME_3 = {
|
|||
}
|
||||
|
||||
|
||||
# availability zone objects
|
||||
|
||||
class NovaAvailabilityZone(object):
|
||||
|
||||
def __init__(self, nova_availability_zone_dict):
|
||||
self.zoneName = nova_availability_zone_dict['zoneName']
|
||||
self.zoneState = {'available':
|
||||
nova_availability_zone_dict['zoneState'] == 'available'}
|
||||
self.hosts = nova_availability_zone_dict['hosts']
|
||||
|
||||
OS_AVAILABILITY_ZONE = {'zoneName': 'nova',
|
||||
'zoneState': 'available',
|
||||
'hosts': {'host1': {'service1': {
|
||||
'active': 'True',
|
||||
'available': 'True',
|
||||
'updated_at': 'now'},
|
||||
'service2': {
|
||||
'active': 'False',
|
||||
'available': 'False',
|
||||
'updated_at': 'now'}},
|
||||
'host2': {'service1': {
|
||||
'active': 'True',
|
||||
'available': 'True',
|
||||
'updated_at': 'now'}}
|
||||
}}
|
||||
OS_AVAILABILITY_ZONE_INTERNAL = {'zoneName': 'internal',
|
||||
'zoneState': 'available',
|
||||
'hosts': {}}
|
||||
EC2_AVAILABILITY_ZONE = {'zoneName': 'nova',
|
||||
'zoneState': 'available'}
|
||||
|
||||
|
||||
# keypair objects
|
||||
|
||||
class NovaKeyPair(object):
|
||||
|
||||
def __init__(self, nova_keypair_dict):
|
||||
self.name = nova_keypair_dict['name']
|
||||
self.fingerprint = nova_keypair_dict['fingerprint']
|
||||
self.private_key = nova_keypair_dict['private_key']
|
||||
self.public_key = nova_keypair_dict['public_key']
|
||||
|
||||
|
||||
OS_KEY_PAIR = {'name': NAME_KEY_PAIR,
|
||||
'private_key': PRIVATE_KEY_KEY_PAIR,
|
||||
'public_key': PUBLIC_KEY_KEY_PAIR,
|
||||
'fingerprint': FINGERPRINT_KEY_PAIR}
|
||||
|
||||
EC2_KEY_PAIR = {'keyName': NAME_KEY_PAIR,
|
||||
'keyFingerprint': FINGERPRINT_KEY_PAIR,
|
||||
'keyMaterial': PRIVATE_KEY_KEY_PAIR}
|
||||
|
||||
|
||||
# Object generator functions section
|
||||
|
||||
# internet gateway generator functions
|
||||
|
@ -1459,7 +1483,8 @@ def gen_os_port(os_id, ec2_network_interface, os_subnet_id, fixed_ips,
|
|||
# instance generator functions
|
||||
def gen_ec2_instance(ec2_instance_id, private_ip_address='',
|
||||
ec2_network_interfaces=None, is_private_ip_in_vpc=True,
|
||||
floating_ip=None):
|
||||
floating_ip=None, image_id=None, kernel_id=None,
|
||||
ramdisk_id=None):
|
||||
"""Generate EC2 Instance dictionary.
|
||||
|
||||
private_ip_address must be specified as IP value or None
|
||||
|
@ -1473,7 +1498,7 @@ def gen_ec2_instance(ec2_instance_id, private_ip_address='',
|
|||
'placement': {'availabilityZone': None},
|
||||
'dnsName': floating_ip,
|
||||
'instanceState': {'code': 0, 'name': 'pending'},
|
||||
'imageId': None,
|
||||
'imageId': image_id,
|
||||
'productCodesSet': [],
|
||||
'privateDnsName': ec2_instance_id,
|
||||
'keyName': None,
|
||||
|
@ -1489,6 +1514,10 @@ def gen_ec2_instance(ec2_instance_id, private_ip_address='',
|
|||
ec2_instance['vpcId'] = ec2_network_interfaces[0]['vpcId']
|
||||
if private_ip_address and is_private_ip_in_vpc:
|
||||
ec2_instance['subnetId'] = ec2_network_interfaces[0]['subnetId']
|
||||
if kernel_id:
|
||||
ec2_instance['kernelId'] = kernel_id
|
||||
if ramdisk_id:
|
||||
ec2_instance['ramdiskId'] = ramdisk_id
|
||||
return ec2_instance
|
||||
|
||||
|
||||
|
|
|
@ -63,9 +63,14 @@ class InstanceTestCase(base.ApiTestCase):
|
|||
fakes.get_db_api_get_item_by_id(
|
||||
{fakes.ID_EC2_SUBNET_1: fakes.DB_SUBNET_1,
|
||||
fakes.ID_EC2_NETWORK_INTERFACE_1:
|
||||
copy.deepcopy(fakes.DB_NETWORK_INTERFACE_1)}))
|
||||
self.db_api.get_item_ids.return_value = [
|
||||
(fakes.ID_EC2_IMAGE_1, fakes.ID_OS_IMAGE_1)]
|
||||
copy.deepcopy(fakes.DB_NETWORK_INTERFACE_1),
|
||||
fakes.ID_EC2_IMAGE_1: fakes.DB_IMAGE_1}))
|
||||
self.db_api.get_item_ids.side_effect = (
|
||||
fakes.get_db_api_get_item_by_id({
|
||||
(fakes.ID_OS_IMAGE_ARI_1,): [(fakes.ID_EC2_IMAGE_ARI_1,
|
||||
fakes.ID_OS_IMAGE_ARI_1)],
|
||||
(fakes.ID_OS_IMAGE_AKI_1,): [(fakes.ID_EC2_IMAGE_AKI_1,
|
||||
fakes.ID_OS_IMAGE_AKI_1)]}))
|
||||
self.neutron.list_ports.return_value = (
|
||||
{'ports': [fakes.OS_PORT_1, fakes.OS_PORT_2]})
|
||||
self.create_network_interface.return_value = (
|
||||
|
@ -83,7 +88,8 @@ class InstanceTestCase(base.ApiTestCase):
|
|||
fakes.ID_EC2_SUBNET_1: [
|
||||
{'addr': fakes.IP_NETWORK_INTERFACE_1,
|
||||
'version': 4,
|
||||
'OS-EXT-IPS:type': 'fixed'}]}))
|
||||
'OS-EXT-IPS:type': 'fixed'}]},
|
||||
image={'id': fakes.ID_OS_IMAGE_1}))
|
||||
self.novadb.instance_get_by_uuid.return_value = fakes.NOVADB_INSTANCE_1
|
||||
self.novadb.block_device_mapping_get_all_by_instance.return_value = []
|
||||
fake_flavor = self.fake_flavor_class('fake_flavor')
|
||||
|
@ -122,7 +128,10 @@ class InstanceTestCase(base.ApiTestCase):
|
|||
[fakes.gen_ec2_instance(
|
||||
fakes.ID_EC2_INSTANCE_1,
|
||||
private_ip_address=fakes.IP_NETWORK_INTERFACE_1,
|
||||
ec2_network_interfaces=[eni])])
|
||||
ec2_network_interfaces=[eni],
|
||||
image_id=fakes.ID_EC2_IMAGE_1,
|
||||
kernel_id=fakes.ID_EC2_IMAGE_AKI_1,
|
||||
ramdisk_id=fakes.ID_EC2_IMAGE_ARI_1)])
|
||||
self.assertThat(resp, matchers.DictMatches(expected_reservation))
|
||||
if new_port:
|
||||
self.create_network_interface.assert_called_once_with(
|
||||
|
@ -136,8 +145,11 @@ class InstanceTestCase(base.ApiTestCase):
|
|||
security_groups=None,
|
||||
nics=[{'port-id': fakes.ID_OS_PORT_1}],
|
||||
key_name=None, userdata=None)
|
||||
self.db_api.get_item_ids.assert_called_once_with(
|
||||
mock.ANY, 'ami', (fakes.ID_EC2_IMAGE_1,))
|
||||
self.assertEqual(2, self.db_api.get_item_ids.call_count)
|
||||
self.db_api.get_item_ids.assert_any_call(
|
||||
mock.ANY, 'aki', (fakes.ID_OS_IMAGE_AKI_1,))
|
||||
self.db_api.get_item_ids.assert_any_call(
|
||||
mock.ANY, 'ari', (fakes.ID_OS_IMAGE_ARI_1,))
|
||||
self.db_api.update_item.assert_called_once_with(
|
||||
mock.ANY, db_attached_eni)
|
||||
self.isotime.assert_called_once_with(None, True)
|
||||
|
@ -491,6 +503,12 @@ class InstanceTestCase(base.ApiTestCase):
|
|||
if kind == 'i' else
|
||||
[fakes.DB_IMAGE_1, fakes.DB_IMAGE_2]
|
||||
if kind == 'ami' else [])
|
||||
self.db_api.get_item_ids.side_effect = (
|
||||
fakes.get_db_api_get_item_by_id({
|
||||
(fakes.ID_OS_IMAGE_ARI_1,): [(fakes.ID_EC2_IMAGE_ARI_1,
|
||||
fakes.ID_OS_IMAGE_ARI_1)],
|
||||
(fakes.ID_OS_IMAGE_AKI_1,): [(fakes.ID_EC2_IMAGE_AKI_1,
|
||||
fakes.ID_OS_IMAGE_AKI_1)]}))
|
||||
self.neutron.list_floatingips.return_value = (
|
||||
{'floatingips': [fakes.OS_FLOATING_IP_1,
|
||||
fakes.OS_FLOATING_IP_2]})
|
||||
|
|
|
@ -27,16 +27,16 @@ class KeyPairCase(base.ApiTestCase):
|
|||
def test_create_key_pair(self):
|
||||
self.nova_key_pairs.create.return_value = (
|
||||
fakes.NovaKeyPair(fakes.OS_KEY_PAIR))
|
||||
resp = self.execute('CreateKeyPair', {'KeyName': 'keyname'})
|
||||
resp = self.execute('CreateKeyPair', {'KeyName': fakes.NAME_KEY_PAIR})
|
||||
self.assertEqual(200, resp['http_status_code'])
|
||||
self.assertThat(fakes.EC2_KEY_PAIR, matchers.DictMatches(
|
||||
tools.purge_dict(resp, {'http_status_code'})))
|
||||
self.nova_key_pairs.create.assert_called_once_with('keyname')
|
||||
self.nova_key_pairs.create.assert_called_once_with(fakes.NAME_KEY_PAIR)
|
||||
|
||||
def test_create_key_pair_invalid(self):
|
||||
self.nova_key_pairs.create.side_effect = (
|
||||
nova_exception.Conflict(409))
|
||||
resp = self.execute('CreateKeyPair', {'KeyName': 'keyname'})
|
||||
resp = self.execute('CreateKeyPair', {'KeyName': fakes.NAME_KEY_PAIR})
|
||||
self.assertEqual(400, resp['http_status_code'])
|
||||
self.assertEqual('InvalidKeyPair.Duplicate', resp['Error']['Code'])
|
||||
resp = self.execute('CreateKeyPair', {'KeyName': 'k' * 256})
|
||||
|
@ -44,7 +44,7 @@ class KeyPairCase(base.ApiTestCase):
|
|||
self.assertEqual('InvalidParameterValue', resp['Error']['Code'])
|
||||
self.nova_key_pairs.create.side_effect = (
|
||||
nova_exception.OverLimit(413))
|
||||
resp = self.execute('CreateKeyPair', {'KeyName': 'keyname'})
|
||||
resp = self.execute('CreateKeyPair', {'KeyName': fakes.NAME_KEY_PAIR})
|
||||
self.assertEqual(400, resp['http_status_code'])
|
||||
self.assertEqual('ResourceLimitExceeded', resp['Error']['Code'])
|
||||
|
||||
|
@ -52,30 +52,30 @@ class KeyPairCase(base.ApiTestCase):
|
|||
self.nova_key_pairs.create.return_value = (
|
||||
fakes.NovaKeyPair(fakes.OS_KEY_PAIR))
|
||||
resp = self.execute('ImportKeyPair',
|
||||
{'KeyName': 'keyname',
|
||||
{'KeyName': fakes.NAME_KEY_PAIR,
|
||||
'PublicKeyMaterial': base64.b64encode(
|
||||
fakes.PUBLIC_KEY)})
|
||||
fakes.PUBLIC_KEY_KEY_PAIR)})
|
||||
self.assertEqual(200, resp['http_status_code'])
|
||||
self.assertThat(tools.purge_dict(fakes.EC2_KEY_PAIR, {'keyMaterial'}),
|
||||
matchers.DictMatches(tools.purge_dict(resp, {'http_status_code'})))
|
||||
self.nova_key_pairs.create.assert_called_once_with('keyname',
|
||||
fakes.PUBLIC_KEY)
|
||||
self.nova_key_pairs.create.assert_called_once_with(
|
||||
fakes.NAME_KEY_PAIR, fakes.PUBLIC_KEY_KEY_PAIR)
|
||||
|
||||
def test_import_key_pair_invalid(self):
|
||||
self.nova_key_pairs.create.side_effect = (
|
||||
nova_exception.OverLimit(413))
|
||||
resp = self.execute('ImportKeyPair',
|
||||
{'KeyName': 'keyname',
|
||||
{'KeyName': fakes.NAME_KEY_PAIR,
|
||||
'PublicKeyMaterial': base64.b64encode(
|
||||
fakes.PUBLIC_KEY)})
|
||||
fakes.PUBLIC_KEY_KEY_PAIR)})
|
||||
self.assertEqual(400, resp['http_status_code'])
|
||||
self.assertEqual('ResourceLimitExceeded', resp['Error']['Code'])
|
||||
|
||||
def test_delete_key_pair(self):
|
||||
self.nova_key_pairs.delete.return_value = True
|
||||
resp = self.execute('DeleteKeyPair', {'KeyName': 'keyname'})
|
||||
resp = self.execute('DeleteKeyPair', {'KeyName': fakes.NAME_KEY_PAIR})
|
||||
self.assertEqual(200, resp['http_status_code'])
|
||||
self.nova_key_pairs.delete.assert_called_once_with('keyname')
|
||||
self.nova_key_pairs.delete.assert_called_once_with(fakes.NAME_KEY_PAIR)
|
||||
self.nova_key_pairs.delete.side_effect = nova_exception.NotFound(404)
|
||||
resp = self.execute('DeleteKeyPair', {'KeyName': 'keyname1'})
|
||||
self.assertEqual(200, resp['http_status_code'])
|
||||
|
|
|
@ -18,7 +18,9 @@ from oslotest import base as test_base
|
|||
import testtools
|
||||
import webob
|
||||
|
||||
from ec2api import exception
|
||||
from ec2api import metadata
|
||||
from ec2api.tests import fakes
|
||||
from ec2api.tests import matchers
|
||||
|
||||
|
||||
|
@ -43,9 +45,101 @@ class ProxyTestCase(test_base.BaseTestCase):
|
|||
conf.set_override('metadata_proxy_shared_secret', 'secret',
|
||||
group='metadata')
|
||||
|
||||
@mock.patch('ec2api.metadata.api.get_version_list')
|
||||
def test_callable(self, get_version_list):
|
||||
get_version_list.return_value = 'foo'
|
||||
request = webob.Request.blank('/')
|
||||
response = request.get_response(self.handler)
|
||||
self.assertEqual(200, response.status_int)
|
||||
self.assertEqual('foo', response.body)
|
||||
|
||||
@mock.patch('ec2api.metadata.api.get_version_list')
|
||||
def test_root(self, get_version_list):
|
||||
get_version_list.return_value = 'fake_version'
|
||||
request = webob.Request.blank('/')
|
||||
response = request.get_response(self.handler)
|
||||
self.assertEqual('fake_version', response.body)
|
||||
response_ctype = response.headers['Content-Type']
|
||||
self.assertTrue(response_ctype.startswith("text/plain"))
|
||||
get_version_list.assert_called_with()
|
||||
|
||||
request = webob.Request.blank('/foo/../')
|
||||
response = request.get_response(self.handler)
|
||||
self.assertEqual('fake_version', response.body)
|
||||
|
||||
@mock.patch.object(metadata.MetadataRequestHandler, '_get_metadata')
|
||||
def test_version_root(self, get_metadata):
|
||||
get_metadata.return_value = 'fake'
|
||||
request = webob.Request.blank('/latest')
|
||||
response = request.get_response(self.handler)
|
||||
self.assertEqual('fake', response.body)
|
||||
response_ctype = response.headers['Content-Type']
|
||||
self.assertTrue(response_ctype.startswith("text/plain"))
|
||||
get_metadata.assert_called_with(mock.ANY, ['latest'])
|
||||
|
||||
get_metadata.side_effect = exception.EC2MetadataNotFound()
|
||||
request = webob.Request.blank('/latest')
|
||||
response = request.get_response(self.handler)
|
||||
self.assertEqual(404, response.status_int)
|
||||
|
||||
with mock.patch.object(metadata, 'LOG') as log:
|
||||
get_metadata.side_effect = Exception()
|
||||
request = webob.Request.blank('/latest')
|
||||
response = request.get_response(self.handler)
|
||||
self.assertEqual(500, response.status_int)
|
||||
self.assertEqual(len(log.mock_calls), 2)
|
||||
|
||||
@mock.patch('ec2api.metadata.api.get_metadata_item')
|
||||
@mock.patch('ec2api.metadata.api.get_os_instance_and_project_id')
|
||||
@mock.patch.object(metadata.MetadataRequestHandler, '_get_remote_ip')
|
||||
@mock.patch.object(metadata.MetadataRequestHandler, '_get_context')
|
||||
def test_get_metadata_by_ip(self, get_context, get_remote_ip, get_ids,
|
||||
get_metadata_item):
|
||||
get_context.return_value = mock.Mock(project_id='fake_admin_project')
|
||||
get_remote_ip.return_value = 'fake_instance_ip'
|
||||
get_ids.return_value = ('fake_instance_id', 'fake_project_id')
|
||||
get_metadata_item.return_value = 'fake_item'
|
||||
req = mock.Mock(headers={})
|
||||
|
||||
retval = self.handler._get_metadata(req, ['fake_ver', 'fake_attr'])
|
||||
self.assertEqual('fake_item', retval)
|
||||
get_context.assert_called_with()
|
||||
get_remote_ip.assert_called_with(req)
|
||||
get_ids.assert_called_with(get_context.return_value,
|
||||
'fake_instance_ip')
|
||||
get_metadata_item.assert_called_with(get_context.return_value,
|
||||
['fake_ver', 'fake_attr'],
|
||||
'fake_instance_id',
|
||||
'fake_instance_ip')
|
||||
self.assertEqual('fake_project_id',
|
||||
get_context.return_value.project_id)
|
||||
|
||||
@mock.patch('ec2api.metadata.api.get_metadata_item')
|
||||
@mock.patch.object(metadata.MetadataRequestHandler,
|
||||
'_unpack_request_attributes')
|
||||
@mock.patch.object(metadata.MetadataRequestHandler, '_get_context')
|
||||
def test_get_metadata_by_instance_id(self, get_context, unpack_request,
|
||||
get_metadata_item):
|
||||
get_context.return_value = mock.Mock(project_id='fake_admin_project')
|
||||
unpack_request.return_value = ('fake_instance_id', 'fake_project_id',
|
||||
'fake_instance_ip')
|
||||
get_metadata_item.return_value = 'fake_item'
|
||||
req = mock.Mock(headers={'X-Instance-ID': 'fake_instance_id'})
|
||||
|
||||
retval = self.handler._get_metadata(req, ['fake_ver', 'fake_attr'])
|
||||
self.assertEqual('fake_item', retval)
|
||||
get_context.assert_called_with()
|
||||
unpack_request.assert_called_with(req)
|
||||
get_metadata_item.assert_called_with(get_context.return_value,
|
||||
['fake_ver', 'fake_attr'],
|
||||
'fake_instance_id',
|
||||
'fake_instance_ip')
|
||||
self.assertEqual('fake_project_id',
|
||||
get_context.return_value.project_id)
|
||||
|
||||
@mock.patch.object(metadata.MetadataRequestHandler, '_proxy_request')
|
||||
def test_call(self, proxy):
|
||||
req = mock.Mock()
|
||||
def test_proxy_call(self, proxy):
|
||||
req = mock.Mock(path_info='/openstack')
|
||||
proxy.return_value = 'value'
|
||||
|
||||
retval = self.handler(req)
|
||||
|
@ -53,13 +147,24 @@ class ProxyTestCase(test_base.BaseTestCase):
|
|||
|
||||
@mock.patch.object(metadata, 'LOG')
|
||||
@mock.patch.object(metadata.MetadataRequestHandler, '_proxy_request')
|
||||
def test_call_internal_server_error(self, proxy, log):
|
||||
req = mock.Mock()
|
||||
proxy.side_effect = Exception
|
||||
def test_proxy_call_internal_server_error(self, proxy, log):
|
||||
req = mock.Mock(path_info='/openstack')
|
||||
proxy.side_effect = Exception()
|
||||
retval = self.handler(req)
|
||||
self.assertIsInstance(retval, webob.exc.HTTPInternalServerError)
|
||||
self.assertEqual(len(log.mock_calls), 2)
|
||||
|
||||
proxy.side_effect = exception.EC2MetadataException()
|
||||
retval = self.handler(req)
|
||||
self.assertIsInstance(retval, webob.exc.HTTPInternalServerError)
|
||||
|
||||
@mock.patch.object(metadata.MetadataRequestHandler, '_proxy_request')
|
||||
def test_proxy_call_no_instance(self, proxy):
|
||||
req = mock.Mock(path_info='/openstack')
|
||||
proxy.side_effect = exception.EC2MetadataNotFound()
|
||||
retval = self.handler(req)
|
||||
self.assertIsInstance(retval, webob.exc.HTTPNotFound)
|
||||
|
||||
@mock.patch.object(metadata.MetadataRequestHandler,
|
||||
'_build_proxy_request_headers')
|
||||
def _proxy_request_test_helper(self, build_headers,
|
||||
|
@ -67,7 +172,7 @@ class ProxyTestCase(test_base.BaseTestCase):
|
|||
hdrs = {'X-Forwarded-For': '8.8.8.8'}
|
||||
body = 'body'
|
||||
|
||||
req = mock.Mock(path_info='/the_path', query_string='', headers=hdrs,
|
||||
req = mock.Mock(path_info='/openstack', query_string='', headers=hdrs,
|
||||
method=method, body=body)
|
||||
resp = mock.MagicMock(status=response_code)
|
||||
req.response = resp
|
||||
|
@ -87,7 +192,7 @@ class ProxyTestCase(test_base.BaseTestCase):
|
|||
cfg.CONF.metadata.nova_metadata_port)
|
||||
),
|
||||
mock.call().request(
|
||||
'http://9.9.9.9:8775/the_path',
|
||||
'http://9.9.9.9:8775/openstack',
|
||||
method=method,
|
||||
headers={
|
||||
'X-Forwarded-For': '8.8.8.8',
|
||||
|
@ -138,19 +243,10 @@ class ProxyTestCase(test_base.BaseTestCase):
|
|||
with testtools.ExpectedException(Exception):
|
||||
self._proxy_request_test_helper(response_code=302)
|
||||
|
||||
@mock.patch.object(metadata.MetadataRequestHandler,
|
||||
'_build_proxy_request_headers')
|
||||
def test_proxy_request_no_headers(self, build_headers):
|
||||
build_headers.return_value = None
|
||||
self.assertIsInstance(
|
||||
self.handler._proxy_request('fake_request'),
|
||||
webob.exc.HTTPNotFound)
|
||||
build_headers.assert_called_once_with('fake_request')
|
||||
|
||||
@mock.patch.object(metadata.MetadataRequestHandler, '_sign_instance_id')
|
||||
@mock.patch.object(metadata.MetadataRequestHandler, '_get_context')
|
||||
@mock.patch.object(metadata.MetadataRequestHandler, '_get_instance_ip')
|
||||
def test_build_proxy_request_headers(self, get_instance_ip, get_context,
|
||||
@mock.patch.object(metadata.MetadataRequestHandler, '_get_remote_ip')
|
||||
def test_build_proxy_request_headers(self, get_remote_ip, get_context,
|
||||
sign_instance_id):
|
||||
req = mock.Mock(headers={})
|
||||
|
||||
|
@ -161,15 +257,12 @@ class ProxyTestCase(test_base.BaseTestCase):
|
|||
matchers.DictMatches(req.headers))
|
||||
|
||||
req.headers = {'fake_key': 'fake_value'}
|
||||
get_instance_ip.return_value = 'fake_instance_ip'
|
||||
get_remote_ip.return_value = 'fake_instance_ip'
|
||||
get_context.return_value = 'fake_context'
|
||||
sign_instance_id.return_value = 'signed'
|
||||
|
||||
with mock.patch('ec2api.metadata.api.'
|
||||
'get_instance_and_project_id') as get_ids:
|
||||
|
||||
get_ids.return_value = None, None
|
||||
self.assertIsNone(self.handler._build_proxy_request_headers(req))
|
||||
'get_os_instance_and_project_id') as get_ids:
|
||||
|
||||
get_ids.return_value = ('fake_instance_id', 'fake_project_id')
|
||||
self.assertThat(self.handler._build_proxy_request_headers(req),
|
||||
|
@ -178,31 +271,35 @@ class ProxyTestCase(test_base.BaseTestCase):
|
|||
'X-Instance-ID': 'fake_instance_id',
|
||||
'X-Tenant-ID': 'fake_project_id',
|
||||
'X-Instance-ID-Signature': 'signed'}))
|
||||
get_instance_ip.assert_called_with(req)
|
||||
get_remote_ip.assert_called_with(req)
|
||||
get_context.assert_called_with()
|
||||
sign_instance_id.assert_called_with('fake_instance_id')
|
||||
get_ids.assert_called_with('fake_context', 'fake_instance_ip')
|
||||
|
||||
get_ids.side_effect = exception.EC2MetadataNotFound()
|
||||
self.assertRaises(exception.EC2MetadataNotFound,
|
||||
self.handler._build_proxy_request_headers, req)
|
||||
|
||||
def test_sign_instance_id(self):
|
||||
self.assertEqual(
|
||||
self.handler._sign_instance_id('foo'),
|
||||
'773ba44693c7553d6ee20f61ea5d2757a9a4f4a44d2841ae4e95b52e4cd62db4'
|
||||
'773ba44693c7553d6ee20f61ea5d2757a9a4f4a44d2841ae4e95b52e4cd62db4',
|
||||
self.handler._sign_instance_id('foo')
|
||||
)
|
||||
|
||||
def test_get_instance_ip(self):
|
||||
def test_get_remote_ip(self):
|
||||
req = mock.Mock(remote_addr='fake_addr', headers={})
|
||||
|
||||
self.assertEqual('fake_addr', self.handler._get_instance_ip(req))
|
||||
self.assertEqual('fake_addr', self.handler._get_remote_ip(req))
|
||||
|
||||
cfg.CONF.set_override('use_forwarded_for', True)
|
||||
self.assertEqual('fake_addr', self.handler._get_instance_ip(req))
|
||||
self.assertEqual('fake_addr', self.handler._get_remote_ip(req))
|
||||
|
||||
req.headers['X-Forwarded-For'] = 'fake_forwarded_for'
|
||||
self.assertEqual('fake_forwarded_for',
|
||||
self.handler._get_instance_ip(req))
|
||||
self.handler._get_remote_ip(req))
|
||||
|
||||
cfg.CONF.set_override('use_forwarded_for', False)
|
||||
self.assertEqual('fake_addr', self.handler._get_instance_ip(req))
|
||||
self.assertEqual('fake_addr', self.handler._get_remote_ip(req))
|
||||
|
||||
@mock.patch('keystoneclient.v2_0.client.Client')
|
||||
def test_get_context(self, keystone):
|
||||
|
@ -217,9 +314,96 @@ class ProxyTestCase(test_base.BaseTestCase):
|
|||
self.assertEqual('fake_project_id', context.project_id)
|
||||
self.assertEqual('fake_token', context.auth_token)
|
||||
self.assertEqual('fake_service_catalog', context.service_catalog)
|
||||
self.assertTrue(context.is_admin)
|
||||
self.assertTrue(context.cross_tenants)
|
||||
conf = cfg.CONF
|
||||
keystone.assert_called_with(
|
||||
username=conf.metadata.admin_user,
|
||||
password=conf.metadata.admin_password,
|
||||
tenant_name=conf.metadata.admin_tenant_name,
|
||||
auth_url=conf.keystone_url)
|
||||
|
||||
def test_unpack_request_attributes(self):
|
||||
sign = (
|
||||
'97e7709481495f1a3a589e5ee03f8b5d51a3e0196768e300c441b58fe0382f4d')
|
||||
req = mock.Mock(headers={'X-Instance-ID': 'fake_instance_id',
|
||||
'X-Tenant-ID': 'fake_project_id',
|
||||
'X-Forwarded-For': 'fake_instance_ip',
|
||||
'X-Instance-ID-Signature': sign})
|
||||
retval = self.handler._unpack_request_attributes(req)
|
||||
self.assertEqual(
|
||||
('fake_instance_id', 'fake_project_id', 'fake_instance_ip'),
|
||||
retval)
|
||||
|
||||
req.headers['X-Instance-ID-Signature'] = 'fake'
|
||||
self.assertRaises(webob.exc.HTTPForbidden,
|
||||
self.handler._unpack_request_attributes, req)
|
||||
|
||||
req.headers.pop('X-Tenant-ID')
|
||||
self.assertRaises(webob.exc.HTTPBadRequest,
|
||||
self.handler._unpack_request_attributes, req)
|
||||
|
||||
req.headers.pop('X-Forwarded-For')
|
||||
self.assertRaises(exception.EC2MetadataInvalidAddress,
|
||||
self.handler._unpack_request_attributes, req)
|
||||
|
||||
@mock.patch('ec2api.utils.constant_time_compare')
|
||||
def test_usage_of_constant_time_compare(self, constant_time_compare):
|
||||
sign = (
|
||||
'97e7709481495f1a3a589e5ee03f8b5d51a3e0196768e300c441b58fe0382f4d')
|
||||
req = mock.Mock(headers={'X-Instance-ID': 'fake_instance_id',
|
||||
'X-Tenant-ID': 'fake_project_id',
|
||||
'X-Forwarded-For': 'fake_instance_ip',
|
||||
'X-Instance-ID-Signature': sign})
|
||||
self.handler._unpack_request_attributes(req)
|
||||
self.assertEqual(1, constant_time_compare.call_count)
|
||||
|
||||
@mock.patch('keystoneclient.v2_0.client.Client')
|
||||
@mock.patch('novaclient.v1_1.client.Client')
|
||||
@mock.patch('ec2api.db.api.IMPL')
|
||||
@mock.patch('ec2api.metadata.api.instance_api')
|
||||
@mock.patch('ec2api.metadata.api.novadb')
|
||||
def test_get_metadata(self, novadb, instance_api, db_api, nova, keystone):
|
||||
service_catalog = mock.MagicMock()
|
||||
service_catalog.get_data.return_value = []
|
||||
keystone.return_value = mock.Mock(auth_user_id='fake_user_id',
|
||||
auth_tenant_id='fake_project_id',
|
||||
auth_token='fake_token',
|
||||
service_catalog=service_catalog)
|
||||
nova.return_value.fixed_ips.get.return_value = (
|
||||
mock.Mock(hostname='fake_name'))
|
||||
nova.return_value.servers.list.return_value = [fakes.OS_INSTANCE_1]
|
||||
db_api.get_item_ids.return_value = [
|
||||
(fakes.ID_EC2_INSTANCE_1, fakes.ID_OS_INSTANCE_1)]
|
||||
instance_api.describe_instances.return_value = {
|
||||
'reservationSet': [fakes.EC2_RESERVATION_1]}
|
||||
instance_api.describe_instance_attribute.return_value = {
|
||||
'instanceId': fakes.ID_EC2_INSTANCE_1,
|
||||
'userData': {'value': 'fake_user_data'}}
|
||||
novadb.instance_get_by_uuid.return_value = fakes.NOVADB_INSTANCE_1
|
||||
novadb.block_device_mapping_get_all_by_instance.return_value = []
|
||||
novadb.instance_get_by_uuid.return_value = fakes.NOVADB_INSTANCE_1
|
||||
|
||||
def _test_metadata_path(relpath):
|
||||
# recursively confirm a http 200 from all meta-data elements
|
||||
# available at relpath.
|
||||
request = webob.Request.blank(
|
||||
relpath, remote_addr=fakes.IP_NETWORK_INTERFACE_2)
|
||||
response = request.get_response(self.handler)
|
||||
for item in response.body.split('\n'):
|
||||
if 'public-keys' in relpath:
|
||||
# meta-data/public-keys/0=keyname refers to
|
||||
# meta-data/public-keys/0
|
||||
item = item.split('=')[0]
|
||||
if item.endswith('/'):
|
||||
path = relpath + '/' + item
|
||||
_test_metadata_path(path)
|
||||
continue
|
||||
|
||||
path = relpath + '/' + item
|
||||
request = webob.Request.blank(
|
||||
path, remote_addr=fakes.IP_NETWORK_INTERFACE_2)
|
||||
response = request.get_response(self.handler)
|
||||
self.assertEqual(200, response.status_int, message=path)
|
||||
|
||||
_test_metadata_path('/latest')
|
||||
|
|
|
@ -15,6 +15,7 @@
|
|||
import mock
|
||||
from novaclient import exceptions as nova_exception
|
||||
|
||||
from ec2api import exception
|
||||
from ec2api.metadata import api
|
||||
from ec2api.tests import base
|
||||
from ec2api.tests import fakes
|
||||
|
@ -25,32 +26,190 @@ class MetadataApiTestCase(base.ApiTestCase):
|
|||
# fake context are. ApiTestCase should be split to some classes to use
|
||||
# its feature optimally
|
||||
|
||||
def setUp(self):
|
||||
super(MetadataApiTestCase, self).setUp()
|
||||
|
||||
novadb_patcher = mock.patch('ec2api.metadata.api.novadb')
|
||||
self.novadb = novadb_patcher.start()
|
||||
self.addCleanup(novadb_patcher.stop)
|
||||
|
||||
instance_api_patcher = mock.patch('ec2api.metadata.api.instance_api')
|
||||
self.instance_api = instance_api_patcher.start()
|
||||
self.addCleanup(instance_api_patcher.stop)
|
||||
|
||||
self.db_api.get_item_ids.return_value = [
|
||||
(fakes.ID_EC2_INSTANCE_1, fakes.ID_OS_INSTANCE_1)]
|
||||
self.instance_api.describe_instances.return_value = {
|
||||
'reservationSet': [fakes.EC2_RESERVATION_1]}
|
||||
self.instance_api.describe_instance_attribute.return_value = {
|
||||
'instanceId': fakes.ID_EC2_INSTANCE_1,
|
||||
'userData': {'value': 'fake_user_data'}}
|
||||
self.novadb.instance_get_by_uuid.return_value = fakes.NOVADB_INSTANCE_1
|
||||
self.novadb.block_device_mapping_get_all_by_instance.return_value = []
|
||||
self.novadb.instance_get_by_uuid.return_value = fakes.NOVADB_INSTANCE_1
|
||||
|
||||
self.fake_context = self._create_context()
|
||||
|
||||
def test_get_version_list(self):
|
||||
retval = api.get_version_list()
|
||||
self.assertEqual('\n'.join(api.VERSIONS + ['latest']), retval)
|
||||
|
||||
def test_get_instance_and_project_id(self):
|
||||
fake_context = self._create_context()
|
||||
|
||||
def check_none_result():
|
||||
self.assertEqual((None, None),
|
||||
api.get_instance_and_project_id(
|
||||
fake_context,
|
||||
fakes.IP_NETWORK_INTERFACE_2))
|
||||
|
||||
self.nova_fixed_ips.get.return_value = mock.Mock(hostname=None)
|
||||
check_none_result()
|
||||
|
||||
self.nova_servers.list.return_value = [fakes.OS_INSTANCE_2]
|
||||
check_none_result()
|
||||
|
||||
self.nova_servers.list.return_value = [fakes.OS_INSTANCE_1,
|
||||
fakes.OS_INSTANCE_2]
|
||||
self.nova_fixed_ips.get.return_value = mock.Mock(hostname='fake_name')
|
||||
self.assertEqual((fakes.ID_OS_INSTANCE_1, fakes.ID_OS_PROJECT),
|
||||
api.get_instance_and_project_id(
|
||||
fake_context, fakes.IP_NETWORK_INTERFACE_2))
|
||||
self.assertEqual(
|
||||
(fakes.ID_OS_INSTANCE_1, fakes.ID_OS_PROJECT),
|
||||
api.get_os_instance_and_project_id(self.fake_context,
|
||||
fakes.IP_NETWORK_INTERFACE_2))
|
||||
self.nova_fixed_ips.get.assert_called_with(
|
||||
fakes.IP_NETWORK_INTERFACE_2)
|
||||
self.nova_servers.list.assert_called_with(
|
||||
search_opts={'hostname': 'fake_name',
|
||||
'all_tenants': True})
|
||||
|
||||
def check_raise():
|
||||
self.assertRaises(exception.EC2MetadataNotFound,
|
||||
api.get_os_instance_and_project_id,
|
||||
self.fake_context,
|
||||
fakes.IP_NETWORK_INTERFACE_2)
|
||||
|
||||
self.nova_servers.list.return_value = [fakes.OS_INSTANCE_2]
|
||||
check_raise()
|
||||
|
||||
self.nova_fixed_ips.get.side_effect = nova_exception.NotFound('fake')
|
||||
check_none_result()
|
||||
self.nova_servers.list.return_value = [fakes.OS_INSTANCE_1,
|
||||
fakes.OS_INSTANCE_2]
|
||||
check_raise()
|
||||
|
||||
def test_get_version_root(self):
|
||||
retval = api.get_metadata_item(self.fake_context, ['2009-04-04'],
|
||||
fakes.ID_OS_INSTANCE_1,
|
||||
fakes.IP_NETWORK_INTERFACE_2)
|
||||
self.assertEqual('meta-data/\nuser-data', retval)
|
||||
|
||||
self.assertRaises(
|
||||
exception.EC2MetadataNotFound,
|
||||
api.get_metadata_item, self.fake_context, ['9999-99-99'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
|
||||
self.db_api.get_item_ids.assert_called_with(
|
||||
self.fake_context, 'i', (fakes.ID_OS_INSTANCE_1,))
|
||||
self.instance_api.describe_instances.assert_called_with(
|
||||
self.fake_context, [fakes.ID_EC2_INSTANCE_1])
|
||||
self.instance_api.describe_instance_attribute.assert_called_with(
|
||||
self.fake_context, fakes.ID_EC2_INSTANCE_1, 'userData')
|
||||
self.novadb.instance_get_by_uuid.assert_called_with(
|
||||
self.fake_context, fakes.ID_OS_INSTANCE_1)
|
||||
(self.novadb.block_device_mapping_get_all_by_instance.
|
||||
assert_called_with(self.fake_context, fakes.ID_OS_INSTANCE_1))
|
||||
|
||||
def test_invalid_path(self):
|
||||
self.assertRaises(exception.EC2MetadataNotFound,
|
||||
api.get_metadata_item, self.fake_context,
|
||||
['9999-99-99', 'user-data-invalid'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
|
||||
def test_mismatch_project_id(self):
|
||||
self.fake_context.project_id = fakes.random_os_id()
|
||||
self.assertRaises(
|
||||
exception.EC2MetadataNotFound,
|
||||
api.get_metadata_item, self.fake_context, ['2009-04-04'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
|
||||
def test_non_existing_instance(self):
|
||||
self.instance_api.describe_instances.return_value = {
|
||||
'reservationSet': []}
|
||||
self.assertRaises(
|
||||
exception.EC2MetadataNotFound,
|
||||
api.get_metadata_item, self.fake_context, ['2009-04-04'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
|
||||
def test_user_data(self):
|
||||
retval = api.get_metadata_item(
|
||||
self.fake_context, ['2009-04-04', 'user-data'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
self.assertEqual('fake_user_data', retval)
|
||||
|
||||
def test_no_user_data(self):
|
||||
self.instance_api.describe_instance_attribute.return_value = {
|
||||
'instanceId': fakes.ID_EC2_INSTANCE_1}
|
||||
self.assertRaises(
|
||||
exception.EC2MetadataNotFound,
|
||||
api.get_metadata_item, self.fake_context,
|
||||
['2009-04-04', 'user-data'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
|
||||
def test_security_groups(self):
|
||||
self.instance_api.describe_instances.return_value = {
|
||||
'reservationSet': [fakes.EC2_RESERVATION_2]}
|
||||
retval = api.get_metadata_item(
|
||||
self.fake_context,
|
||||
['2009-04-04', 'meta-data', 'security-groups'],
|
||||
fakes.ID_OS_INSTANCE_2, fakes.IP_NETWORK_INTERFACE_1)
|
||||
self.assertEqual('\n'.join([fakes.NAME_DEFAULT_OS_SECURITY_GROUP,
|
||||
fakes.NAME_OTHER_OS_SECURITY_GROUP]),
|
||||
retval)
|
||||
|
||||
def test_local_hostname(self):
|
||||
retval = api.get_metadata_item(
|
||||
self.fake_context,
|
||||
['2009-04-04', 'meta-data', 'local-hostname'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
self.assertEqual(fakes.EC2_INSTANCE_1['privateDnsName'], retval)
|
||||
|
||||
def test_local_ipv4(self):
|
||||
retval = api.get_metadata_item(
|
||||
self.fake_context,
|
||||
['2009-04-04', 'meta-data', 'local-ipv4'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
self.assertEqual(fakes.IP_NETWORK_INTERFACE_2, retval)
|
||||
|
||||
def test_local_ipv4_from_address(self):
|
||||
self.instance_api.describe_instances.return_value = {
|
||||
'reservationSet': [fakes.EC2_RESERVATION_2]}
|
||||
retval = api.get_metadata_item(
|
||||
self.fake_context,
|
||||
['2009-04-04', 'meta-data', 'local-ipv4'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_1)
|
||||
self.assertEqual(fakes.IP_NETWORK_INTERFACE_1, retval)
|
||||
|
||||
def test_pubkey(self):
|
||||
retval = api.get_metadata_item(
|
||||
self.fake_context,
|
||||
['2009-04-04', 'meta-data', 'public-keys'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
self.assertEqual('0=%s' % fakes.NAME_KEY_PAIR, retval)
|
||||
|
||||
retval = api.get_metadata_item(
|
||||
self.fake_context,
|
||||
['2009-04-04', 'meta-data', 'public-keys', '0', 'openssh-key'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
self.assertEqual(fakes.PUBLIC_KEY_KEY_PAIR, retval)
|
||||
|
||||
def test_image_type_ramdisk(self):
|
||||
retval = api.get_metadata_item(
|
||||
self.fake_context,
|
||||
['2009-04-04', 'meta-data', 'ramdisk-id'],
|
||||
fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
self.assertEqual(fakes.ID_EC2_IMAGE_ARI_1, retval)
|
||||
|
||||
def test_image_type_kernel(self):
    """The 'kernel-id' item returns the instance's AKI image ID."""
    result = api.get_metadata_item(
        self.fake_context,
        ['2009-04-04', 'meta-data', 'kernel-id'],
        fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
    self.assertEqual(fakes.ID_EC2_IMAGE_AKI_1, result)
|
||||
|
||||
def test_check_version(self):
    """Items are only visible from the metadata version they appeared in."""
    # 'block-device-mapping' exists in the 2009-04-04 metadata version...
    result = api.get_metadata_item(
        self.fake_context,
        ['2009-04-04', 'meta-data', 'block-device-mapping'],
        fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
    self.assertIsNotNone(result)

    # ...but requesting it through the older 2007-08-29 version must fail.
    self.assertRaises(
        exception.EC2MetadataNotFound,
        api.get_metadata_item, self.fake_context,
        ['2007-08-29', 'meta-data', 'block-device-mapping'],
        fakes.ID_OS_INSTANCE_1, fakes.IP_NETWORK_INTERFACE_2)
|
||||
|
|
|
@ -16,6 +16,7 @@
|
|||
"""Utilities and helper functions."""
|
||||
|
||||
import contextlib
|
||||
import hmac
|
||||
import shutil
|
||||
import socket
|
||||
import tempfile
|
||||
|
@ -68,3 +69,21 @@ def tempdir(**kwargs):
|
|||
shutil.rmtree(tmpdir)
|
||||
except OSError as e:
|
||||
LOG.error(_('Could not remove tmpdir: %s'), str(e))
|
||||
|
||||
|
||||
if hasattr(hmac, 'compare_digest'):
    # Python 2.7.7+/3.3+ ship a C-level timing-safe comparison; prefer it
    # whenever the interpreter provides one.
    constant_time_compare = hmac.compare_digest
else:
    def constant_time_compare(first, second):
        """Return True if both string inputs are equal, otherwise False.

        The running time depends only on the length of the inputs, never
        on where the first differing character is, so an attacker cannot
        learn a secret byte-by-byte from response timing.
        """
        if len(first) != len(second):
            return False
        mismatch = 0
        for a, b in zip(first, second):
            mismatch |= ord(a) ^ ord(b)
        return mismatch == 0
|
||||
|
|
14
install.sh
14
install.sh
|
@ -221,7 +221,12 @@ if [[ -z "$NOVA_CONNECTION" ]]; then
|
|||
fi
|
||||
die_if_not_set $LINENO NOVA_CONNECTION "$reason. Please set NOVA_CONNECTION environment variable to the connection string to Nova DB"
|
||||
fi
|
||||
if [[ -z "$EXTERNAL_NETWORK" ]]; then
|
||||
if [[ -n $(keystone catalog --service network) ]]; then
|
||||
VPC_SUPPORT="True"
|
||||
else
|
||||
VPC_SUPPORT="False"
|
||||
fi
|
||||
if [[ "$VPC_SUPPORT" == "True" && -z "$EXTERNAL_NETWORK" ]]; then
|
||||
declare -a newtron_output
|
||||
readarray -s 3 -t newtron_output < <(neutron net-external-list)
|
||||
if ((${#newtron_output[@]} < 2)); then
|
||||
|
@ -231,7 +236,7 @@ if [[ -z "$EXTERNAL_NETWORK" ]]; then
|
|||
else
|
||||
EXTERNAL_NETWORK=$(echo $newtron_output | awk -F '|' '{ print $3 }')
|
||||
fi
|
||||
die_if_not_set $LINENO EXTERNAL_NETWORK "$reason. Please set PUBLIC_NETWORK environment variable to the external network dedicated to EC2 elastic IP operations"
|
||||
die_if_not_set $LINENO EXTERNAL_NETWORK "$reason. Please set EXTERNAL_NETWORK environment variable to the external network dedicated to EC2 elastic IP operations"
|
||||
fi
|
||||
|
||||
#create keystone user with admin privileges
|
||||
|
@ -280,6 +285,7 @@ iniset $CONF_FILE DEFAULT verbose True
|
|||
iniset $CONF_FILE DEFAULT keystone_url "$OS_AUTH_URL"
|
||||
iniset $CONF_FILE database connection "$CONNECTION"
|
||||
iniset $CONF_FILE database connection_nova "$NOVA_CONNECTION"
|
||||
iniset $CONF_FILE DEFAULT full_vpc_support "$VPC_SUPPORT"
|
||||
iniset $CONF_FILE DEFAULT external_network "$EXTERNAL_NETWORK"
|
||||
|
||||
iniset $CONF_FILE keystone_authtoken signing_dir $SIGNING_DIR
|
||||
|
@ -290,6 +296,10 @@ iniset $CONF_FILE keystone_authtoken admin_tenant_name $SERVICE_TENANT
|
|||
iniset $CONF_FILE keystone_authtoken auth_protocol $AUTH_PROTO
|
||||
iniset $CONF_FILE keystone_authtoken auth_port $AUTH_PORT
|
||||
|
||||
iniset $CONF_FILE metadata admin_user $SERVICE_USERNAME
|
||||
iniset $CONF_FILE metadata admin_password $SERVICE_PASSWORD
|
||||
iniset $CONF_FILE metadata admin_tenant_name $SERVICE_TENANT
|
||||
|
||||
if [[ -f "$NOVA_CONF" ]]; then
|
||||
copynovaopt s3_host
|
||||
copynovaopt s3_port
|
||||
|
|
Loading…
Reference in New Issue