Implement Keypair sync for Kingbird
Keypair_sync syncs multiple keypairs from one region to other regions. Test cases are added for the same. Partially depends on: Iba1c789715aa7c7d00a7a2ec23a449c72a757ac1 Closes-Bug: #1640162 Change-Id: I42ed749f8e2c91740249735d32ef8494e50f17f8
This commit is contained in:
parent
ac58c2a3e4
commit
b4433b60c7
|
@ -17,6 +17,7 @@
|
|||
import pecan
|
||||
|
||||
from kingbird.api.controllers import quota_manager
|
||||
from kingbird.api.controllers import sync_manager
|
||||
from kingbird.api.controllers.v1 import quota_class
|
||||
|
||||
|
||||
|
@ -60,6 +61,7 @@ class V1Controller(object):
|
|||
self.sub_controllers = {
|
||||
"os-quota-sets": quota_manager.QuotaManagerController,
|
||||
"os-quota-class-sets": quota_class.QuotaClassSetController,
|
||||
"os-sync": sync_manager.ResourceSyncController
|
||||
}
|
||||
for name, ctrl in self.sub_controllers.items():
|
||||
setattr(self, name, ctrl)
|
||||
|
|
|
@ -0,0 +1,153 @@
|
|||
# Copyright (c) 2017 Ericsson AB
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import restcomm
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
import collections
|
||||
import pecan
|
||||
from pecan import expose
|
||||
from pecan import request
|
||||
|
||||
from kingbird.common import consts
|
||||
from kingbird.common import exceptions
|
||||
from kingbird.common.i18n import _
|
||||
from kingbird.db.sqlalchemy import api as db_api
|
||||
from kingbird.drivers.openstack import sdk
|
||||
from kingbird.rpc import client as rpc_client
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ResourceSyncController(object):
    """Pecan controller for the ``/v1.0/<project>/os-sync`` endpoint.

    Supports creating a sync job (POST), listing jobs or a job's
    per-resource detail (GET) and deleting a finished job (DELETE).
    Only keypair sync is implemented so far.
    """

    VERSION_ALIASES = {
        'Newton': '1.0',
    }

    def __init__(self, *args, **kwargs):
        super(ResourceSyncController, self).__init__(*args, **kwargs)
        self.rpc_client = rpc_client.EngineClient()

    # to do the version compatibility for future purpose
    def _determine_version_cap(self, target):
        version_cap = 1.0
        return version_cap

    @expose(generic=True, template='json')
    def index(self):
        # Route the request to specific methods with parameters
        pass

    @index.when(method='GET', template='json')
    def get(self, project, action=None):
        """List sync jobs, or the detail of one job.

        :param project: tenant id from the URL; must match the request
            context's project or a 400 is returned.
        :param action: ``None``/'active' lists jobs; a job uuid lists the
            per-resource rows of that job; anything else is a 400.
        :return: dict with a 'job_set' key.
        """
        context = restcomm.extract_context_from_environ()
        if not uuidutils.is_uuid_like(project) or project != context.project:
            pecan.abort(400, _('Invalid request URL'))
        result = collections.defaultdict(dict)
        if not action or action == 'active':
            result['job_set'] = db_api.sync_job_list(context, action)
        elif uuidutils.is_uuid_like(action):
            try:
                result['job_set'] = db_api.resource_sync_list_by_job(
                    context, action)
            except exceptions.JobNotFound:
                pecan.abort(404, _('Job not found'))
        else:
            pecan.abort(400, _('Invalid request URL'))
        return result

    @index.when(method='POST', template='json')
    def post(self, project):
        """Create a sync job and trigger the keypair sync via RPC.

        Validates the 'resource_set' payload (source region, target
        regions, resources and resource_type), verifies the keypairs
        exist in the source region, records the job and its per-region
        resources in the DB, then casts the sync to the engine.

        :param project: tenant id from the URL; must match the context.
        :return: dict with the new job's id, status and creation time.
        """
        context = restcomm.extract_context_from_environ()
        if not uuidutils.is_uuid_like(project) or project != context.project:
            pecan.abort(400, _('Invalid request URL'))
        if not request.body:
            pecan.abort(400, _('Body required'))
        # SECURITY: the body was previously parsed with eval(), which
        # executes arbitrary expressions supplied by the client.
        # ast.literal_eval accepts only Python/JSON-style literals, so it
        # parses the same dict payloads without code execution.
        import ast
        try:
            payload = ast.literal_eval(request.body)
        except (ValueError, SyntaxError):
            pecan.abort(400, _('Body required'))
        payload = payload.get('resource_set')
        if not payload:
            pecan.abort(400, _('resource_set required'))
        target_regions = payload.get('target')
        if not target_regions or not isinstance(target_regions, list):
            pecan.abort(400, _('Target regions required'))
        source_region = payload.get('source')
        if not source_region or not isinstance(source_region, str):
            pecan.abort(400, _('Source region required'))
        if payload.get('resource_type') == consts.KEYPAIR:
            user_id = context.user
            source_keys = payload.get('resources')
            if not source_keys:
                pecan.abort(400, _('Source keypairs required'))
            # Create Source Region object
            source_os_client = sdk.OpenStackDriver(source_region)
            # Check for keypairs in Source Region before recording the job
            for source_keypair in source_keys:
                source_keypair = source_os_client.get_keypairs(user_id,
                                                               source_keypair)
                if not source_keypair:
                    pecan.abort(404)
            job_id = uuidutils.generate_uuid()
            # Insert into the parent table
            try:
                result = db_api.sync_job_create(context, job_id=job_id)
            except exceptions.JobNotFound:
                pecan.abort(404, _('Job not found'))
            # Insert into the child table: one row per (region, keypair)
            for region in target_regions:
                for keypair in source_keys:
                    try:
                        db_api.resource_sync_create(context, result,
                                                    region, keypair,
                                                    consts.KEYPAIR)
                    except exceptions.JobNotFound:
                        pecan.abort(404, _('Job not found'))
            return self._keypair_sync(job_id, user_id, payload, context,
                                      result)
        else:
            pecan.abort(400, _('Bad resource_type'))

    @index.when(method='delete', template='json')
    def delete(self, project, job_id):
        """Delete a finished sync job from the database.

        :param project: tenant id from the URL; must match the context.
        :param job_id: uuid of the job to delete; in-progress jobs are
            rejected with 406.
        """
        context = restcomm.extract_context_from_environ()
        if not uuidutils.is_uuid_like(project) or project != context.project:
            pecan.abort(400, _('Invalid request URL'))
        if uuidutils.is_uuid_like(job_id):
            try:
                status = db_api.sync_job_status(context, job_id)
            except exceptions.JobNotFound:
                pecan.abort(404, _('Job not found'))
            if status == consts.JOB_PROGRESS:
                pecan.abort(406, _('action not supported'
                                   ' while sync is in progress'))
            try:
                db_api.sync_job_delete(context, job_id)
            except exceptions.JobNotFound:
                pecan.abort(404, _('Job not found'))
            return 'job %s deleted from the database.' % job_id
        else:
            pecan.abort(400, _('Bad request'))

    def _keypair_sync(self, job_id, user_id, payload, context, result):
        """Cast the keypair sync to the engine and build the API response."""
        self.rpc_client.keypair_sync_for_user(context, job_id, payload,
                                              user_id)

        return {'job_status': {'id': result.id, 'status': result.sync_status,
                               'created_at': result.created_at}}
|
|
@ -36,10 +36,14 @@ NEUTRON_QUOTA_FIELDS = ("network",
|
|||
"security_group_rule",
|
||||
)
|
||||
|
||||
SYNC_STATUS = "IN_PROGRESS"
|
||||
JOB_PROGRESS = "IN_PROGRESS"
|
||||
|
||||
RPC_API_VERSION = "1.0"
|
||||
|
||||
TOPIC_KB_ENGINE = "kingbird-engine"
|
||||
|
||||
KEYPAIR = "keypair"
|
||||
|
||||
JOB_SUCCESS = "SUCCESS"
|
||||
|
||||
JOB_FAILURE = "FAILURE"
|
||||
|
|
|
@ -402,7 +402,7 @@ def sync_job_list(context, action=None):
|
|||
if action == 'active':
|
||||
rows = model_query(context, models.SyncJob). \
|
||||
filter_by(user_id=context.user, project_id=context.project,
|
||||
sync_status=consts.SYNC_STATUS). \
|
||||
sync_status=consts.JOB_PROGRESS). \
|
||||
all()
|
||||
else:
|
||||
rows = model_query(context, models.SyncJob). \
|
||||
|
|
|
@ -25,7 +25,7 @@ def upgrade(migrate_engine):
|
|||
sqlalchemy.Column('id', sqlalchemy.String(36),
|
||||
primary_key=True),
|
||||
sqlalchemy.Column('sync_status', sqlalchemy.String(length=36),
|
||||
default=consts.SYNC_STATUS, nullable=False),
|
||||
default=consts.JOB_PROGRESS, nullable=False),
|
||||
sqlalchemy.Column('user_id', sqlalchemy.String(36),
|
||||
nullable=False),
|
||||
sqlalchemy.Column('project_id', sqlalchemy.String(36),
|
||||
|
@ -50,7 +50,7 @@ def upgrade(migrate_engine):
|
|||
sqlalchemy.Column('resource_type', sqlalchemy.String(36),
|
||||
nullable=False),
|
||||
sqlalchemy.Column('sync_status', sqlalchemy.String(36),
|
||||
default=consts.SYNC_STATUS, nullable=False),
|
||||
default=consts.JOB_PROGRESS, nullable=False),
|
||||
sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
|
||||
sqlalchemy.Column('created_at', sqlalchemy.DateTime),
|
||||
sqlalchemy.Column('deleted_at', sqlalchemy.DateTime),
|
||||
|
|
|
@ -156,7 +156,7 @@ class SyncJob(BASE, KingbirdBase):
|
|||
|
||||
id = Column('id', String(36), primary_key=True)
|
||||
|
||||
sync_status = Column(String(36), default=consts.SYNC_STATUS,
|
||||
sync_status = Column(String(36), default=consts.JOB_PROGRESS,
|
||||
nullable=False)
|
||||
|
||||
job_relation = relationship('ResourceSync', backref='sync_job')
|
||||
|
@ -180,5 +180,5 @@ class ResourceSync(BASE, KingbirdBase):
|
|||
|
||||
resource_type = Column('resource_type', String(36), nullable=False)
|
||||
|
||||
sync_status = Column(String(36), default=consts.SYNC_STATUS,
|
||||
sync_status = Column(String(36), default=consts.JOB_PROGRESS,
|
||||
nullable=False)
|
||||
|
|
|
@ -17,16 +17,17 @@ from oslo_log import log
|
|||
|
||||
from kingbird.common import consts
|
||||
from kingbird.common import exceptions
|
||||
from kingbird.common.i18n import _LI, _LE
|
||||
from kingbird.drivers import base
|
||||
|
||||
from novaclient import client
|
||||
|
||||
LOG = log.getLogger(__name__)
|
||||
API_VERSION = '2.1'
|
||||
API_VERSION = '2.37'
|
||||
|
||||
|
||||
class NovaClient(base.DriverBase):
|
||||
'''Nova V2.1 driver.'''
|
||||
'''Nova V2.37 driver.'''
|
||||
def __init__(self, region, disabled_quotas, session):
|
||||
try:
|
||||
self.nova_client = client.Client(API_VERSION,
|
||||
|
@ -40,7 +41,7 @@ class NovaClient(base.DriverBase):
|
|||
raise
|
||||
|
||||
def get_resource_usages(self, project_id):
|
||||
"""Collects resource usages for a given project
|
||||
"""Collect resource usages for a given project
|
||||
|
||||
:params: project_id
|
||||
:return: dictionary of corresponding resources with its usage
|
||||
|
@ -69,7 +70,7 @@ class NovaClient(base.DriverBase):
|
|||
raise
|
||||
|
||||
def update_quota_limits(self, project_id, **new_quota):
|
||||
"""Updates quota limits for a given project
|
||||
"""Update quota limits for a given project
|
||||
|
||||
:params: project_id, dictionary with the quota limits to update
|
||||
:return: Nothing
|
||||
|
@ -97,3 +98,37 @@ class NovaClient(base.DriverBase):
|
|||
return self.nova_client.quotas.delete(project_id)
|
||||
except exceptions.InternalError:
|
||||
raise
|
||||
|
||||
def get_keypairs(self, user_id, res_id):
    """Display keypair of the specified User.

    :params: user_id and resource_identifier
    :return: the Keypair, or None when the lookup fails
    """
    try:
        keypair = self.nova_client.keypairs.get(res_id, user_id)
        LOG.info(_LI("Source Keypair: %s"), keypair.name)
        return keypair
    except Exception as exception:
        # Best-effort lookup: callers treat a None return as "not found".
        # Log the exception itself rather than '.message', which was
        # removed in Python 3.
        LOG.error(_LE('Exception Occurred: %s'), exception)
        return None
|
||||
|
||||
def create_keypairs(self, force, keypair, user_id):
    """Create keypair for the specified User.

    :params: user_id, keypair, force
    :return: the created Keypair
    """
    if force:
        try:
            self.nova_client.keypairs.delete(keypair, user_id)
            LOG.info(_LI("Deleted Keypair: %s"), keypair.name)
        except Exception as exception:
            # Best-effort delete: a missing keypair must not abort the
            # sync.  Log the exception itself, not the py2-only
            # '.message' attribute.
            LOG.error(_LE('Exception Occurred: %s'), exception)
    created_keypair = self.nova_client.keypairs. \
        create(keypair.name,
               user_id=user_id,
               public_key=keypair.public_key)
    # Log after the create call so the message is only emitted when the
    # keypair was actually created (previously it was logged up front).
    LOG.info(_LI("Created Keypair: %s"), keypair.name)
    return created_keypair
|
||||
|
|
|
@ -182,3 +182,9 @@ class OpenStackDriver(object):
|
|||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
def get_keypairs(self, user_id, resource_identifier):
    """Fetch a keypair from this region via the Nova driver.

    Thin delegation to NovaClient.get_keypairs; returns whatever the
    driver returns (the keypair object, or None on a failed lookup).
    """
    return self.nova_client.get_keypairs(user_id, resource_identifier)
|
||||
|
||||
def create_keypairs(self, force, keypair, user_id):
    """Create a keypair in this region via the Nova driver.

    Thin delegation to NovaClient.create_keypairs; when *force* is
    truthy the driver deletes a same-named keypair first.
    """
    return self.nova_client.create_keypairs(force, keypair, user_id)
|
||||
|
|
|
@ -25,6 +25,7 @@ from kingbird.common.i18n import _, _LE, _LI
|
|||
from kingbird.common import messaging as rpc_messaging
|
||||
from kingbird.engine.quota_manager import QuotaManager
|
||||
from kingbird.engine import scheduler
|
||||
from kingbird.engine.sync_manager import SyncManager
|
||||
from kingbird.objects import service as service_obj
|
||||
from oslo_service import service
|
||||
from oslo_utils import timeutils
|
||||
|
@ -72,6 +73,7 @@ class EngineService(service.Service):
|
|||
self.target = None
|
||||
self._rpc_server = None
|
||||
self.qm = None
|
||||
self.sm = None
|
||||
|
||||
def init_tgm(self):
|
||||
self.TG = scheduler.ThreadGroupManager()
|
||||
|
@ -79,10 +81,14 @@ class EngineService(service.Service):
|
|||
def init_qm(self):
|
||||
self.qm = QuotaManager()
|
||||
|
||||
def init_sm(self):
    # Construct the SyncManager that serves keypair-sync RPC requests
    # (mirrors init_qm for the QuotaManager).
    self.sm = SyncManager()
|
||||
|
||||
def start(self):
|
||||
self.engine_id = uuidutils.generate_uuid()
|
||||
self.init_tgm()
|
||||
self.init_qm()
|
||||
self.init_sm()
|
||||
target = oslo_messaging.Target(version=self.rpc_api_version,
|
||||
server=self.host,
|
||||
topic=self.topic)
|
||||
|
@ -147,6 +153,11 @@ class EngineService(service.Service):
|
|||
project_id)
|
||||
self.qm.quota_sync_for_project(project_id)
|
||||
|
||||
@request_context
def keypair_sync_for_user(self, ctxt, user_id, job_id, payload):
    """RPC endpoint: sync a user's keypairs to the target regions.

    Triggered by the Kingbird API; delegates to the SyncManager.
    """
    # Keypair Sync for a user, will be triggered by KB-API
    self.sm.keypair_sync_for_user(user_id, job_id, payload)
|
||||
|
||||
def _stop_rpc_server(self):
|
||||
# Stop RPC connection to prevent new requests
|
||||
LOG.debug(_("Attempting to stop engine service..."))
|
||||
|
|
|
@ -0,0 +1,106 @@
|
|||
# Copyright 2017 Ericsson AB
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import threading
|
||||
|
||||
from oslo_log import log as logging
|
||||
|
||||
from kingbird.common import consts
|
||||
from kingbird.common import context
|
||||
from kingbird.common import exceptions
|
||||
from kingbird.common.i18n import _LE
|
||||
from kingbird.common.i18n import _LI
|
||||
from kingbird.common import manager
|
||||
from kingbird.db.sqlalchemy import api as db_api
|
||||
from kingbird.drivers.openstack import sdk
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SyncManager(manager.Manager):
    """Manages tasks related to resource management."""

    def __init__(self, *args, **kwargs):
        super(SyncManager, self).__init__(service_name="sync_manager",
                                          *args, **kwargs)
        self.context = context.get_admin_context()

    def _create_keypairs_in_region(self, job_id, force, target_regions,
                                   source_keypair, user_id):
        """Fan one keypair's creation out to every target region in parallel."""
        regions_thread = list()
        for region in target_regions:
            thread = threading.Thread(target=self._create_keypairs,
                                      args=(job_id, force, region,
                                            source_keypair, user_id,))
            regions_thread.append(thread)
            thread.start()
        for region_thread in regions_thread:
            region_thread.join()

    def _create_keypairs(self, job_id, force, region, source_keypair, user_id):
        """Create one keypair in one region and record the per-resource result.

        Marks the (job, region, keypair) row JOB_SUCCESS on success and
        JOB_FAILURE on any creation error; best-effort, never raises for
        a creation failure.
        """
        os_client = sdk.OpenStackDriver(region)
        try:
            os_client.create_keypairs(force, source_keypair, user_id)
            LOG.info(_LI('keypair %(keypair)s created in %(region)s')
                     % {'keypair': source_keypair.name, 'region': region})
            try:
                db_api.resource_sync_update(self.context, job_id, region,
                                            source_keypair.name,
                                            consts.JOB_SUCCESS)
            # BUG FIX: the except clause previously read
            # 'except exceptions.JobNotFound():', which instantiates the
            # exception class instead of naming it and therefore never
            # matches correctly (TypeError on Python 3).
            except exceptions.JobNotFound:
                raise
        except Exception as exc:
            # Log the exception itself, not the py2-only '.message'.
            LOG.error(_LE('Exception Occurred: %(msg)s in %(region)s')
                      % {'msg': exc, 'region': region})
            try:
                db_api.resource_sync_update(self.context, job_id, region,
                                            source_keypair.name,
                                            consts.JOB_FAILURE)
            except exceptions.JobNotFound:
                raise

    def keypair_sync_for_user(self, user_id, job_id, payload):
        """Sync the requested keypairs from the source to all target regions.

        Spawns one thread per source keypair (each of which fans out per
        region), waits for completion, then rolls the per-resource
        statuses up into the parent job's final status.
        """
        LOG.info(_LI("Keypair sync Called for user: %s"),
                 user_id)
        keypairs_thread = list()
        target_regions = payload['target']
        # SECURITY: 'force' may arrive as a bool or as the strings
        # "True"/"False"; it was previously parsed with eval(), which
        # executes arbitrary expressions.  ast.literal_eval accepts only
        # literals and yields the same values for the expected inputs.
        import ast
        force = ast.literal_eval(str(payload.get('force', False)))
        resource_ids = payload.get('resources')
        source_os_client = sdk.OpenStackDriver(payload['source'])
        for keypair in resource_ids:
            source_keypair = source_os_client.get_keypairs(user_id,
                                                           keypair)
            thread = threading.Thread(target=self._create_keypairs_in_region,
                                      args=(job_id, force, target_regions,
                                            source_keypair, user_id,))
            keypairs_thread.append(thread)
            thread.start()
        for keypair_thread in keypairs_thread:
            keypair_thread.join()

        # Update the parent_db after the sync
        try:
            resource_sync_details = db_api.\
                resource_sync_status(self.context, job_id)
        except exceptions.JobNotFound:
            raise
        result = consts.JOB_SUCCESS
        # Any single failed resource marks the whole job as failed.
        if consts.JOB_FAILURE in resource_sync_details:
            result = consts.JOB_FAILURE
        try:
            db_api.sync_job_update(self.context, job_id, result)
        except exceptions.JobNotFound:
            raise
|
|
@ -68,3 +68,9 @@ class EngineClient(object):
|
|||
def quota_sync_for_project(self, ctxt, project_id):
|
||||
return self.cast(ctxt, self.make_msg('quota_sync_for_project',
|
||||
project_id=project_id))
|
||||
|
||||
def keypair_sync_for_user(self, ctxt, job_id, payload, user_id):
    """Cast an asynchronous 'keypair_sync_for_user' request to the engine."""
    msg = self.make_msg('keypair_sync_for_user',
                        user_id=user_id, job_id=job_id, payload=payload)
    return self.cast(ctxt, msg)
|
||||
|
|
|
@ -0,0 +1,225 @@
|
|||
# Copyright (c) 2017 Ericsson AB
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
import webtest
|
||||
|
||||
from oslo_utils import timeutils
|
||||
|
||||
from kingbird.api.controllers import sync_manager
|
||||
from kingbird.common import consts
|
||||
from kingbird.rpc import client as rpc_client
|
||||
from kingbird.tests.unit.api import testroot
|
||||
from kingbird.tests import utils
|
||||
|
||||
DEFAULT_FORCE = False
|
||||
SOURCE_KEYPAIR = 'fake_key1'
|
||||
FAKE_TARGET_REGION = ['fake_target_region']
|
||||
FAKE_SOURCE_REGION = 'fake_source_region'
|
||||
FAKE_RESOURCE_ID = 'fake_id'
|
||||
FAKE_RESOURCE_TYPE = 'keypair'
|
||||
FAKE_TENANT = utils.UUID1
|
||||
FAKE_JOB = utils.UUID2
|
||||
FAKE_URL = '/v1.0/' + FAKE_TENANT + '/os-sync/'
|
||||
WRONG_URL = '/v1.0/wrong/os-sync/'
|
||||
fake_user = utils.UUID3
|
||||
FAKE_STATUS = consts.JOB_PROGRESS
|
||||
FAKE_HEADERS = {'X-Tenant-Id': FAKE_TENANT, 'X_ROLE': 'admin'}
|
||||
NON_ADMIN_HEADERS = {'X-Tenant-Id': FAKE_TENANT}
|
||||
|
||||
|
||||
class FakeKeypair(object):
    """Minimal stand-in for a Nova keypair object used by these tests."""

    def __init__(self, name, public_key):
        # Only the two attributes the controller and driver touch.
        self.name, self.public_key = name, public_key
|
||||
|
||||
|
||||
class SyncJob(object):
    """Minimal stand-in for a sync-job database record."""

    def __init__(self, id, sync_status, created_at):
        # Mirror the three columns the API response serializes.
        for attr, value in (('id', id),
                            ('sync_status', sync_status),
                            ('created_at', created_at)):
            setattr(self, attr, value)
|
||||
|
||||
|
||||
class TestResourceManager(testroot.KBApiTest):
    """API tests for ResourceSyncController (/v1.0/<tenant>/os-sync)."""

    def setUp(self):
        super(TestResourceManager, self).setUp()
        self.ctx = utils.dummy_context()

    @mock.patch.object(rpc_client, 'EngineClient')
    @mock.patch.object(sync_manager, 'sdk')
    @mock.patch.object(sync_manager, 'db_api')
    def test_post_keypair_sync(self, mock_db_api, mock_sdk, mock_rpc_client):
        # Happy path: a valid keypair resource_set creates one job row,
        # one resource row, and returns 200.
        time_now = timeutils.utcnow()
        # NOTE(review): FAKE_TARGET_REGION is itself a list, so 'target'
        # here is a nested list; the controller only checks
        # isinstance(target, list) — confirm this is intentional.
        data = {"resource_set": {"resources": [SOURCE_KEYPAIR],
                                 "resource_type": "keypair",
                                 "force": "True",
                                 "source": FAKE_SOURCE_REGION,
                                 "target": [FAKE_TARGET_REGION]}}
        fake_key = FakeKeypair('fake_name', 'fake-rsa')
        sync_job_result = SyncJob(FAKE_JOB, consts.JOB_PROGRESS, time_now)
        mock_sdk.OpenStackDriver().get_keypairs.return_value = fake_key
        mock_db_api.sync_job_create.return_value = sync_job_result
        response = self.app.post_json(FAKE_URL,
                                      headers=FAKE_HEADERS,
                                      params=data)
        self.assertEqual(1,
                         mock_sdk.OpenStackDriver().get_keypairs.call_count)
        self.assertEqual(1,
                         mock_db_api.resource_sync_create.call_count)
        self.assertEqual(1,
                         mock_db_api.sync_job_create.call_count)
        self.assertEqual(response.status_int, 200)

    @mock.patch.object(rpc_client, 'EngineClient')
    def test_post_keypair_sync_wrong_url(self, mock_rpc_client):
        # A non-uuid tenant in the URL must be rejected with 400.
        data = {"resource_set": {"resources": [SOURCE_KEYPAIR],
                                 "force": "True",
                                 "source": FAKE_SOURCE_REGION,
                                 "target": [FAKE_TARGET_REGION]}}
        self.assertRaisesRegexp(webtest.app.AppError, "400 *",
                                self.app.post_json, WRONG_URL,
                                headers=FAKE_HEADERS, params=data)

    @mock.patch.object(rpc_client, 'EngineClient')
    def test_post_no_body(self, mock_rpc_client):
        # An empty body is rejected with 400 ('Body required').
        data = {}
        self.assertRaisesRegexp(webtest.app.AppError, "400 *",
                                self.app.post_json, FAKE_URL,
                                headers=FAKE_HEADERS, params=data)

    @mock.patch.object(rpc_client, 'EngineClient')
    def test_post_wrong_payload(self, mock_rpc_client):
        # Payload missing the 'resource_set' wrapper -> 400.
        data = {"resources": [SOURCE_KEYPAIR],
                "force": "True", "source": FAKE_SOURCE_REGION,
                "target": [FAKE_TARGET_REGION]}
        self.assertRaisesRegexp(webtest.app.AppError, "400 *",
                                self.app.post_json, FAKE_URL,
                                headers=FAKE_HEADERS, params=data)

    @mock.patch.object(rpc_client, 'EngineClient')
    def test_post_no_target_regions(self, mock_rpc_client):
        # Missing 'target' key -> 400 ('Target regions required').
        data = {"resource_set": {"resources": [SOURCE_KEYPAIR],
                                 "force": "True",
                                 "source": FAKE_SOURCE_REGION}}
        self.assertRaisesRegexp(webtest.app.AppError, "400 *",
                                self.app.post_json, FAKE_URL,
                                headers=FAKE_HEADERS, params=data)

    @mock.patch.object(rpc_client, 'EngineClient')
    def test_post_no_source_regions(self, mock_rpc_client):
        # Missing 'source' key -> 400 ('Source region required').
        data = {"resource_set": {"resources": [SOURCE_KEYPAIR],
                                 "force": "True",
                                 "target": [FAKE_TARGET_REGION]}}
        self.assertRaisesRegexp(webtest.app.AppError, "400 *",
                                self.app.post_json, FAKE_URL,
                                headers=FAKE_HEADERS, params=data)

    @mock.patch.object(rpc_client, 'EngineClient')
    def test_post_no_keys_in_body(self, mock_rpc_client):
        # Missing 'resources' key -> 400.
        # NOTE(review): without 'resource_type' this actually trips the
        # 'Bad resource_type' 400 branch first — confirm intent.
        data = {"resource_set": {"force": "True",
                                 "source": FAKE_SOURCE_REGION,
                                 "target": [FAKE_TARGET_REGION]}}
        self.assertRaisesRegexp(webtest.app.AppError, "400 *",
                                self.app.post_json, FAKE_URL,
                                headers=FAKE_HEADERS, params=data)

    @mock.patch.object(rpc_client, 'EngineClient')
    def test_post_no_resource_type(self, mock_rpc_client):
        # Missing 'resource_type' -> 400 ('Bad resource_type').
        data = {"resource_set": {"resources": [SOURCE_KEYPAIR],
                                 "force": "True",
                                 "source": FAKE_SOURCE_REGION,
                                 "target": [FAKE_TARGET_REGION]}}
        self.assertRaisesRegexp(webtest.app.AppError, "400 *",
                                self.app.post_json, FAKE_URL,
                                headers=FAKE_HEADERS, params=data)

    @mock.patch.object(rpc_client, 'EngineClient')
    @mock.patch.object(sync_manager, 'sdk')
    def test_post_no_keypairs_in_region(self, mock_sdk, mock_rpc_client):
        # Source region lookup returns None -> 404 before any DB writes.
        data = {"resource_set": {"resources": [SOURCE_KEYPAIR],
                                 "resource_type": "keypair",
                                 "force": "True",
                                 "source": FAKE_SOURCE_REGION,
                                 "target": [FAKE_TARGET_REGION]}}
        mock_sdk.OpenStackDriver().get_keypairs.return_value = None
        self.assertRaisesRegexp(webtest.app.AppError, "404 *",
                                self.app.post_json, FAKE_URL,
                                headers=FAKE_HEADERS, params=data)

    @mock.patch.object(rpc_client, 'EngineClient')
    @mock.patch.object(sync_manager, 'db_api')
    def test_delete_jobs(self, mock_db_api, mock_rpc_client):
        # Valid DELETE removes the job row exactly once.
        delete_url = FAKE_URL + '/' + FAKE_JOB
        self.app.delete_json(delete_url, headers=FAKE_HEADERS)
        self.assertEqual(1, mock_db_api.sync_job_delete.call_count)

    @mock.patch.object(rpc_client, 'EngineClient')
    def test_delete_wrong_request(self, mock_rpc_client):
        # DELETE with a non-uuid tenant in the URL -> 400.
        delete_url = WRONG_URL + '/' + FAKE_JOB
        self.assertRaisesRegex(webtest.app.AppError, "400 *",
                               self.app.delete_json, delete_url,
                               headers=FAKE_HEADERS)

    @mock.patch.object(rpc_client, 'EngineClient')
    def test_delete_invalid_job_id(self, mock_rpc_client):
        # DELETE with a non-uuid job id -> 400 ('Bad request').
        delete_url = FAKE_URL + '/fake'
        self.assertRaisesRegex(webtest.app.AppError, "400 *",
                               self.app.delete_json, delete_url,
                               headers=FAKE_HEADERS)

    @mock.patch.object(rpc_client, 'EngineClient')
    @mock.patch.object(sync_manager, 'db_api')
    def test_delete_in_progress_job(self, mock_db_api, mock_rpc_client):
        # Deleting an in-progress job is rejected (406 path).
        # NOTE(review): KeyError is what webtest surfaces here for the
        # 406 abort — confirm this is the intended assertion.
        delete_url = FAKE_URL + '/' + FAKE_JOB
        mock_db_api.sync_job_status.return_value = consts.JOB_PROGRESS
        self.assertRaises(KeyError, self.app.delete_json, delete_url,
                          headers=FAKE_HEADERS)

    @mock.patch.object(rpc_client, 'EngineClient')
    @mock.patch.object(sync_manager, 'db_api')
    def test_get_job(self, mock_db_api, mock_rpc_client):
        # Plain GET lists the tenant's jobs.
        get_url = FAKE_URL
        self.app.get(get_url, headers=FAKE_HEADERS)
        self.assertEqual(1, mock_db_api.sync_job_list.call_count)

    @mock.patch.object(rpc_client, 'EngineClient')
    @mock.patch.object(sync_manager, 'db_api')
    def test_get_wrong_request(self, mock_db_api, mock_rpc_client):
        # GET with a non-uuid tenant in the URL -> 400.
        get_url = WRONG_URL + '/list'
        self.assertRaisesRegex(webtest.app.AppError, "400 *",
                               self.app.get, get_url,
                               headers=FAKE_HEADERS)

    @mock.patch.object(rpc_client, 'EngineClient')
    @mock.patch.object(sync_manager, 'db_api')
    def test_get_wrong_action(self, mock_db_api, mock_rpc_client):
        # GET with an action that is neither 'active' nor a uuid -> error.
        get_url = FAKE_URL + '/fake'
        self.assertRaises(webtest.app.AppError, self.app.get, get_url,
                          headers=FAKE_HEADERS)

    @mock.patch.object(rpc_client, 'EngineClient')
    @mock.patch.object(sync_manager, 'db_api')
    def test_get_active_job(self, mock_db_api, mock_rpc_client):
        # GET .../active lists only in-progress jobs.
        get_url = FAKE_URL + '/active'
        self.app.get(get_url, headers=FAKE_HEADERS)
        self.assertEqual(1, mock_db_api.sync_job_list.call_count)

    @mock.patch.object(rpc_client, 'EngineClient')
    @mock.patch.object(sync_manager, 'db_api')
    def test_get_detail_job(self, mock_db_api, mock_rpc_client):
        # GET .../<job uuid> returns the per-resource detail of that job.
        get_url = FAKE_URL + '/' + FAKE_JOB
        self.app.get(get_url, headers=FAKE_HEADERS)
        self.assertEqual(1, mock_db_api.resource_sync_list_by_job.call_count)
|
|
@ -116,9 +116,11 @@ class TestV1Controller(KBApiTest):
|
|||
v1_link = links[0]
|
||||
quota_class_link = links[1]
|
||||
quota_manager_link = links[2]
|
||||
sync_manager_link = links[3]
|
||||
self.assertEqual('self', v1_link['rel'])
|
||||
self.assertEqual('os-quota-sets', quota_manager_link['rel'])
|
||||
self.assertEqual('os-quota-class-sets', quota_class_link['rel'])
|
||||
self.assertEqual('os-sync', sync_manager_link['rel'])
|
||||
|
||||
def _test_method_returns_405(self, method):
|
||||
api_method = getattr(self.app, method)
|
||||
|
|
|
@ -72,9 +72,10 @@ class DBAPIResourceSyncTest(base.KingbirdTestCase):
|
|||
def test_create_sync_job(self):
|
||||
job = self.sync_job_create(self.ctx, job_id=UUID1)
|
||||
self.assertIsNotNone(job)
|
||||
self.assertEqual(consts.SYNC_STATUS, job.sync_status)
|
||||
self.assertEqual(consts.JOB_PROGRESS, job.sync_status)
|
||||
created_job = db_api.sync_job_list(self.ctx, "active")
|
||||
self.assertEqual(consts.SYNC_STATUS, created_job[0].get('sync_status'))
|
||||
self.assertEqual(consts.JOB_PROGRESS,
|
||||
created_job[0].get('sync_status'))
|
||||
|
||||
def test_primary_key_sync_job(self):
|
||||
self.sync_job_create(self.ctx, job_id=UUID1)
|
||||
|
@ -98,7 +99,7 @@ class DBAPIResourceSyncTest(base.KingbirdTestCase):
|
|||
job = self.sync_job_create(self.ctx, job_id=UUID1)
|
||||
self.assertIsNotNone(job)
|
||||
query = db_api.sync_job_status(self.ctx, job_id=UUID1)
|
||||
self.assertEqual(query, consts.SYNC_STATUS)
|
||||
self.assertEqual(query, consts.JOB_PROGRESS)
|
||||
|
||||
def test_update_invalid_job(self):
|
||||
job = self.sync_job_create(self.ctx, job_id=UUID1)
|
||||
|
@ -113,7 +114,7 @@ class DBAPIResourceSyncTest(base.KingbirdTestCase):
|
|||
self.ctx, job=job, region='Fake_region',
|
||||
resource='fake_key', resource_type='keypair')
|
||||
self.assertIsNotNone(resource_sync_create)
|
||||
self.assertEqual(consts.SYNC_STATUS, resource_sync_create.sync_status)
|
||||
self.assertEqual(consts.JOB_PROGRESS, resource_sync_create.sync_status)
|
||||
|
||||
def test_resource_sync_status(self):
|
||||
job = self.sync_job_create(self.ctx, job_id=UUID1)
|
||||
|
@ -122,7 +123,7 @@ class DBAPIResourceSyncTest(base.KingbirdTestCase):
|
|||
resource='fake_key', resource_type='keypair')
|
||||
self.assertIsNotNone(resource_sync_create)
|
||||
status = db_api.resource_sync_status(self.ctx, job.id)
|
||||
self.assertEqual(consts.SYNC_STATUS, status[0])
|
||||
self.assertEqual(consts.JOB_PROGRESS, status[0])
|
||||
|
||||
def test_resource_sync_update(self):
|
||||
job = self.sync_job_create(self.ctx, job_id=UUID1)
|
||||
|
@ -130,7 +131,7 @@ class DBAPIResourceSyncTest(base.KingbirdTestCase):
|
|||
self.ctx, job=job, region='Fake_region',
|
||||
resource='fake_key', resource_type='keypair')
|
||||
self.assertIsNotNone(resource_sync_create)
|
||||
self.assertEqual(consts.SYNC_STATUS, resource_sync_create.sync_status)
|
||||
self.assertEqual(consts.JOB_PROGRESS, resource_sync_create.sync_status)
|
||||
db_api.resource_sync_update(
|
||||
self.ctx, job.id, 'Fake_region', 'fake_key', consts.JOB_SUCCESS)
|
||||
updated_job = db_api.resource_sync_list_by_job(self.ctx, job.id)
|
||||
|
|
|
@ -51,6 +51,14 @@ FAKE_LIMITS = {'absolute':
|
|||
u'maxTotalRAMSize': 51200, u'maxTotalCores': 10
|
||||
}
|
||||
}
|
||||
FAKE_RESOURCE_ID = 'fake_id'
|
||||
DEFAULT_FORCE = False
|
||||
|
||||
|
||||
class FakeKeypair(object):
|
||||
def __init__(self, name, public_key):
|
||||
self.name = name
|
||||
self.public_key = public_key
|
||||
|
||||
|
||||
class TestNovaClient(base.KingbirdTestCase):
|
||||
|
@ -59,6 +67,7 @@ class TestNovaClient(base.KingbirdTestCase):
|
|||
self.ctx = utils.dummy_context()
|
||||
self.session = 'fake_session'
|
||||
self.project = 'fake_project'
|
||||
self.user = 'fake_user'
|
||||
|
||||
def test_init(self):
|
||||
nv_client = nova_v2.NovaClient('fake_region', DISABLED_QUOTAS,
|
||||
|
@ -104,3 +113,36 @@ class TestNovaClient(base.KingbirdTestCase):
|
|||
|
||||
mock_novaclient.Client().quotas.delete.assert_called_once_with(
|
||||
self.project)
|
||||
|
||||
@mock.patch.object(nova_v2, 'client')
|
||||
def test_get_keypairs(self, mock_novaclient):
|
||||
nv_client = nova_v2.NovaClient('fake_region', DISABLED_QUOTAS,
|
||||
self.session)
|
||||
nv_client.get_keypairs(self.user, FAKE_RESOURCE_ID)
|
||||
mock_novaclient.Client().keypairs.get.return_value = 'key1'
|
||||
mock_novaclient.Client().keypairs.get.\
|
||||
assert_called_once_with(FAKE_RESOURCE_ID, self.user)
|
||||
|
||||
@mock.patch.object(nova_v2, 'client')
|
||||
def test_create_keypairs_with_force_false(self, mock_novaclient):
|
||||
nv_client = nova_v2.NovaClient('fake_region', DISABLED_QUOTAS,
|
||||
self.session)
|
||||
fake_key = FakeKeypair('fake_name', 'fake-rsa')
|
||||
nv_client.create_keypairs(DEFAULT_FORCE, fake_key, self.user)
|
||||
self.assertEqual(0,
|
||||
mock_novaclient.Client().keypairs.delete.call_count)
|
||||
mock_novaclient.Client().keypairs.create.\
|
||||
assert_called_once_with(fake_key.name, user_id=self.user,
|
||||
public_key=fake_key.public_key)
|
||||
|
||||
@mock.patch.object(nova_v2, 'client')
|
||||
def test_create_keypairs_with_force_true(self, mock_novaclient):
|
||||
nv_client = nova_v2.NovaClient('fake_region', DISABLED_QUOTAS,
|
||||
self.session)
|
||||
fake_key = FakeKeypair('fake_name', 'fake-rsa')
|
||||
nv_client.create_keypairs(True, fake_key, self.user)
|
||||
mock_novaclient.Client().keypairs.delete.\
|
||||
assert_called_once_with(fake_key, self.user)
|
||||
mock_novaclient.Client().keypairs.create.\
|
||||
assert_called_once_with(fake_key.name, user_id=self.user,
|
||||
public_key=fake_key.public_key)
|
||||
|
|
|
@ -18,6 +18,11 @@ from kingbird.drivers.openstack import sdk
|
|||
from kingbird.tests import base
|
||||
from kingbird.tests import utils
|
||||
|
||||
FAKE_USER_ID = 'user123'
|
||||
FAKE_RESOURCE_ID = 'fake_id'
|
||||
DEFAULT_FORCE = False
|
||||
SOURCE_KEYPAIR = 'fake_key1'
|
||||
|
||||
|
||||
class FakeService(object):
|
||||
|
||||
|
@ -30,6 +35,12 @@ class FakeService(object):
|
|||
self.name = name
|
||||
|
||||
|
||||
class FakeKeypair(object):
|
||||
def __init__(self, name, public_key):
|
||||
self.name = name
|
||||
self.public_key = public_key
|
||||
|
||||
|
||||
class User(object):
|
||||
def __init__(self, user_name, id, enabled=True):
|
||||
self.user_name = user_name
|
||||
|
@ -292,3 +303,53 @@ class TestOpenStackDriver(base.KingbirdTestCase):
|
|||
os_driver = sdk.OpenStackDriver()
|
||||
expected = os_driver._is_token_valid()
|
||||
self.assertEqual(expected, False)
|
||||
|
||||
@mock.patch.object(sdk.OpenStackDriver, 'os_clients_dict')
|
||||
@mock.patch.object(sdk, 'KeystoneClient')
|
||||
@mock.patch.object(sdk, 'NovaClient')
|
||||
@mock.patch.object(sdk, 'NeutronClient')
|
||||
@mock.patch.object(sdk, 'CinderClient')
|
||||
def test_get_keypairs(self, mock_cinder_client,
|
||||
mock_network_client,
|
||||
mock_nova_client, mock_keystone_client,
|
||||
mock_os_client):
|
||||
os_driver = sdk.OpenStackDriver()
|
||||
os_driver.get_keypairs(FAKE_USER_ID, FAKE_RESOURCE_ID)
|
||||
mock_nova_client().get_keypairs.\
|
||||
assert_called_once_with(FAKE_USER_ID, FAKE_RESOURCE_ID)
|
||||
|
||||
@mock.patch.object(sdk.OpenStackDriver, 'os_clients_dict')
|
||||
@mock.patch.object(sdk, 'KeystoneClient')
|
||||
@mock.patch.object(sdk, 'NovaClient')
|
||||
@mock.patch.object(sdk, 'NeutronClient')
|
||||
@mock.patch.object(sdk, 'CinderClient')
|
||||
def test_create_keypairs_with_force_false(self, mock_cinder_client,
|
||||
mock_network_client,
|
||||
mock_nova_client,
|
||||
mock_keystone_client,
|
||||
mock_os_client):
|
||||
os_driver = sdk.OpenStackDriver()
|
||||
fake_key = FakeKeypair('fake_name', 'fake-rsa')
|
||||
os_driver.create_keypairs(False, fake_key,
|
||||
FAKE_USER_ID)
|
||||
mock_nova_client().create_keypairs.\
|
||||
assert_called_once_with(False, fake_key,
|
||||
FAKE_USER_ID)
|
||||
|
||||
@mock.patch.object(sdk.OpenStackDriver, 'os_clients_dict')
|
||||
@mock.patch.object(sdk, 'KeystoneClient')
|
||||
@mock.patch.object(sdk, 'NovaClient')
|
||||
@mock.patch.object(sdk, 'NeutronClient')
|
||||
@mock.patch.object(sdk, 'CinderClient')
|
||||
def test_create_keypairs_with_force_true(self, mock_cinder_client,
|
||||
mock_network_client,
|
||||
mock_nova_client,
|
||||
mock_keystone_client,
|
||||
mock_os_client):
|
||||
os_driver = sdk.OpenStackDriver()
|
||||
fake_key = FakeKeypair('fake_name', 'fake-rsa')
|
||||
os_driver.create_keypairs(True, fake_key,
|
||||
FAKE_USER_ID)
|
||||
mock_nova_client().create_keypairs.\
|
||||
assert_called_once_with(True, fake_key,
|
||||
FAKE_USER_ID)
|
||||
|
|
|
@ -19,6 +19,8 @@ from kingbird.tests import utils
|
|||
from oslo_config import cfg
|
||||
|
||||
CONF = cfg.CONF
|
||||
FAKE_USER = utils.UUID1
|
||||
FAKE_JOB = utils.UUID2
|
||||
|
||||
|
||||
class TestEngineService(base.KingbirdTestCase):
|
||||
|
@ -30,6 +32,9 @@ class TestEngineService(base.KingbirdTestCase):
|
|||
tenant=self.tenant_id)
|
||||
self.service_obj = service.EngineService('kingbird',
|
||||
'kingbird-engine')
|
||||
self.payload = {}
|
||||
self.user_id = FAKE_USER
|
||||
self.job_id = FAKE_JOB
|
||||
|
||||
def test_init(self):
|
||||
self.assertEqual(self.service_obj.host, 'localhost')
|
||||
|
@ -48,6 +53,11 @@ class TestEngineService(base.KingbirdTestCase):
|
|||
self.service_obj.init_qm()
|
||||
self.assertIsNotNone(self.service_obj.qm)
|
||||
|
||||
@mock.patch.object(service, 'SyncManager')
|
||||
def test_init_sm(self, mock_resource_manager):
|
||||
self.service_obj.init_sm()
|
||||
self.assertIsNotNone(self.service_obj.sm)
|
||||
|
||||
@mock.patch.object(service.EngineService, 'service_registry_cleanup')
|
||||
@mock.patch.object(service, 'QuotaManager')
|
||||
@mock.patch.object(service, 'rpc_messaging')
|
||||
|
@ -99,3 +109,13 @@ class TestEngineService(base.KingbirdTestCase):
|
|||
self.service_obj.start()
|
||||
self.service_obj.stop()
|
||||
mock_rpc.get_rpc_server().stop.assert_called_once_with()
|
||||
|
||||
@mock.patch.object(service, 'SyncManager')
|
||||
def test_resource_sync_for_user(self, mock_sync_manager):
|
||||
self.service_obj.init_tgm()
|
||||
self.service_obj.init_sm()
|
||||
self.service_obj.keypair_sync_for_user(
|
||||
self.context, self.user_id,
|
||||
self.job_id, self.payload,)
|
||||
mock_sync_manager().keypair_sync_for_user.\
|
||||
assert_called_once_with(self.user_id, self.job_id, self.payload)
|
||||
|
|
|
@ -0,0 +1,94 @@
|
|||
# Copyright 2017 Ericsson AB
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import mock
|
||||
|
||||
from kingbird.engine import sync_manager
|
||||
from kingbird.tests import base
|
||||
from kingbird.tests import utils
|
||||
|
||||
DEFAULT_FORCE = False
|
||||
SOURCE_KEYPAIR = 'fake_key1'
|
||||
FAKE_USER_ID = 'user123'
|
||||
FAKE_TARGET_REGION = 'fake_target_region'
|
||||
FAKE_SOURCE_REGION = 'fake_source_region'
|
||||
FAKE_RESOURCE_ID = 'fake_id'
|
||||
FAKE_JOB_ID = utils.UUID1
|
||||
|
||||
|
||||
class FakeKeypair(object):
|
||||
def __init__(self, name, public_key):
|
||||
self.name = name
|
||||
self.public_key = public_key
|
||||
|
||||
|
||||
class TestSyncManager(base.KingbirdTestCase):
|
||||
def setUp(self):
|
||||
super(TestSyncManager, self).setUp()
|
||||
self.ctxt = utils.dummy_context()
|
||||
|
||||
@mock.patch.object(sync_manager, 'context')
|
||||
def test_init(self, mock_context):
|
||||
mock_context.get_admin_context.return_value = self.ctxt
|
||||
sm = sync_manager.SyncManager()
|
||||
self.assertIsNotNone(sm)
|
||||
self.assertEqual('sync_manager', sm.service_name)
|
||||
self.assertEqual('localhost', sm.host)
|
||||
self.assertEqual(self.ctxt, sm.context)
|
||||
|
||||
@mock.patch.object(sync_manager, 'sdk')
|
||||
@mock.patch.object(sync_manager.SyncManager, '_create_keypairs')
|
||||
@mock.patch.object(sync_manager, 'db_api')
|
||||
def test_keypair_sync_force_false(self, mock_db_api, mock_create_keypair,
|
||||
mock_sdk):
|
||||
payload = {}
|
||||
payload['target'] = [FAKE_TARGET_REGION]
|
||||
payload['force'] = DEFAULT_FORCE
|
||||
payload['source'] = FAKE_SOURCE_REGION
|
||||
payload['resources'] = [SOURCE_KEYPAIR]
|
||||
fake_key = FakeKeypair('fake_name', 'fake-rsa')
|
||||
mock_sdk.OpenStackDriver().get_keypairs.return_value = fake_key
|
||||
sm = sync_manager.SyncManager()
|
||||
sm.keypair_sync_for_user(FAKE_USER_ID, FAKE_JOB_ID, payload)
|
||||
mock_create_keypair.assert_called_once_with(
|
||||
FAKE_JOB_ID, payload['force'], payload['target'][0], fake_key,
|
||||
FAKE_USER_ID)
|
||||
|
||||
@mock.patch.object(sync_manager, 'sdk')
|
||||
@mock.patch.object(sync_manager.SyncManager, '_create_keypairs')
|
||||
@mock.patch.object(sync_manager, 'db_api')
|
||||
def test_keypair_sync_force_true(self, mock_db_api, mock_create_keypair,
|
||||
mock_sdk):
|
||||
|
||||
payload = dict()
|
||||
payload['target'] = [FAKE_TARGET_REGION]
|
||||
payload['force'] = True
|
||||
payload['source'] = FAKE_SOURCE_REGION
|
||||
payload['resources'] = [SOURCE_KEYPAIR]
|
||||
fake_key = FakeKeypair('fake_name', 'fake-rsa')
|
||||
mock_sdk.OpenStackDriver().get_keypairs.return_value = fake_key
|
||||
sm = sync_manager.SyncManager()
|
||||
sm.keypair_sync_for_user(FAKE_USER_ID, FAKE_JOB_ID, payload)
|
||||
mock_create_keypair.assert_called_once_with(
|
||||
FAKE_JOB_ID, payload['force'], FAKE_TARGET_REGION, fake_key,
|
||||
FAKE_USER_ID)
|
||||
|
||||
@mock.patch.object(sync_manager, 'sdk')
|
||||
@mock.patch.object(sync_manager, 'db_api')
|
||||
def test_create_keypair(self, mock_db_api, mock_sdk):
|
||||
fake_key = FakeKeypair('fake_name', 'fake-rsa')
|
||||
sm = sync_manager.SyncManager()
|
||||
sm._create_keypairs(FAKE_JOB_ID, DEFAULT_FORCE, FAKE_TARGET_REGION,
|
||||
fake_key, FAKE_USER_ID)
|
||||
mock_sdk.OpenStackDriver().create_keypairs.\
|
||||
assert_called_once_with(DEFAULT_FORCE, fake_key, FAKE_USER_ID)
|
Loading…
Reference in New Issue