Add resource_sync_id column to the resource_sync table.

Currently, Kingbird has no id for each individual resource that is synced
into multiple regions. With this resource_sync_id, a user can keep track of
all the resources bound to a given sync job.
The remaining changes adapt the rest of Kingbird to the new column.
Test cases are added for the same.
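
A rough sketch of the intended flow (illustrative only; it mirrors the
controller and db_api code below): one job_id is generated for the parent
sync_job row, and every resource entry written to resource_sync gets its
own uuid stored in resource_sync_id.

    parent_job_id = uuidutils.generate_uuid()      # one id for the whole sync job
    result = db_api.sync_job_create(context, parent_job_id)
    for region in target_regions:
        for resource in resources:
            entry_id = uuidutils.generate_uuid()   # one id per synced resource
            db_api.resource_sync_create(context, result, region, source_region,
                                        resource, resource_type, entry_id)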

Change-Id: I4aa422546a2949438328c8fe47e89f3079e2eb65
Goutham Pratapa 2018-01-25 16:47:45 +05:30
parent 01c31c45aa
commit 69223a78f9
19 changed files with 426 additions and 492 deletions

View File

@ -55,24 +55,19 @@ class ResourceSyncController(object):
pass
def _entries_to_database(self, context, target_regions, source_region,
resources, resource_type, job_id, job_name):
resources, resource_type, result):
"""Manage the entries to database for both Keypair and image."""
# Insert into the parent table
try:
result = db_api.sync_job_create(context, job_name, job_id=job_id)
except exceptions.InternalError:
pecan.abort(500, _('Internal Server Error.'))
# Insert into the child table
# Insert into the child table
for region in target_regions:
for resource in resources:
job_id = uuidutils.generate_uuid()
try:
db_api.resource_sync_create(context, result,
region, source_region,
resource, resource_type)
resource, resource_type,
job_id)
except exceptions.JobNotFound:
pecan.abort(404, _('Job not found'))
return result
@index.when(method='GET', template='json')
def get(self, project, action=None):
@ -91,16 +86,12 @@ class ResourceSyncController(object):
result['job_set'] = db_api.sync_job_list(context, action)
elif uuidutils.is_uuid_like(action):
try:
result['job_set'] = db_api.resource_sync_list_by_job_id(
result['job_set'] = db_api.resource_sync_list(
context, action)
except exceptions.JobNotFound:
pecan.abort(404, _('Job not found'))
else:
try:
result['job_set'] = db_api.resource_sync_list_by_job_name(
context, action)
except exceptions.JobNotFound:
pecan.abort(404, _('Job not found'))
pecan.abort(400, _('Invalid request URL'))
return result
@index.when(method='POST', template='json')
@ -118,83 +109,81 @@ class ResourceSyncController(object):
request_data = request_data.get('resource_set')
if not request_data:
pecan.abort(400, _('resource_set required'))
job_name = None
if 'name' in request_data.keys():
job_name = request_data.get('name')
db_api.validate_job_name(context, job_name)
for iteration in range(len(request_data['Sync'])):
payload = request_data['Sync'][iteration]
response = self._get_post_data(payload,
context, job_name)
else:
response = self._get_post_data(request_data,
context, job_name)
parent_job_id = uuidutils.generate_uuid()
try:
result = db_api.sync_job_create(context, parent_job_id)
except exceptions.InternalError:
pecan.abort(500, _('Internal Server Error.'))
response = self._get_post_data(request_data,
context, result)
return response
def _get_post_data(self, payload, context, job_name):
def _get_post_data(self, payload, context, result):
resource_type = payload.get('resource_type')
target_regions = payload.get('target')
force = eval(str(payload.get('force', False)))
source_regions = list()
if not target_regions or not isinstance(target_regions, list):
pecan.abort(400, _('Target regions required'))
source_region = payload.get('source')
if(isinstance(source_region, list)):
source_region = "".join(source_region)
if not source_region or not isinstance(source_region, str):
if not source_region:
pecan.abort(400, _('Source region required'))
if isinstance(source_region, list):
source_regions = source_region
if isinstance(source_region, str):
source_regions.append(source_region)
source_resources = payload.get('resources')
if not source_resources:
pecan.abort(400, _('Source resources required'))
job_id = uuidutils.generate_uuid()
session = EndpointCache().get_session_from_token(
context.auth_token, context.project)
if resource_type == consts.KEYPAIR:
session = EndpointCache().get_session_from_token(
context.auth_token, context.project)
# Create Source Region object
source_nova_client = NovaClient(source_region, session)
# Check for keypairs in Source Region
for source_keypair in source_resources:
source_keypair = source_nova_client.\
get_keypairs(source_keypair)
if not source_keypair:
pecan.abort(404)
result = self._entries_to_database(context, target_regions,
source_region,
source_resources,
resource_type,
job_id, job_name)
return self._keypair_sync(job_id, payload, context, result)
for source in source_regions:
# Create Source Region object
source_nova_client = NovaClient(source, session)
# Check for keypairs in Source Region
for source_keypair in source_resources:
source_keypair = source_nova_client.\
get_keypairs(source_keypair)
if not source_keypair:
db_api._delete_failure_sync_job(context, result.job_id)
pecan.abort(404)
self._entries_to_database(context, target_regions,
source, source_resources,
resource_type, result)
return self._keypair_sync(force, context, result)
elif resource_type == consts.IMAGE:
# Create Source Region glance_object
glance_driver = GlanceClient(source_region, context)
# Check for images in Source Region
for image in source_resources:
source_image = glance_driver.check_image(image)
if image != source_image:
pecan.abort(404)
result = self._entries_to_database(context, target_regions,
source_region,
source_resources,
resource_type,
job_id, job_name)
return self._image_sync(job_id, payload, context, result)
for source in source_regions:
# Create Source Region glance_object
glance_driver = GlanceClient(source, context)
# Check for images in Source Region
for image in source_resources:
source_image = glance_driver.check_image(image)
if image != source_image:
db_api._delete_failure_sync_job(context, result.job_id)
pecan.abort(404)
self._entries_to_database(context, target_regions,
source, source_resources,
resource_type, result)
return self._image_sync(force, context, result)
elif resource_type == consts.FLAVOR:
if not context.is_admin:
db_api._delete_failure_sync_job(context, result.job_id)
pecan.abort(403, _('Admin required'))
session = EndpointCache().get_session_from_token(
context.auth_token, context.project)
# Create Source Region object
source_nova_client = NovaClient(source_region, session)
for flavor in source_resources:
source_flavor = source_nova_client.get_flavor(flavor)
if not source_flavor:
pecan.abort(404)
result = self._entries_to_database(context, target_regions,
source_region,
source_resources,
resource_type,
job_id, job_name)
return self._flavor_sync(job_id, payload, context, result)
for source in source_regions:
# Create Source Region object
source_nova_client = NovaClient(source, session)
for flavor in source_resources:
source_flavor = source_nova_client.get_flavor(flavor)
if not source_flavor:
db_api._delete_failure_sync_job(context, result.job_id)
pecan.abort(404)
self._entries_to_database(context, target_regions,
source, source_resources,
resource_type, result)
return self._flavor_sync(force, context, result)
else:
pecan.abort(400, _('Bad resource_type'))
@ -226,7 +215,7 @@ class ResourceSyncController(object):
else:
pecan.abort(400, _('Bad request'))
def _keypair_sync(self, job_id, payload, context, result):
def _keypair_sync(self, force, context, result):
"""Make an rpc call to kb-engine.
:param job_id: ID of the job to update values in database based on
@ -235,12 +224,12 @@ class ResourceSyncController(object):
:param context: context of the request.
:param result: Result object to return an output.
"""
self.rpc_client.keypair_sync_for_user(context, job_id, payload)
return {'job_status': {'name': result.name, 'id': result.id,
self.rpc_client.keypair_sync_for_user(context, result.job_id, force)
return {'job_status': {'id': result.job_id,
'status': result.sync_status,
'created_at': result.created_at}}
def _image_sync(self, job_id, payload, context, result):
def _image_sync(self, force, context, result):
"""Make an rpc call to engine.
:param job_id: ID of the job to update values in database based on
@ -249,12 +238,12 @@ class ResourceSyncController(object):
:param context: context of the request.
:param result: Result object to return an output.
"""
self.rpc_client.image_sync(context, job_id, payload)
return {'job_status': {'name': result.name, 'id': result.id,
self.rpc_client.image_sync(context, result.job_id, force)
return {'job_status': {'id': result.job_id,
'status': result.sync_status,
'created_at': result.created_at}}
def _flavor_sync(self, job_id, payload, context, result):
def _flavor_sync(self, force, context, result):
"""Make an rpc call to engine.
:param job_id: ID of the job to update values in database based on
@ -263,7 +252,7 @@ class ResourceSyncController(object):
:param context: context of the request.
:param result: Result object to return an output.
"""
self.rpc_client.flavor_sync(context, job_id, payload)
return {'job_status': {'name': result.name, 'id': result.id,
self.rpc_client.flavor_sync(context, result.job_id, force)
return {'job_status': {'id': result.job_id,
'status': result.sync_status,
'created_at': result.created_at}}
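
For reference, the job_status response from these sync calls now looks roughly
like this (values are illustrative; the id is the parent job_id, and the name
field is gone along with the job name):

    {'job_status': {'id': '69223a78-aaaa-bbbb-cccc-0123456789ab',
                    'status': consts.JOB_PROGRESS,
                    'created_at': '2018-01-25 16:47:45'}}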

View File

@ -383,12 +383,10 @@ def service_get_all(context):
##########################
@require_context
def sync_job_create(context, job_name, job_id):
def sync_job_create(context, job_id):
with write_session() as session:
sjc = models.SyncJob()
if job_name is not None:
sjc.name = job_name
sjc.id = job_id
sjc.job_id = job_id
sjc.user_id = context.user
sjc.project_id = context.project
session.add(sjc)
@ -409,8 +407,7 @@ def sync_job_list(context, action=None):
output = list()
for row in rows:
result = dict()
result['id'] = row.id
result['name'] = row.name
result['id'] = row.job_id
result['sync_status'] = row.sync_status
result['created_at'] = row.created_at
if row.updated_at:
@ -424,7 +421,7 @@ def sync_job_list(context, action=None):
@require_context
def sync_job_status(context, job_id):
row = model_query(context, models.SyncJob).\
filter_by(id=job_id, user_id=context.user,
filter_by(job_id=job_id, user_id=context.user,
project_id=context.project).first()
if not row:
raise exception.JobNotFound()
@ -436,7 +433,7 @@ def sync_job_status(context, job_id):
def sync_job_update(context, job_id, status):
with write_session() as session:
sync_job_ref = session.query(models.SyncJob). \
filter_by(id=job_id).first()
filter_by(job_id=job_id).first()
if not sync_job_ref:
raise exception.JobNotFound()
values = dict()
@ -448,11 +445,11 @@ def sync_job_update(context, job_id, status):
def sync_job_delete(context, job_id):
with write_session() as session:
parent_job = model_query(context, models.SyncJob). \
filter_by(id=job_id, user_id=context.user,
filter_by(job_id=job_id, user_id=context.user,
project_id=context.project).first()
if parent_job:
child_jobs = model_query(context, models.ResourceSync). \
filter_by(job_id=parent_job.id).all()
filter_by(job_id=parent_job.job_id).all()
if not child_jobs:
raise exception.JobNotFound()
for child_job in child_jobs:
@ -462,15 +459,28 @@ def sync_job_delete(context, job_id):
raise exception.JobNotFound()
@require_context
def _delete_failure_sync_job(context, job_id):
with write_session() as session:
parent_job = model_query(context, models.SyncJob). \
filter_by(job_id=job_id, user_id=context.user,
project_id=context.project).first()
if parent_job:
session.delete(parent_job)
else:
raise exception.JobNotFound()
##########################
@require_context
def resource_sync_create(context, job, region, source_region,
resource, resource_type):
resource, resource_type, job_id):
if not job:
raise exception.JobNotFound()
with write_session() as session:
rsc = models.ResourceSync()
rsc.sync_job = job
rsc.resource_sync_id = job_id
rsc.resource = resource
rsc.target_region = region
rsc.source_region = source_region
@ -480,11 +490,10 @@ def resource_sync_create(context, job, region, source_region,
@require_context
def resource_sync_update(context, job_id, region, resource, status):
def resource_sync_update(context, job_id, status):
with write_session() as session:
resource_sync_ref = session.query(models.ResourceSync).\
filter_by(job_id=job_id, target_region=region, resource=resource).\
first()
filter_by(resource_sync_id=job_id).first()
if not resource_sync_ref:
raise exception.JobNotFound()
values = dict()
@ -506,47 +515,25 @@ def resource_sync_status(context, job_id):
@require_context
def resource_sync_list_by_job_name(context, job_name):
final_response = list()
def resource_sync_list(context, job_id, resource_type=None):
parent_row = model_query(context, models.SyncJob).\
filter_by(name=job_name, user_id=context.user,
project_id=context.project).all()
if not parent_row:
raise exception.JobNotFound()
for iteration in range(len(parent_row)):
rows = model_query(context, models.ResourceSync).\
filter_by(job_id=parent_row[iteration].id).all()
final_response = final_response + sync_individual_resource(rows)
return final_response
def validate_job_name(context, job_name):
parent_row = model_query(context, models.SyncJob).\
filter_by(name=job_name, user_id=context.user,
project_id=context.project).all()
if parent_row:
raise exception.DuplicateJobEntry()
@require_context
def resource_sync_list_by_job_id(context, job_id):
parent_row = model_query(context, models.SyncJob).\
filter_by(id=job_id, user_id=context.user,
filter_by(job_id=job_id, user_id=context.user,
project_id=context.project).first()
if not parent_row:
raise exception.JobNotFound()
rows = model_query(context, models.ResourceSync).\
filter_by(job_id=parent_row.id).all()
return sync_individual_resource(rows)
def sync_individual_resource(rows):
if resource_type:
rows = model_query(context, models.ResourceSync).\
filter_by(job_id=parent_row.job_id,
resource_type=resource_type).all()
else:
rows = model_query(context, models.ResourceSync).\
filter_by(job_id=parent_row.job_id).all()
output = list()
if not rows:
raise exception.JobNotFound()
for row in rows:
result = dict()
result['id'] = row.job_id
result['id'] = row.resource_sync_id
result['target_region'] = row.target_region
result['source_region'] = row.source_region
result['resource'] = row.resource
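
A hedged usage sketch of the consolidated lookup, assuming the signature
above: the same call now serves both the API (all resources of a job) and the
engine managers (resources of one type for a job).

    # all resource entries bound to a parent sync job
    rows = db_api.resource_sync_list(context, job_id)
    # only the keypair entries of that job (used by the keypair sync manager)
    keypairs = db_api.resource_sync_list(context, job_id, consts.KEYPAIR)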

View File

@ -22,8 +22,7 @@ def upgrade(migrate_engine):
sync_job = sqlalchemy.Table(
'sync_job', meta,
sqlalchemy.Column('name', sqlalchemy.String(255)),
sqlalchemy.Column('id', sqlalchemy.String(36),
sqlalchemy.Column('job_id', sqlalchemy.String(36),
primary_key=True),
sqlalchemy.Column('sync_status', sqlalchemy.String(length=36),
default=consts.JOB_PROGRESS, nullable=False),
@ -42,7 +41,9 @@ def upgrade(migrate_engine):
resource_sync = sqlalchemy.Table(
'resource_sync', meta,
sqlalchemy.Column('job_id', sqlalchemy.String(36),
sqlalchemy.ForeignKey('sync_job.id'),
sqlalchemy.ForeignKey('sync_job.job_id'),
primary_key=True),
sqlalchemy.Column('resource_sync_id', sqlalchemy.String(36),
primary_key=True),
sqlalchemy.Column('source_region', sqlalchemy.String(36),
primary_key=True),

View File

@ -154,9 +154,7 @@ class SyncJob(BASE, KingbirdBase):
__tablename__ = 'sync_job'
name = Column('name', String(255))
id = Column('id', String(36), primary_key=True)
job_id = Column('job_id', String(36), primary_key=True)
sync_status = Column(String(36), default=consts.JOB_PROGRESS,
nullable=False)
@ -174,7 +172,9 @@ class ResourceSync(BASE, KingbirdBase):
__tablename__ = 'resource_sync'
job_id = Column('job_id', String(36),
ForeignKey('sync_job.id'), primary_key=True)
ForeignKey('sync_job.job_id'), primary_key=True)
resource_sync_id = Column('resource_sync_id', String(36), primary_key=True)
source_region = Column('source_region', String(36), primary_key=True)
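
With resource_sync_id part of the composite primary key, a single synced
resource can be addressed directly instead of by (job_id, region, resource).
A minimal sketch of the lookup used by resource_sync_update (session handling
omitted):

    row = session.query(models.ResourceSync).\
        filter_by(resource_sync_id=resource_sync_id).first()
    if not row:
        raise exception.JobNotFound()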

View File

@ -160,7 +160,7 @@ class NovaClient(base.DriverBase):
resource_flavor):
"""Check for the flavor and then delete it."""
try:
target_flavor = self.nova_client.flavors.get(flavor.id)
target_flavor = self.get_flavor(flavor.name)
if target_flavor:
resource_flavor.pop("flavorid", None)
flavor_list = self.nova_client.flavors.list(is_public=None)

View File

@ -32,23 +32,9 @@ class FlavorSyncManager(object):
def __init__(self, *args, **kwargs):
super(FlavorSyncManager, self).__init__()
def create_resources_in_region(self, job_id, force, target_regions,
def create_resources_in_region(self, job_id, force, region,
source_flavor, session, context,
access_tenants=None):
"""Create Region specific threads."""
regions_thread = list()
for region in target_regions:
thread = threading.Thread(target=self.create_resources,
args=(job_id, force, region,
source_flavor, session,
context, access_tenants))
regions_thread.append(thread)
thread.start()
for region_thread in regions_thread:
region_thread.join()
def create_resources(self, job_id, force, region, source_flavor,
session, context, access_tenants=None):
"""Create resources using threads."""
target_nova_client = NovaClient(region, session)
try:
@ -57,8 +43,7 @@ class FlavorSyncManager(object):
LOG.info('Flavor %(flavor)s created in %(region)s'
% {'flavor': source_flavor.name, 'region': region})
try:
db_api.resource_sync_update(context, job_id, region,
source_flavor.name,
db_api.resource_sync_update(context, job_id,
consts.JOB_SUCCESS)
except exceptions.JobNotFound():
raise
@ -66,14 +51,13 @@ class FlavorSyncManager(object):
LOG.error('Exception Occurred: %(msg)s in %(region)s'
% {'msg': exc.message, 'region': region})
try:
db_api.resource_sync_update(context, job_id, region,
source_flavor.name,
db_api.resource_sync_update(context, job_id,
consts.JOB_FAILURE)
except exceptions.JobNotFound():
raise
pass
def resource_sync(self, context, job_id, payload):
def resource_sync(self, context, job_id, force):
"""Create resources in target regions.
:param context: request context object.
@ -83,29 +67,35 @@ class FlavorSyncManager(object):
LOG.info("Triggered Flavor Sync.")
flavors_thread = list()
access_tenants = None
target_regions = payload['target']
force = eval(str(payload.get('force', False)))
resource_ids = payload.get('resources')
source_region = payload['source']
if(isinstance(source_region, list)):
source_region = "".join(source_region)
session = EndpointCache().get_session_from_token(
context.auth_token, context.project)
# Create Source Region object
source_nova_client = NovaClient(source_region, session)
for flavor in resource_ids:
source_flavor = source_nova_client.get_flavor(flavor)
if not source_flavor.is_public:
access_tenants = source_nova_client.\
get_flavor_access_tenant(flavor)
thread = threading.Thread(target=self.create_resources_in_region,
args=(job_id, force, target_regions,
source_flavor, session,
context, access_tenants))
flavors_thread.append(thread)
thread.start()
for flavor_thread in flavors_thread:
flavor_thread.join()
try:
resource_jobs = db_api.resource_sync_list(context, job_id,
consts.FLAVOR)
except exceptions.JobNotFound():
raise
source_regions = [i["source_region"] for i in resource_jobs]
unique_source = list(set(source_regions))
for source_object in unique_source:
source_nova_client = NovaClient(source_object, session)
for resource_job in resource_jobs:
source_flavor = source_nova_client.\
get_flavor(resource_job["resource"])
if not source_flavor.is_public:
access_tenants = source_nova_client.\
get_flavor_access_tenant(resource_job["resource"])
thread = threading.Thread(
target=self.create_resources_in_region,
args=(resource_job["id"], force,
resource_job["target_region"],
source_flavor, session, context, access_tenants))
flavors_thread.append(thread)
thread.start()
for flavor_thread in flavors_thread:
flavor_thread.join()
# Update Result in DATABASE.
try:
resource_sync_details = db_api.\
resource_sync_status(context, job_id)

View File

@ -33,46 +33,31 @@ class ImageSyncManager(object):
def __init__(self, *args, **kwargs):
super(ImageSyncManager, self).__init__()
def create_resources_in_region(self, job_id, target_regions,
def create_resources_in_region(self, job_id, target_region,
source_region, context, resource, force):
"""Create Region Specific threads."""
regions_thread = list()
for region in target_regions:
thread = threading.Thread(target=self.create_resources,
args=(job_id, region, source_region,
context, resource, force))
regions_thread.append(thread)
thread.start()
for region_thread in regions_thread:
region_thread.join()
def create_resources(self, job_id, region, source_region, context,
resource, force):
"""Check dependent images and create resources in target regions."""
source_glance_client = GlanceClient(source_region, context)
target_glance_client = GlanceClient(region, context)
target_glance_client = GlanceClient(target_region, context)
dependent_images = glance_adapter.check_dependent_images(
context, source_region, resource)
if dependent_images is not None:
result = self.create_dependent_image(
resource, dependent_images, target_glance_client,
source_glance_client, region, force)
self.update_result_in_database(context, job_id, region, resource,
result)
source_glance_client, target_region, force)
self.update_result_in_database(context, job_id, target_region,
resource, result)
else:
result = self.create_independent_image(
resource, target_glance_client, source_glance_client,
region, force)
self.update_result_in_database(context, job_id, region, resource,
result)
target_region, force)
self.update_result_in_database(context, job_id, target_region,
resource, result)
def update_result_in_database(self, context, job_id, region, resource,
result):
"""Update result in database based on the sync operation."""
job_result = consts.JOB_SUCCESS if result else consts.JOB_FAILURE
try:
db_api.resource_sync_update(context, job_id, region,
resource, job_result)
db_api.resource_sync_update(context, job_id, job_result)
except exceptions.JobNotFound():
raise
pass
@ -167,7 +152,7 @@ class ImageSyncManager(object):
% {'msg': exc.message, 'region': region})
return False
def resource_sync(self, context, job_id, payload):
def resource_sync(self, context, job_id, force):
"""Create resources in target regions.
Image with same id is created in target_regions and therefore
@ -182,21 +167,26 @@ class ImageSyncManager(object):
"""
LOG.info('Triggered image sync.')
images_thread = list()
target_regions = payload['target']
force = eval(str(payload.get('force', False)))
resource_ids = payload.get('resources')
source_region = payload['source']
if(isinstance(source_region, list)):
source_region = "".join(source_region)
for resource in resource_ids:
thread = threading.Thread(target=self.create_resources_in_region,
args=(job_id, target_regions,
source_region, context,
resource, force))
images_thread.append(thread)
thread.start()
for image_thread in images_thread:
image_thread.join()
try:
resource_jobs = db_api.resource_sync_list(context, job_id,
consts.IMAGE)
except exceptions.JobNotFound():
raise
source_regions = [i["source_region"] for i in resource_jobs]
unique_source = list(set(source_regions))
for source_object in unique_source:
for resource_job in resource_jobs:
thread = threading.Thread(
target=self.create_resources_in_region,
args=(resource_job["id"], resource_job["target_region"],
source_object, context, resource_job["resource"],
force))
images_thread.append(thread)
thread.start()
for image_thread in images_thread:
image_thread.join()
# Update Result in DATABASE.
try:
resource_sync_details = db_api.\
resource_sync_status(context, job_id)

View File

@ -32,22 +32,8 @@ class KeypairSyncManager(object):
def __init__(self, *args, **kwargs):
super(KeypairSyncManager, self).__init__()
def create_resources_in_region(self, job_id, force, target_regions,
def create_resources_in_region(self, job_id, force, region,
source_keypair, session, context):
"""Create Region specific threads."""
regions_thread = list()
for region in target_regions:
thread = threading.Thread(target=self.create_resources,
args=(job_id, force, region,
source_keypair, session,
context))
regions_thread.append(thread)
thread.start()
for region_thread in regions_thread:
region_thread.join()
def create_resources(self, job_id, force, region, source_keypair,
session, context):
"""Create resources using threads."""
target_nova_client = NovaClient(region, session)
try:
@ -55,8 +41,7 @@ class KeypairSyncManager(object):
LOG.info('keypair %(keypair)s created in %(region)s'
% {'keypair': source_keypair.name, 'region': region})
try:
db_api.resource_sync_update(context, job_id, region,
source_keypair.name,
db_api.resource_sync_update(context, job_id,
consts.JOB_SUCCESS)
except exceptions.JobNotFound():
raise
@ -64,14 +49,13 @@ class KeypairSyncManager(object):
LOG.error('Exception Occurred: %(msg)s in %(region)s'
% {'msg': exc.message, 'region': region})
try:
db_api.resource_sync_update(context, job_id, region,
source_keypair.name,
db_api.resource_sync_update(context, job_id,
consts.JOB_FAILURE)
except exceptions.JobNotFound():
raise
pass
def resource_sync(self, context, job_id, payload):
def resource_sync(self, context, job_id, force):
"""Create resources in target regions.
:param context: request context object.
@ -80,26 +64,31 @@ class KeypairSyncManager(object):
"""
LOG.info("Triggered Keypair Sync.")
keypairs_thread = list()
target_regions = payload['target']
force = eval(str(payload.get('force', False)))
resource_ids = payload.get('resources')
source_region = payload['source']
if(isinstance(source_region, list)):
source_region = "".join(source_region)
session = EndpointCache().get_session_from_token(
context.auth_token, context.project)
# Create Source Region object
source_nova_client = NovaClient(source_region, session)
for keypair in resource_ids:
source_keypair = source_nova_client.get_keypairs(keypair)
thread = threading.Thread(target=self.create_resources_in_region,
args=(job_id, force, target_regions,
source_keypair, session,
context,))
keypairs_thread.append(thread)
thread.start()
for keypair_thread in keypairs_thread:
keypair_thread.join()
try:
resource_jobs = db_api.resource_sync_list(context, job_id,
consts.KEYPAIR)
except exceptions.JobNotFound():
raise
source_regions = [i["source_region"] for i in resource_jobs]
unique_source = list(set(source_regions))
for source_object in unique_source:
source_nova_client = NovaClient(source_object, session)
for resource_job in resource_jobs:
source_keypair = source_nova_client.\
get_keypairs(resource_job["resource"])
thread = threading.Thread(
target=self.create_resources_in_region,
args=(resource_job["id"], force,
resource_job["target_region"],
source_keypair, session, context,))
keypairs_thread.append(thread)
thread.start()
for keypair_thread in keypairs_thread:
keypair_thread.join()
# Update Result in DATABASE.
try:
resource_sync_details = db_api.\
resource_sync_status(context, job_id)

View File

@ -166,19 +166,19 @@ class EngineService(service.Service):
self.qm.quota_sync_for_project(project_id)
@request_context
def keypair_sync_for_user(self, ctxt, job_id, payload):
def keypair_sync_for_user(self, ctxt, job_id, force):
# Keypair Sync for a user, will be triggered by KB-API
self.ksm.resource_sync(ctxt, job_id, payload)
self.ksm.resource_sync(ctxt, job_id, force)
@request_context
def image_sync(self, ctxt, job_id, payload):
def image_sync(self, ctxt, job_id, force):
# Image Sync triggered by KB_API.
self.ism.resource_sync(ctxt, job_id, payload)
self.ism.resource_sync(ctxt, job_id, force)
@request_context
def flavor_sync(self, ctxt, job_id, payload):
def flavor_sync(self, ctxt, job_id, force):
# Flavor Sync triggered by KB_API.
self.fsm.resource_sync(ctxt, job_id, payload)
self.fsm.resource_sync(ctxt, job_id, force)
def _stop_rpc_server(self):
# Stop RPC connection to prevent new requests

View File

@ -69,18 +69,18 @@ class EngineClient(object):
return self.cast(ctxt, self.make_msg('quota_sync_for_project',
project_id=project_id))
def keypair_sync_for_user(self, ctxt, job_id, payload):
def keypair_sync_for_user(self, ctxt, job_id, force):
return self.cast(
ctxt,
self.make_msg('keypair_sync_for_user', job_id=job_id,
payload=payload))
force=force))
def image_sync(self, ctxt, job_id, payload):
def image_sync(self, ctxt, job_id, force):
return self.cast(
ctxt,
self.make_msg('image_sync', job_id=job_id, payload=payload))
self.make_msg('image_sync', job_id=job_id, force=force))
def flavor_sync(self, ctxt, job_id, payload):
def flavor_sync(self, ctxt, job_id, force):
return self.cast(
ctxt,
self.make_msg('flavor_sync', job_id=job_id, payload=payload))
self.make_msg('flavor_sync', job_id=job_id, force=force))

View File

@ -53,7 +53,7 @@ class TestQuotaManager(testroot.KBApiTest):
@mock.patch.object(quota_manager, 'db_api')
def test_get_all_admin(self, mock_db_api):
updated_values = {'subnet': 11}
default_values = tempest_consts.DEFAULT_QUOTAS
default_values = dict(tempest_consts.DEFAULT_QUOTAS)
fake_url = '/v1.0/%s/os-quota-sets/%s'\
% (FAKE_TENANT, FAKE_TENANT)
mock_db_api.quota_get_all_by_project.return_value = updated_values
@ -68,7 +68,7 @@ class TestQuotaManager(testroot.KBApiTest):
@mock.patch.object(quota_manager, 'db_api')
def test_get_tenant_with_admin(self, mock_db_api):
updated_values = {'subnet': 11}
default_values = tempest_consts.DEFAULT_QUOTAS
default_values = dict(tempest_consts.DEFAULT_QUOTAS)
fake_url = '/v1.0/%s/os-quota-sets/%s'\
% (FAKE_TENANT, TARGET_FAKE_TENANT)
mock_db_api.quota_get_all_by_project.return_value = updated_values
@ -83,7 +83,7 @@ class TestQuotaManager(testroot.KBApiTest):
@mock.patch.object(quota_manager, 'db_api')
def test_get_tenant_without_admin(self, mock_db_api):
updated_values = {'subnet': 11}
default_values = tempest_consts.DEFAULT_QUOTAS
default_values = dict(tempest_consts.DEFAULT_QUOTAS)
fake_url = '/v1.0/%s/os-quota-sets/%s'\
% (TARGET_FAKE_TENANT, TARGET_FAKE_TENANT)
mock_db_api.quota_get_all_by_project.return_value = updated_values

View File

@ -52,7 +52,7 @@ class TestQuotaManager(testroot.KBApiTest):
@mock.patch.object(quota_manager, 'enf')
def test_get_quota_details(self, mock_enf, mock_db_api):
updated_values = {'subnet': 11}
default_values = tempest_consts.DEFAULT_QUOTAS
default_values = dict(tempest_consts.DEFAULT_QUOTAS)
fake_url = '/v1.1/%s/os-quota-sets/'\
% (FAKE_TENANT)
mock_db_api.quota_get_all_by_project.return_value = updated_values

View File

@ -79,9 +79,8 @@ class Result(object):
class SyncJob(object):
def __init__(self, id, name, sync_status, created_at):
self.id = id
self.name = name
def __init__(self, id, sync_status, created_at):
self.job_id = id
self.sync_status = sync_status
self.created_at = created_at
@ -97,16 +96,17 @@ class TestResourceManager(testroot.KBApiTest):
@mock.patch.object(sync_manager, 'db_api')
def test_post_request_data(self, mock_db_api, mock_endpoint_cache,
mock_nova, mock_rpc_client):
time_now = timeutils.utcnow()
payload = {"resources": [SOURCE_KEYPAIR],
"resource_type": "keypair",
"source": FAKE_SOURCE_REGION,
"target": [FAKE_TARGET_REGION]}
mock_db_api.validate_job_name(self.ctx, JOB_NAME)
self.assertEqual(1,
mock_db_api.validate_job_name
.call_count)
sync_job_result = SyncJob(FAKE_JOB,
consts.JOB_PROGRESS, time_now)
mock_db_api.sync_job_create.return_value = sync_job_result
result = sync_manager.ResourceSyncController().\
_get_post_data(payload, self.ctx, JOB_NAME)
_get_post_data(payload, self.ctx, sync_job_result)
self.assertEqual(1, mock_db_api.resource_sync_create.call_count)
self.assertEqual(result['job_status'].get('status'),
mock_db_api.sync_job_create().sync_status)
@ -123,7 +123,7 @@ class TestResourceManager(testroot.KBApiTest):
"source": FAKE_SOURCE_REGION,
"target": [FAKE_TARGET_REGION]}}
fake_key = FakeKeypair('fake_name', 'fake-rsa')
sync_job_result = SyncJob(JOB_NAME, FAKE_JOB,
sync_job_result = SyncJob(FAKE_JOB,
consts.JOB_PROGRESS, time_now)
mock_endpoint_cache().get_session_from_token.\
return_value = 'fake_session'
@ -154,7 +154,7 @@ class TestResourceManager(testroot.KBApiTest):
"target": [FAKE_TARGET_REGION]}}
fake_flavor = Fake_Flavor('fake_id', 512, 2,
30, 'fake_flavor', 1)
sync_job_result = SyncJob(JOB_NAME, FAKE_JOB,
sync_job_result = SyncJob(FAKE_JOB,
consts.JOB_PROGRESS, time_now)
mock_endpoint_cache().get_session_from_token.\
return_value = 'fake_session'
@ -172,10 +172,12 @@ class TestResourceManager(testroot.KBApiTest):
self.assertEqual(response.status_int, 200)
@mock.patch.object(rpc_client, 'EngineClient')
@mock.patch.object(sync_manager, 'EndpointCache')
@mock.patch.object(sync_manager, 'NovaClient')
@mock.patch.object(sync_manager, 'db_api')
def test_post_flavor_sync_non_admin(self, mock_db_api,
mock_nova, mock_rpc_client):
mock_nova, mock_endpoint,
mock_rpc_client):
data = {"resource_set": {"resources": [SOURCE_FLAVOR],
"resource_type": "flavor",
"force": "True",
@ -188,8 +190,10 @@ class TestResourceManager(testroot.KBApiTest):
@mock.patch.object(rpc_client, 'EngineClient')
@mock.patch.object(sync_manager, 'GlanceClient')
@mock.patch.object(sync_manager, 'EndpointCache')
@mock.patch.object(sync_manager, 'db_api')
def test_post_image_sync(self, mock_db_api, mock_glance, mock_rpc_client):
def test_post_image_sync(self, mock_db_api, mock_endpoint,
mock_glance, mock_rpc_client):
time_now = timeutils.utcnow()
data = {"resource_set": {"resources": [SOURCE_IMAGE_ID],
"resource_type": "image",
@ -197,7 +201,7 @@ class TestResourceManager(testroot.KBApiTest):
"source": FAKE_SOURCE_REGION,
"target": [FAKE_TARGET_REGION]}}
fake_image = FakeImage(SOURCE_IMAGE_ID, SOURCE_IMAGE_NAME)
sync_job_result = SyncJob(JOB_NAME, FAKE_JOB,
sync_job_result = SyncJob(FAKE_JOB,
consts.JOB_PROGRESS, time_now)
mock_glance().check_image.return_value = fake_image.id
mock_db_api.sync_job_create.return_value = sync_job_result
@ -239,7 +243,8 @@ class TestResourceManager(testroot.KBApiTest):
headers=FAKE_HEADERS, params=data)
@mock.patch.object(rpc_client, 'EngineClient')
def test_post_no_target_regions(self, mock_rpc_client):
@mock.patch.object(sync_manager, 'db_api')
def test_post_no_target_regions(self, mock_db, mock_rpc_client):
data = {"resource_set": {"resources": [SOURCE_KEYPAIR],
"force": "True",
"source": FAKE_SOURCE_REGION}}
@ -248,7 +253,9 @@ class TestResourceManager(testroot.KBApiTest):
headers=FAKE_HEADERS, params=data)
@mock.patch.object(rpc_client, 'EngineClient')
def test_post_no_source_regions(self, mock_rpc_client):
@mock.patch.object(sync_manager, 'db_api')
def test_post_no_source_regions(self, mock_db,
mock_rpc_client):
data = {"resource_set": {"resources": [SOURCE_KEYPAIR],
"force": "True",
"target": [FAKE_TARGET_REGION]}}
@ -257,7 +264,10 @@ class TestResourceManager(testroot.KBApiTest):
headers=FAKE_HEADERS, params=data)
@mock.patch.object(rpc_client, 'EngineClient')
def test_post_no_resources_in_body(self, mock_rpc_client):
@mock.patch.object(sync_manager, 'EndpointCache')
@mock.patch.object(sync_manager, 'db_api')
def test_post_no_resources_in_body(self, mock_db, mock_endpoint_cache,
mock_rpc_client):
data = {"resource_set": {"force": "True",
"source": FAKE_SOURCE_REGION,
"target": [FAKE_TARGET_REGION]}}
@ -266,7 +276,10 @@ class TestResourceManager(testroot.KBApiTest):
headers=FAKE_HEADERS, params=data)
@mock.patch.object(rpc_client, 'EngineClient')
def test_post_no_resource_type(self, mock_rpc_client):
@mock.patch.object(sync_manager, 'EndpointCache')
@mock.patch.object(sync_manager, 'db_api')
def test_post_no_resource_type(self, mock_db, mock_endpoint_cache,
mock_rpc_client):
data = {"resource_set": {"resources": [SOURCE_KEYPAIR],
"force": "True",
"source": FAKE_SOURCE_REGION,
@ -278,7 +291,9 @@ class TestResourceManager(testroot.KBApiTest):
@mock.patch.object(rpc_client, 'EngineClient')
@mock.patch.object(sync_manager, 'EndpointCache')
@mock.patch.object(sync_manager, 'NovaClient')
def test_post_no_keypairs_in_region(self, mock_nova, mock_endpoint_cache,
@mock.patch.object(sync_manager, 'db_api')
def test_post_no_keypairs_in_region(self, mock_db, mock_nova,
mock_endpoint_cache,
mock_rpc_client):
data = {"resource_set": {"resources": [SOURCE_KEYPAIR],
"resource_type": "keypair",
@ -293,8 +308,11 @@ class TestResourceManager(testroot.KBApiTest):
headers=FAKE_HEADERS, params=data)
@mock.patch.object(rpc_client, 'EngineClient')
@mock.patch.object(sync_manager, 'EndpointCache')
@mock.patch.object(sync_manager, 'GlanceClient')
def test_post_no_images_in_source_region(self, mock_glance,
@mock.patch.object(sync_manager, 'db_api')
def test_post_no_images_in_source_region(self, mock_db, mock_glance,
mock_endpoint_cache,
mock_rpc_client):
data = {"resource_set": {"resources": [SOURCE_IMAGE_ID],
"resource_type": "image",
@ -310,7 +328,8 @@ class TestResourceManager(testroot.KBApiTest):
@mock.patch.object(rpc_client, 'EngineClient')
@mock.patch.object(sync_manager, 'EndpointCache')
@mock.patch.object(sync_manager, 'NovaClient')
def test_post_no_flavors_in_source_region(self, mock_nova,
@mock.patch.object(sync_manager, 'db_api')
def test_post_no_flavors_in_source_region(self, mock_db, mock_nova,
mock_endpoint_cache,
mock_rpc_client):
data = {"resource_set": {"resources": [SOURCE_FLAVOR],
@ -382,27 +401,21 @@ class TestResourceManager(testroot.KBApiTest):
get_url = FAKE_URL + '/' + FAKE_JOB
self.app.get(get_url, headers=FAKE_HEADERS)
self.assertEqual(1,
mock_db_api.resource_sync_list_by_job_id
mock_db_api.resource_sync_list
.call_count)
@mock.patch.object(rpc_client, 'EngineClient')
@mock.patch.object(sync_manager, 'db_api')
def test_get_detail_job_by_name(self, mock_db_api, mock_rpc_client):
get_url = FAKE_URL + '/' + JOB_NAME
self.app.get(get_url, headers=FAKE_HEADERS)
self.assertEqual(1,
mock_db_api.resource_sync_list_by_job_name
.call_count)
@mock.patch.object(rpc_client, 'EngineClient')
@mock.patch.object(sync_manager, 'db_api')
def test_entries_to_database(self, mock_db_api, mock_rpc_client):
@mock.patch.object(sync_manager, 'uuidutils')
def test_entries_to_database(self, mock_uuid, mock_db_api,
mock_rpc_client):
time_now = timeutils.utcnow()
result = Result(JOB_NAME, FAKE_JOB, FAKE_STATUS, time_now)
mock_db_api.sync_job_create.return_value = result
mock_uuid.generate_uuid.return_value = FAKE_JOB
sync_manager.ResourceSyncController()._entries_to_database(
self.ctx, FAKE_TARGET_REGION, FAKE_SOURCE_REGION,
FAKE_RESOURCE_ID, FAKE_RESOURCE_TYPE, FAKE_TENANT, JOB_NAME)
FAKE_RESOURCE_ID, FAKE_RESOURCE_TYPE, result)
mock_db_api.resource_sync_create.assert_called_once_with(
self.ctx, result, FAKE_TARGET_REGION[0], FAKE_SOURCE_REGION,
FAKE_RESOURCE_ID[0], FAKE_RESOURCE_TYPE)
FAKE_RESOURCE_ID[0], FAKE_RESOURCE_TYPE, FAKE_JOB)

View File

@ -24,7 +24,6 @@ from kingbird.common import consts
from kingbird.common import exceptions
from kingbird.db import api as api
from kingbird.db.sqlalchemy import api as db_api
from kingbird.db.sqlalchemy import models as db_models
from kingbird.tests import base
from kingbird.tests import utils
@ -70,8 +69,7 @@ class DBAPIResourceSyncTest(base.KingbirdTestCase):
self.ctx = utils.dummy_context()
def test_create_sync_job(self):
job = self.sync_job_create(self.ctx, job_name='fake_job_name',
job_id=UUID1)
job = self.sync_job_create(self.ctx, job_id=UUID1)
self.assertIsNotNone(job)
self.assertEqual(consts.JOB_PROGRESS, job.sync_status)
created_job = db_api.sync_job_list(self.ctx, "active")
@ -79,117 +77,107 @@ class DBAPIResourceSyncTest(base.KingbirdTestCase):
created_job[0].get('sync_status'))
def test_primary_key_sync_job(self):
self.sync_job_create(self.ctx, job_name='fake_job_name', job_id=UUID1)
self.sync_job_create(self.ctx, job_id=UUID1)
self.assertRaises(oslo_db.exception.DBDuplicateEntry,
self.sync_job_create, self.ctx,
job_name='fake_job_name', job_id=UUID1)
job_id=UUID1)
def test_sync_job_update(self):
job = self.sync_job_create(self.ctx, job_name='fake_job_name',
job_id=UUID1)
job = self.sync_job_create(self.ctx, job_id=UUID1)
self.assertIsNotNone(job)
db_api.sync_job_update(self.ctx, UUID1, consts.JOB_SUCCESS)
updated_job = db_api.sync_job_list(self.ctx)
self.assertEqual(consts.JOB_SUCCESS, updated_job[0].get('sync_status'))
def test_active_jobs(self):
job = self.sync_job_create(self.ctx, job_name='fake_job_name',
job_id=UUID1)
job = self.sync_job_create(self.ctx, job_id=UUID1)
self.assertIsNotNone(job)
query = db_api.sync_job_list(self.ctx, 'active')
self.assertEqual(query[0].get('sync_status'), job.sync_status)
def test_sync_job_status(self):
job = self.sync_job_create(self.ctx, job_name='fake_job_name',
job_id=UUID1)
job = self.sync_job_create(self.ctx, job_id=UUID1)
self.assertIsNotNone(job)
query = db_api.sync_job_status(self.ctx, job_id=UUID1)
self.assertEqual(query, consts.JOB_PROGRESS)
def test_update_invalid_job(self):
job = self.sync_job_create(self.ctx, job_name='fake_job_name',
job_id=UUID1)
job = self.sync_job_create(self.ctx, job_id=UUID1)
self.assertIsNotNone(job)
self.assertRaises(exceptions.JobNotFound,
db_api.sync_job_update, self.ctx, 'fake_job',
consts.JOB_SUCCESS)
def test_resource_sync_create(self):
job = self.sync_job_create(self.ctx, job_name='fake_job_name',
job_id=UUID1)
job = self.sync_job_create(self.ctx, job_id=UUID1)
resource_sync_create = self.resource_sync_create(
self.ctx, job=job, region='Fake_region',
source_region='Fake_region2', resource='fake_key',
resource_type='keypair')
resource_type='keypair', job_id=job.job_id)
self.assertIsNotNone(resource_sync_create)
self.assertEqual(consts.JOB_PROGRESS, resource_sync_create.sync_status)
def test_resource_sync_status(self):
job = self.sync_job_create(self.ctx, job_name='fake_job_name',
job_id=UUID1)
job = self.sync_job_create(self.ctx, job_id=UUID1)
resource_sync_create = self.resource_sync_create(
self.ctx, job=job, region='Fake_region',
source_region='Fake_region2', resource='fake_key',
resource_type='keypair')
resource_type='keypair', job_id=job.job_id)
self.assertIsNotNone(resource_sync_create)
status = db_api.resource_sync_status(self.ctx, job.id)
status = db_api.resource_sync_status(self.ctx, job.job_id)
self.assertEqual(consts.JOB_PROGRESS, status[0])
def test_resource_sync_update(self):
job = self.sync_job_create(self.ctx, job_name='fake_job_name',
job_id=UUID1)
job = self.sync_job_create(self.ctx, job_id=UUID1)
resource_sync_create = self.resource_sync_create(
self.ctx, job=job, region='Fake_region',
source_region='Fake_region2', resource='fake_key',
resource_type='keypair')
resource_type='keypair', job_id=job.job_id)
self.assertIsNotNone(resource_sync_create)
self.assertEqual(consts.JOB_PROGRESS, resource_sync_create.sync_status)
self.assertEqual(consts.JOB_PROGRESS,
resource_sync_create.sync_status)
db_api.resource_sync_update(
self.ctx, job.id, 'Fake_region', 'fake_key', consts.JOB_SUCCESS)
rows = db_api.model_query(self.ctx, db_models.ResourceSync).\
filter_by(job_id=UUID1).all()
individual_result = db_api.sync_individual_resource(rows)
self.assertEqual(consts.JOB_SUCCESS, individual_result[0].
get('sync_status'))
updated_job = db_api.resource_sync_list_by_job_id(self.ctx, job.id)
self.ctx, job.job_id, consts.JOB_SUCCESS)
updated_job = db_api.resource_sync_list(self.ctx, job.job_id,
resource_type='keypair')
self.assertEqual(consts.JOB_SUCCESS, updated_job[0].get('sync_status'))
updated_job = db_api.\
resource_sync_list_by_job_name(self.ctx,
'fake_job_name')
self.assertEqual(consts.JOB_SUCCESS, updated_job[0].get('sync_status'))
def test_foreign_key(self):
job = self.sync_job_create(self.ctx, job_name='fake_job_name',
job_id=UUID1)
job = self.sync_job_create(self.ctx, job_id=UUID1)
self.assertIsNotNone(job)
resource_sync_create = self.resource_sync_create(
self.ctx, job=job, region='Fake_region',
source_region='Fake_region2', resource='fake_key',
resource_type='keypair')
resource_type='keypair', job_id=job.job_id)
self.assertIsNotNone(resource_sync_create)
self.assertEqual(job.id, resource_sync_create.job_id)
self.assertEqual(job.job_id, resource_sync_create.job_id)
def test_delete_sync_job(self):
job_id = UUID1
job = self.sync_job_create(self.ctx, job_name='fake_job_name',
job_id=UUID1)
job = self.sync_job_create(self.ctx, job_id=UUID1)
self.assertIsNotNone(job)
self.resource_sync_create(
self.ctx, job=job, region='Fake_region',
source_region='Fake_region2', resource='fake_key',
resource_type='keypair')
resource_type='keypair', job_id=job.job_id)
db_api.sync_job_delete(self.ctx, job_id)
updated_job = db_api.sync_job_list(self.ctx)
self.assertEqual(0, len(updated_job))
def test_composite_primary_key(self):
job = self.sync_job_create(self.ctx, job_name='fake_job_name',
job_id=UUID1)
job = self.sync_job_create(self.ctx, job_id=UUID1)
self.resource_sync_create(
self.ctx, job=job, region='Fake_region',
source_region='Fake_region2', resource='fake_key',
resource_type='keypair')
resource_type='keypair', job_id=job.job_id)
self.assertRaises(oslo_db.exception.DBDuplicateEntry,
self.resource_sync_create, self.ctx, job=job,
region='Fake_region', source_region='Fake_region2',
resource='fake_key', resource_type='keypair')
resource='fake_key', resource_type='keypair',
job_id=job.job_id)
def test_delete_failure_sync_job(self):
job = self.sync_job_create(self.ctx, job_id=UUID1)
db_api._delete_failure_sync_job(self.ctx, job.job_id)
self.assertNotIn(job.job_id, db_api.sync_job_list)

View File

@ -204,8 +204,8 @@ class TestNovaClient(base.KingbirdTestCase):
mock_novaclient.Client().flavors.list.return_value = [fake_flavor]
nv_client.check_and_delete_flavor_in_target_region(fake_flavor,
fake_flavor._info)
mock_novaclient.Client().flavors.get.\
assert_called_once_with(fake_flavor.id)
mock_novaclient.Client().flavors.find.\
assert_called_once_with(name=fake_flavor.name)
mock_novaclient.Client().flavors.list.\
assert_called_once_with(is_public=None)
mock_novaclient.Client().flavors.delete.\
@ -236,8 +236,8 @@ class TestNovaClient(base.KingbirdTestCase):
ram=fake_flavor.ram, ephemeral=fake_flavor.ephemeral,
rxtx_factor=fake_flavor.rxtx_factor,
swap=fake_flavor.swap, vcpus=fake_flavor.vcpus)
mock_novaclient.Client().flavors.get.\
assert_called_once_with(fake_flavor.id)
mock_novaclient.Client().flavors.find.\
assert_called_once_with(name=fake_flavor.name)
mock_novaclient.Client().flavors.list.\
assert_called_once_with(is_public=None)
@ -274,8 +274,8 @@ class TestNovaClient(base.KingbirdTestCase):
ram=fake_flavor.ram, ephemeral=fake_flavor.ephemeral,
rxtx_factor=fake_flavor.rxtx_factor,
swap=fake_flavor.swap, vcpus=fake_flavor.vcpus)
mock_novaclient.Client().flavors.get.\
assert_called_once_with(fake_flavor.id)
mock_novaclient.Client().flavors.find.\
assert_called_once_with(name=fake_flavor.name)
mock_novaclient.Client().flavors.list.\
assert_called_once_with(is_public=None)
self.assertEqual(mock_novaclient.Client().flavor_access.

View File

@ -13,6 +13,7 @@
# under the License.
import mock
from kingbird.common import consts
from kingbird.engine import flavor_sync_manager
from kingbird.tests import base
from kingbird.tests import utils
@ -49,29 +50,31 @@ class TestFlavorSyncManager(base.KingbirdTestCase):
@mock.patch.object(flavor_sync_manager, 'NovaClient')
@mock.patch.object(flavor_sync_manager, 'EndpointCache')
@mock.patch.object(flavor_sync_manager.FlavorSyncManager,
'create_resources')
'create_resources_in_region')
@mock.patch.object(flavor_sync_manager, 'db_api')
def test_flavor_sync_force_false_no_access_tenants(
self, mock_db_api, mock_create_resource, mock_endpoint_cache,
mock_nova):
access_tenants = None
fake_flavor = FakeFlavor('fake_id', 512, 2, 30, 'fake_flavor', 1,
fake_flavor = FakeFlavor('fake_id', 512, 2, 30, SOURCE_FLAVOR, 1,
1.0)
payload = dict()
payload['target'] = [FAKE_TARGET_REGION]
payload['force'] = DEFAULT_FORCE
payload['source'] = FAKE_SOURCE_REGION
payload['resources'] = [fake_flavor]
resource_job = dict()
resource_job['id'] = FAKE_JOB_ID
resource_job['target_region'] = [FAKE_TARGET_REGION]
resource_job['source_region'] = FAKE_SOURCE_REGION
resource_job['resource'] = [SOURCE_FLAVOR]
mock_endpoint_cache().get_session_from_token.\
return_value = 'fake_session'
mock_nova().get_flavor.return_value = fake_flavor
mock_db_api().resource_sync_status.return_value = [JOB_RESULT]
mock_db_api.resource_sync_list.return_value = [resource_job]
fsm = flavor_sync_manager.FlavorSyncManager()
fsm.resource_sync(self.ctxt, FAKE_JOB_ID, payload)
fsm.resource_sync(self.ctxt, FAKE_JOB_ID, DEFAULT_FORCE)
mock_create_resource.assert_called_once_with(
FAKE_JOB_ID, payload['force'], payload['target'][0], fake_flavor,
'fake_session', self.ctxt, access_tenants)
FAKE_JOB_ID, DEFAULT_FORCE, resource_job['target_region'],
fake_flavor, 'fake_session', self.ctxt, access_tenants)
mock_nova().get_flavor_access_tenant.assert_not_called
mock_db_api.resource_sync_list.\
assert_called_once_with(self.ctxt, FAKE_JOB_ID, consts.FLAVOR)
mock_db_api.resource_sync_status.\
assert_called_once_with(self.ctxt, FAKE_JOB_ID)
mock_db_api.sync_job_update.\
@ -80,27 +83,27 @@ class TestFlavorSyncManager(base.KingbirdTestCase):
@mock.patch.object(flavor_sync_manager, 'NovaClient')
@mock.patch.object(flavor_sync_manager, 'EndpointCache')
@mock.patch.object(flavor_sync_manager.FlavorSyncManager,
'create_resources')
'create_resources_in_region')
@mock.patch.object(flavor_sync_manager, 'db_api')
def test_flavor_sync_force_true_no_access_tenants(
self, mock_db_api, mock_create_resource, mock_endpoint_cache,
mock_nova):
access_tenants = None
fake_flavor = FakeFlavor('fake_id', 512, 2, 30, 'fake_flavor', 1,
fake_flavor = FakeFlavor('fake_id', 512, 2, 30, SOURCE_FLAVOR, 1,
1.0)
payload = dict()
payload['target'] = [FAKE_TARGET_REGION]
payload['force'] = True
payload['source'] = FAKE_SOURCE_REGION
payload['resources'] = [fake_flavor]
resource_job = dict()
resource_job['id'] = FAKE_JOB_ID
resource_job['target_region'] = [FAKE_TARGET_REGION]
resource_job['source_region'] = FAKE_SOURCE_REGION
resource_job['resource'] = [SOURCE_FLAVOR]
mock_endpoint_cache().get_session_from_token.\
return_value = 'fake_session'
mock_nova().get_flavor.return_value = fake_flavor
mock_db_api().resource_sync_status.return_value = [JOB_RESULT]
mock_db_api.resource_sync_list.return_value = [resource_job]
fsm = flavor_sync_manager.FlavorSyncManager()
fsm.resource_sync(self.ctxt, FAKE_JOB_ID, payload)
fsm.resource_sync(self.ctxt, FAKE_JOB_ID, True)
mock_create_resource.assert_called_once_with(
FAKE_JOB_ID, payload['force'], payload['target'][0], fake_flavor,
FAKE_JOB_ID, True, resource_job['target_region'], fake_flavor,
'fake_session', self.ctxt, access_tenants)
mock_nova().get_flavor_access_tenant.assert_not_called
mock_db_api.resource_sync_status.\
@ -111,31 +114,32 @@ class TestFlavorSyncManager(base.KingbirdTestCase):
@mock.patch.object(flavor_sync_manager, 'NovaClient')
@mock.patch.object(flavor_sync_manager, 'EndpointCache')
@mock.patch.object(flavor_sync_manager.FlavorSyncManager,
'create_resources')
'create_resources_in_region')
@mock.patch.object(flavor_sync_manager, 'db_api')
def test_flavor_sync_force_false_with_access_tenants(
self, mock_db_api, mock_create_resource, mock_endpoint_cache,
mock_nova):
access_tenants = FAKE_TENANTS
fake_flavor = FakeFlavor('fake_id', 512, 2, 30, 'fake_flavor', 1,
fake_flavor = FakeFlavor('fake_id', 512, 2, 30, SOURCE_FLAVOR, 1,
1.0, False)
payload = dict()
payload['target'] = [FAKE_TARGET_REGION]
payload['force'] = DEFAULT_FORCE
payload['source'] = FAKE_SOURCE_REGION
payload['resources'] = [fake_flavor]
resource_job = dict()
resource_job['id'] = FAKE_JOB_ID
resource_job['target_region'] = [FAKE_TARGET_REGION]
resource_job['source_region'] = FAKE_SOURCE_REGION
resource_job['resource'] = [SOURCE_FLAVOR]
mock_nova().get_flavor_access_tenant.return_value = access_tenants
mock_endpoint_cache().get_session_from_token.\
return_value = 'fake_session'
mock_nova().get_flavor.return_value = fake_flavor
mock_db_api.resource_sync_list.return_value = [resource_job]
mock_db_api().resource_sync_status.return_value = [JOB_RESULT]
fsm = flavor_sync_manager.FlavorSyncManager()
fsm.resource_sync(self.ctxt, FAKE_JOB_ID, payload)
fsm.resource_sync(self.ctxt, FAKE_JOB_ID, DEFAULT_FORCE)
mock_create_resource.assert_called_once_with(
FAKE_JOB_ID, payload['force'], payload['target'][0], fake_flavor,
'fake_session', self.ctxt, access_tenants)
FAKE_JOB_ID, DEFAULT_FORCE, resource_job['target_region'],
fake_flavor, 'fake_session', self.ctxt, access_tenants)
mock_nova().get_flavor_access_tenant.\
assert_called_once_with(fake_flavor)
assert_called_once_with([fake_flavor.name])
mock_db_api.resource_sync_status.\
assert_called_once_with(self.ctxt, FAKE_JOB_ID)
mock_db_api.sync_job_update.\
@ -144,73 +148,53 @@ class TestFlavorSyncManager(base.KingbirdTestCase):
@mock.patch.object(flavor_sync_manager, 'NovaClient')
@mock.patch.object(flavor_sync_manager, 'EndpointCache')
@mock.patch.object(flavor_sync_manager.FlavorSyncManager,
'create_resources')
'create_resources_in_region')
@mock.patch.object(flavor_sync_manager, 'db_api')
def test_flavor_sync_force_true_with_access_tenants(
self, mock_db_api, mock_create_resource, mock_endpoint_cache,
mock_nova):
access_tenants = FAKE_TENANTS
fake_flavor = FakeFlavor('fake_id', 512, 2, 30, 'fake_flavor', 1,
fake_flavor = FakeFlavor('fake_id', 512, 2, 30, SOURCE_FLAVOR, 1,
1.0, False)
payload = dict()
payload['target'] = [FAKE_TARGET_REGION]
payload['force'] = True
payload['source'] = FAKE_SOURCE_REGION
payload['resources'] = [fake_flavor]
resource_job = dict()
resource_job['id'] = FAKE_JOB_ID
resource_job['target_region'] = [FAKE_TARGET_REGION]
resource_job['source_region'] = FAKE_SOURCE_REGION
resource_job['resource'] = [SOURCE_FLAVOR]
mock_nova().get_flavor_access_tenant.return_value = access_tenants
mock_endpoint_cache().get_session_from_token.\
return_value = 'fake_session'
mock_nova().get_flavor.return_value = fake_flavor
mock_db_api.resource_sync_list.return_value = [resource_job]
mock_db_api().resource_sync_status.return_value = [JOB_RESULT]
fsm = flavor_sync_manager.FlavorSyncManager()
fsm.resource_sync(self.ctxt, FAKE_JOB_ID, payload)
fsm.resource_sync(self.ctxt, FAKE_JOB_ID, True)
mock_create_resource.assert_called_once_with(
FAKE_JOB_ID, payload['force'], payload['target'][0], fake_flavor,
FAKE_JOB_ID, True, resource_job['target_region'], fake_flavor,
'fake_session', self.ctxt, access_tenants)
mock_nova().get_flavor_access_tenant.\
assert_called_once_with(fake_flavor)
assert_called_once_with([fake_flavor.name])
mock_db_api.resource_sync_status.\
assert_called_once_with(self.ctxt, FAKE_JOB_ID)
mock_db_api.sync_job_update.\
assert_called_once_with(self.ctxt, FAKE_JOB_ID, JOB_RESULT)
@mock.patch.object(flavor_sync_manager.FlavorSyncManager,
'create_resources')
def test_create_resources_in_region(self, mock_create_resource):
access_tenants = None
fake_flavor = FakeFlavor('fake_id', 512, 2, 30, 'fake_flavor', 1,
1.0, False)
payload = dict()
payload['target'] = [FAKE_TARGET_REGION]
payload['force'] = True
payload['source'] = FAKE_SOURCE_REGION
payload['resources'] = [fake_flavor]
fsm = flavor_sync_manager.FlavorSyncManager()
fsm.create_resources_in_region(FAKE_JOB_ID, payload['force'],
payload['target'], fake_flavor,
'fake_session', self.ctxt,
access_tenants)
mock_create_resource.assert_called_once_with(
FAKE_JOB_ID, payload['force'], payload['target'][0], fake_flavor,
'fake_session', self.ctxt, access_tenants)
@mock.patch.object(flavor_sync_manager, 'NovaClient')
@mock.patch.object(flavor_sync_manager, 'db_api')
def test_create_resources(self, mock_db_api, mock_nova):
access_tenants = None
fake_flavor = FakeFlavor('fake_id', 512, 2, 30, 'fake_flavor', 1,
fake_flavor = FakeFlavor('fake_id', 512, 2, 30, SOURCE_FLAVOR, 1,
1.0, False)
payload = dict()
payload['target'] = [FAKE_TARGET_REGION]
payload['force'] = True
payload['source'] = FAKE_SOURCE_REGION
payload['resources'] = [fake_flavor]
resource_job = dict()
resource_job['id'] = FAKE_JOB_ID
resource_job['target_region'] = [FAKE_TARGET_REGION]
resource_job['source_region'] = FAKE_SOURCE_REGION
resource_job['resource'] = [SOURCE_FLAVOR]
fsm = flavor_sync_manager.FlavorSyncManager()
fsm.create_resources(FAKE_JOB_ID, payload['force'],
payload['target'][0], fake_flavor,
'fake_session', self.ctxt, access_tenants)
fsm.create_resources_in_region(
FAKE_JOB_ID, DEFAULT_FORCE, resource_job['source_region'],
fake_flavor, 'fake_session', self.ctxt, access_tenants)
mock_nova().create_flavor.assert_called_once_with(
payload['force'], fake_flavor, access_tenants)
DEFAULT_FORCE, fake_flavor, access_tenants)
mock_db_api.resource_sync_update.assert_called_once_with(
self.ctxt, FAKE_JOB_ID, payload['target'][0], fake_flavor.name,
JOB_RESULT)
self.ctxt, FAKE_JOB_ID, JOB_RESULT)

View File

@ -108,11 +108,12 @@ class TestImageSyncManager(base.KingbirdTestCase):
'aki', 'aki', FAKE_ID)
fake_ari_image = FakeARIimage(0, 'False', 0, 'fake_ramdisk_image',
'ari', 'ari', FAKE_ID)
payload = dict()
payload['target'] = [FAKE_TARGET_REGION]
payload['force'] = DEFAULT_FORCE
payload['source'] = FAKE_SOURCE_REGION
payload['resources'] = [fake_ami_image.id]
resource_job = dict()
resource_job['id'] = FAKE_JOB_ID
resource_job['target_region'] = [FAKE_TARGET_REGION]
resource_job['source_region'] = FAKE_SOURCE_REGION
resource_job['resource'] = fake_ami_image.id
mock_db_api.resource_sync_list.return_value = [resource_job]
expected_resources = {
'kernel_image': fake_aki_image,
'ramdisk_image': fake_ari_image
@ -120,7 +121,7 @@ class TestImageSyncManager(base.KingbirdTestCase):
mock_glance_adapter.check_dependent_images.\
return_value = expected_resources
ism = image_sync_manager.ImageSyncManager()
ism.resource_sync(self.ctxt, FAKE_JOB_ID, payload)
ism.resource_sync(self.ctxt, FAKE_JOB_ID, DEFAULT_FORCE)
mock_glance_adapter.check_dependent_images.\
assert_called_once_with(self.ctxt, FAKE_SOURCE_REGION,
fake_ami_image.id)
@ -141,19 +142,17 @@ class TestImageSyncManager(base.KingbirdTestCase):
mock_glance_adapter, mock_db_api):
fake_qcow2_image = FakeQCOW2Image(0, 'False', 0, FAKE_RESOURCE, 'bare',
'qcow2', FAKE_ID)
payload = dict()
payload['target'] = [FAKE_TARGET_REGION]
payload['force'] = DEFAULT_FORCE
payload['source'] = FAKE_SOURCE_REGION
payload['resources'] = [fake_qcow2_image.id]
resource_job = dict()
resource_job['id'] = FAKE_JOB_ID
resource_job['target_region'] = [FAKE_TARGET_REGION]
resource_job['source_region'] = FAKE_SOURCE_REGION
resource_job['resource'] = fake_qcow2_image.id
mock_db_api.resource_sync_list.return_value = [resource_job]
expected_resources = None
mock_glance_adapter.check_dependent_images.\
return_value = expected_resources
ism = image_sync_manager.ImageSyncManager()
ism.resource_sync(self.ctxt, FAKE_JOB_ID, payload)
mock_glance_adapter.check_dependent_images.\
assert_called_once_with(self.ctxt, FAKE_SOURCE_REGION,
fake_qcow2_image.id)
ism.resource_sync(self.ctxt, FAKE_JOB_ID, DEFAULT_FORCE)
self.assertEqual(mock_glance_client().get_image_data.call_count, 1)
self.assertEqual(mock_glance_client().create_image.call_count, 1)
self.assertEqual(mock_glance_upload.call_count, 1)
@ -171,16 +170,17 @@ class TestImageSyncManager(base.KingbirdTestCase):
mock_glance_adapter, mock_db_api):
fake_aki_image = FakeAKIimage(0, 'False', 0, 'fake_kernel_image',
'aki', 'aki', FAKE_ID)
payload = dict()
payload['target'] = [FAKE_TARGET_REGION]
payload['force'] = DEFAULT_FORCE
payload['source'] = FAKE_SOURCE_REGION
payload['resources'] = [fake_aki_image.id]
resource_job = dict()
resource_job['id'] = FAKE_JOB_ID
resource_job['target_region'] = [FAKE_TARGET_REGION]
resource_job['source_region'] = FAKE_SOURCE_REGION
resource_job['resource'] = fake_aki_image.id
mock_db_api.resource_sync_list.return_value = [resource_job]
expected_resources = None
mock_glance_adapter.check_dependent_images.\
return_value = expected_resources
ism = image_sync_manager.ImageSyncManager()
ism.resource_sync(self.ctxt, FAKE_JOB_ID, payload)
ism.resource_sync(self.ctxt, FAKE_JOB_ID, DEFAULT_FORCE)
mock_glance_adapter.check_dependent_images.\
assert_called_once_with(self.ctxt, FAKE_SOURCE_REGION,
fake_aki_image.id)
@ -201,16 +201,17 @@ class TestImageSyncManager(base.KingbirdTestCase):
mock_glance_adapter, mock_db_api):
fake_ari_image = FakeARIimage(0, 'False', 0, 'fake_ramdisk_image',
'ari', 'ari', FAKE_ID)
payload = dict()
payload['target'] = [FAKE_TARGET_REGION]
payload['force'] = DEFAULT_FORCE
payload['source'] = FAKE_SOURCE_REGION
payload['resources'] = [fake_ari_image.id]
resource_job = dict()
resource_job['id'] = FAKE_JOB_ID
resource_job['target_region'] = [FAKE_TARGET_REGION]
resource_job['source_region'] = FAKE_SOURCE_REGION
resource_job['resource'] = fake_ari_image.id
mock_db_api.resource_sync_list.return_value = [resource_job]
expected_resources = None
mock_glance_adapter.check_dependent_images.\
return_value = expected_resources
ism = image_sync_manager.ImageSyncManager()
ism.resource_sync(self.ctxt, FAKE_JOB_ID, payload)
ism.resource_sync(self.ctxt, FAKE_JOB_ID, DEFAULT_FORCE)
mock_glance_adapter.check_dependent_images.\
assert_called_once_with(self.ctxt, FAKE_SOURCE_REGION,
fake_ari_image.id)
@ -231,7 +232,6 @@ class TestImageSyncManager(base.KingbirdTestCase):
True)
mock_db_api.resource_sync_update.\
assert_called_once_with(self.ctxt, FAKE_JOB_ID,
FAKE_TARGET_REGION, FAKE_RESOURCE,
FAKE_RESULT)
@patch('kingbird.engine.image_sync_manager.db_api')
@ -242,5 +242,4 @@ class TestImageSyncManager(base.KingbirdTestCase):
False)
mock_db_api.resource_sync_update.\
assert_called_once_with(self.ctxt, FAKE_JOB_ID,
FAKE_TARGET_REGION, FAKE_RESOURCE,
FAKE_RESULT_FAIL)
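
The image tests follow the same (context, job_id, force) contract, with one extra wrinkle encoded in the mocks above: check_dependent_images returns a kernel/ramdisk pair for an AMI image and None for self-contained formats such as qcow2, aki and ari, and the qcow2 test asserts one get_image_data/create_image/upload cycle per image. A small helper below shows the branching those mocked return values imply; it is an illustrative sketch, not the glance adapter code itself, and the string image ids are placeholders.

    def images_to_copy(image, dependent_images):
        """Return the images one sync row expands to.

        dependent_images mirrors what check_dependent_images is mocked to
        return: a dict with 'kernel_image' and 'ramdisk_image' for an AMI
        image, or None for formats that stand alone.
        """
        images = [image]
        if dependent_images:
            images.append(dependent_images['kernel_image'])
            images.append(dependent_images['ramdisk_image'])
        return images

    # a qcow2 image copies alone, an AMI image drags two more images along
    assert len(images_to_copy('cirros.qcow2', None)) == 1
    assert len(images_to_copy('ami-img', {'kernel_image': 'aki-img',
                                          'ramdisk_image': 'ari-img'})) == 3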


@ -13,6 +13,7 @@
# under the License.
import mock
from kingbird.common import consts
from kingbird.engine import keypair_sync_manager
from kingbird.tests import base
from kingbird.tests import utils
@ -39,54 +40,56 @@ class TestKeypairSyncManager(base.KingbirdTestCase):
@mock.patch.object(keypair_sync_manager, 'NovaClient')
@mock.patch.object(keypair_sync_manager, 'EndpointCache')
@mock.patch.object(keypair_sync_manager.KeypairSyncManager,
'create_resources')
@mock.patch.object(keypair_sync_manager, 'db_api')
def test_keypair_sync_force_false(self, mock_db_api, mock_create_resource,
def test_keypair_sync_force_false(self, mock_db_api,
mock_endpoint_cache, mock_nova):
payload = dict()
payload['target'] = [FAKE_TARGET_REGION]
payload['force'] = DEFAULT_FORCE
payload['source'] = FAKE_SOURCE_REGION
payload['resources'] = [SOURCE_KEYPAIR]
resource_job = dict()
resource_job['id'] = FAKE_JOB_ID
resource_job['target_region'] = [FAKE_TARGET_REGION]
resource_job['source_region'] = FAKE_SOURCE_REGION
resource_job['resource'] = [SOURCE_KEYPAIR]
fake_key = FakeKeypair('fake_name', 'fake-rsa')
mock_endpoint_cache().get_session_from_token.\
return_value = 'fake_session'
mock_db_api.resource_sync_list.return_value = [resource_job]
mock_nova().get_keypairs.return_value = fake_key
ksm = keypair_sync_manager.KeypairSyncManager()
ksm.resource_sync(self.ctxt, FAKE_JOB_ID, payload)
mock_create_resource.assert_called_once_with(
FAKE_JOB_ID, payload['force'], payload['target'][0], fake_key,
'fake_session', self.ctxt)
ksm.resource_sync(self.ctxt, FAKE_JOB_ID, DEFAULT_FORCE)
mock_db_api.resource_sync_list.assert_called_once_with(
self.ctxt, FAKE_JOB_ID, consts.KEYPAIR)
mock_nova().get_keypairs.assert_called_once_with(
resource_job['resource'])
@mock.patch.object(keypair_sync_manager, 'NovaClient')
@mock.patch.object(keypair_sync_manager, 'EndpointCache')
@mock.patch.object(keypair_sync_manager.KeypairSyncManager,
'create_resources')
@mock.patch.object(keypair_sync_manager, 'db_api')
def test_keypair_sync_force_true(self, mock_db_api, mock_create_resource,
mock_endpoint_cache, mock_nova):
payload = dict()
payload['target'] = [FAKE_TARGET_REGION]
payload['force'] = True
payload['source'] = FAKE_SOURCE_REGION
payload['resources'] = [SOURCE_KEYPAIR]
def test_keypair_sync_force_true(self, mock_db_api, mock_endpoint_cache,
mock_nova):
resource_job = dict()
resource_job['id'] = FAKE_JOB_ID
resource_job['target_region'] = [FAKE_TARGET_REGION]
resource_job['source_region'] = FAKE_SOURCE_REGION
resource_job['resource'] = [SOURCE_KEYPAIR]
fake_key = FakeKeypair('fake_name', 'fake-rsa')
mock_endpoint_cache().get_session_from_token.\
return_value = 'fake_session'
mock_db_api.resource_sync_list.return_value = [resource_job]
mock_nova().get_keypairs.return_value = fake_key
ksm = keypair_sync_manager.KeypairSyncManager()
ksm.resource_sync(self.ctxt, FAKE_JOB_ID, payload)
mock_create_resource.assert_called_once_with(
FAKE_JOB_ID, payload['force'], payload['target'][0], fake_key,
'fake_session', self.ctxt)
ksm.resource_sync(self.ctxt, FAKE_JOB_ID, True)
mock_db_api.resource_sync_list.assert_called_once_with(
self.ctxt, FAKE_JOB_ID, consts.KEYPAIR)
mock_nova().get_keypairs.assert_called_once_with(
resource_job['resource'])
@mock.patch.object(keypair_sync_manager, 'NovaClient')
@mock.patch.object(keypair_sync_manager, 'db_api')
def test_create_keypair(self, mock_db_api, mock_nova):
fake_key = FakeKeypair('fake_name', 'fake-rsa')
ksm = keypair_sync_manager.KeypairSyncManager()
ksm.create_resources(FAKE_JOB_ID, DEFAULT_FORCE, FAKE_TARGET_REGION,
fake_key, 'fake_session', self.ctxt)
ksm.create_resources_in_region(
FAKE_JOB_ID, DEFAULT_FORCE, FAKE_TARGET_REGION,
fake_key, 'fake_session', self.ctxt)
mock_nova().create_keypairs.\
assert_called_once_with(DEFAULT_FORCE, fake_key)
self.assertEqual(1, mock_db_api.resource_sync_update.call_count)
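
The two keypair resource_sync tests differ only in the force flag they pass down, while test_create_keypair pins the target-side call to NovaClient.create_keypairs(force, keypair) followed by a single resource_sync_update. The body of create_keypairs is not part of this diff, so the snippet below is only a hypothetical illustration of the usual force semantics, with stand-in names throughout.

    def create_keypair(existing_names, keypair_name, force):
        # hypothetical helper, not kingbird code: what a `force` flag
        # typically means when the key already exists in the target region
        if keypair_name in existing_names and not force:
            return 'skipped'        # present and not forced: leave it alone
        existing_names.add(keypair_name)
        return 'created'            # new key, or forced re-creation

    region_keys = {'fake_name'}
    assert create_keypair(region_keys, 'fake_name', force=False) == 'skipped'
    assert create_keypair(region_keys, 'fake_name', force=True) == 'created'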


@ -33,6 +33,7 @@ class TestEngineService(base.KingbirdTestCase):
self.service_obj = service.EngineService('kingbird',
'kingbird-engine')
self.payload = {}
self.force = False
self.user_id = FAKE_USER
self.job_id = FAKE_JOB
@ -125,22 +126,22 @@ class TestEngineService(base.KingbirdTestCase):
self.service_obj.init_tgm()
self.service_obj.init_ksm()
self.service_obj.keypair_sync_for_user(
self.context, self.job_id, self.payload)
self.context, self.job_id, self.force)
mock_keypair_sync_manager().resource_sync.\
assert_called_once_with(self.context, self.job_id, self.payload)
assert_called_once_with(self.context, self.job_id, self.force)
@mock.patch.object(service, 'ImageSyncManager')
def test_image_sync(self, mock_image_sync_manager):
self.service_obj.init_tgm()
self.service_obj.init_ism()
self.service_obj.image_sync(self.context, self.job_id, self.payload)
self.service_obj.image_sync(self.context, self.job_id, self.force)
mock_image_sync_manager().resource_sync.\
assert_called_once_with(self.context, self.job_id, self.payload)
assert_called_once_with(self.context, self.job_id, self.force)
@mock.patch.object(service, 'FlavorSyncManager')
def test_flavor_sync(self, mock_flavor_sync_manager):
self.service_obj.init_tgm()
self.service_obj.init_fsm()
self.service_obj.flavor_sync(self.context, self.job_id, self.payload)
self.service_obj.flavor_sync(self.context, self.job_id, self.force)
mock_flavor_sync_manager().resource_sync.\
assert_called_once_with(self.context, self.job_id, self.payload)
assert_called_once_with(self.context, self.job_id, self.force)
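
The service-level tests only check a pass-through: each engine entry point now hands (context, job_id, force) straight to its manager's resource_sync. A minimal sketch of that delegation is shown below; it leaves out the RPC plumbing and the thread group manager that the real EngineService also initialises, and the class name is a stand-in.

    class EngineServiceSketch(object):
        # illustrative stand-in, not kingbird.engine.service.EngineService
        def __init__(self, keypair_mgr, image_mgr, flavor_mgr):
            self.ksm = keypair_mgr
            self.ism = image_mgr
            self.fsm = flavor_mgr

        def keypair_sync_for_user(self, context, job_id, force):
            self.ksm.resource_sync(context, job_id, force)

        def image_sync(self, context, job_id, force):
            self.ism.resource_sync(context, job_id, force)

        def flavor_sync(self, context, job_id, force):
            self.fsm.resource_sync(context, job_id, force)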