Fix failing tests for py27 and py34

Closes-Bug: #1769105

Change-Id: Ic709c19edee47a7bf4e73bca78c285de7f01e71a
This commit is contained in:
Vamsi Surapureddi 2018-05-05 00:20:40 +05:30
parent 424e7328cc
commit 5ec6bd1c41
8 changed files with 46 additions and 21 deletions

View File

@ -21,6 +21,7 @@ from oslo_utils import uuidutils
import pecan
from pecan import expose
from pecan import request
from six import iteritems
from kingbird.api.controllers import restcomm
from kingbird.api import enforcer as enf
@ -209,7 +210,7 @@ class BaseController(object):
pecan.abort(400, _('quota_set in body is required'))
try:
utils.validate_quota_limits(payload)
for resource, limit in payload.iteritems():
for resource, limit in iteritems(payload):
try:
# Update quota limit in DB
result = db_api.quota_update(
@ -444,7 +445,7 @@ class QuotaManagerV1Controller(BaseController):
if not enforce:
pecan.abort(403, _('Admin required'))
if args:
payload = args.keys()
payload = list(args.keys())
if not payload:
pecan.abort(400, _('quota_set in body required'))
self.delete_quota_resources(context, project_id, payload)

View File

@ -14,6 +14,7 @@
# limitations under the License.
import itertools
from six.moves import zip_longest
from kingbird.common import consts
from kingbird.common import exceptions
@ -26,7 +27,7 @@ def get_import_path(cls):
# Returns a iterator of tuples containing batch_size number of objects in each
def get_batch_projects(batch_size, project_list, fillvalue=None):
args = [iter(project_list)] * batch_size
return itertools.izip_longest(fillvalue=fillvalue, *args)
return zip_longest(fillvalue=fillvalue, *args)
# to do validate the quota limits

View File

@ -48,7 +48,7 @@ def check_dependent_images(context, region, image_id):
% {'ramdisk_image': ramdisk_image.id,
'region': region})
except exceptions.DependentImageNotFound():
except exceptions.DependentImageNotFound:
raise
return {

View File

@ -119,7 +119,7 @@ class GlanceClient(object):
'virtual_size', 'schema']
# split out the usual key and the properties which are top-level
for key in six.iterkeys(image):
for key in six.iterkeys(image.__dict__):
if key not in fields_after_creation:
kwargs[key] = image.get(key)
@ -162,4 +162,5 @@ class GlanceUpload(object):
entire imagedata into an iterator and send this 65536kb chunk to
the glance image upload and there by omitting the usage of file.
"""
return self.received.next()
return next(self.received)

View File

@ -112,7 +112,7 @@ class NovaClient(base.DriverBase):
return keypair
except Exception as exception:
LOG.error('Exception Occurred: %s', exception.message)
LOG.error('Exception Occurred: %s', str(exception))
pass
def create_keypairs(self, force, keypair):

View File

@ -14,8 +14,9 @@
# limitations under the License.
import collections
from Queue import Queue
from queue import Queue
import re
from six import iteritems
import threading
import time
@ -133,7 +134,7 @@ class QuotaManager(manager.Manager):
project_id)
except exceptions.ProjectQuotaNotFound:
limits_from_db = {}
for current_resource in CONF.kingbird_global_limit.iteritems():
for current_resource in iteritems(CONF.kingbird_global_limit):
resource = re.sub('quota_', '', current_resource[0])
# If resource limit in DB, then use it or else use limit
# from conf file

View File

@ -83,6 +83,7 @@ class SyncJob(object):
self.job_id = id
self.sync_status = sync_status
self.created_at = created_at
self._sa_class_manager = True
class TestResourceManager(testroot.KBApiTest):
@ -370,15 +371,22 @@ class TestResourceManager(testroot.KBApiTest):
def test_delete_in_progress_job(self, mock_db_api, mock_rpc_client):
delete_url = FAKE_URL + '/' + FAKE_JOB
mock_db_api.sync_job_status.return_value = consts.JOB_PROGRESS
self.assertRaises(KeyError, self.app.delete_json, delete_url,
headers=FAKE_HEADERS)
self.assertRaises(
KeyError,
self.app.delete_json,
delete_url,
headers=FAKE_HEADERS
)
@mock.patch.object(rpc_client, 'EngineClient')
@mock.patch.object(sync_manager, 'db_api')
def test_get_job(self, mock_db_api, mock_rpc_client):
get_url = FAKE_URL
self.app.get(get_url, headers=FAKE_HEADERS)
self.assertEqual(1, mock_db_api.sync_job_list.call_count)
sync_job = SyncJob(FAKE_JOB, consts.JOB_PROGRESS, timeutils.utcnow())
mock_db_api.sync_job_list.return_value = sync_job
response = self.app.get(get_url, headers=FAKE_HEADERS)
self.assertEqual(response.status_int, 200)
self.assertEqual(response.json['job_set']['job_id'], FAKE_JOB)
@mock.patch.object(rpc_client, 'EngineClient')
@mock.patch.object(sync_manager, 'db_api')
@ -392,17 +400,30 @@ class TestResourceManager(testroot.KBApiTest):
@mock.patch.object(sync_manager, 'db_api')
def test_get_active_job(self, mock_db_api, mock_rpc_client):
get_url = FAKE_URL + '/active'
self.app.get(get_url, headers=FAKE_HEADERS)
self.assertEqual(1, mock_db_api.sync_job_list.call_count)
sync_job = SyncJob(FAKE_JOB, consts.JOB_PROGRESS, timeutils.utcnow())
mock_db_api.sync_job_list.return_value = sync_job
response = self.app.get(get_url, headers=FAKE_HEADERS)
self.assertEqual(response.status_int, 200)
self.assertEqual(response.json['job_set']['job_id'], FAKE_JOB)
@mock.patch.object(rpc_client, 'EngineClient')
@mock.patch.object(sync_manager, 'db_api')
def test_get_detail_job_by_id(self, mock_db_api, mock_rpc_client):
get_url = FAKE_URL + '/' + FAKE_JOB
self.app.get(get_url, headers=FAKE_HEADERS)
self.assertEqual(1,
mock_db_api.resource_sync_list
.call_count)
mocked_response = [{
'resource': 'fake_key',
'target_region': 'Fake_region',
'sync_status': 'SUCCESS',
'created_at': '2018-05-04T16:25:46.606433',
'updated_at': '2018-05-04T16:25:46.609030',
'source_region': 'Fake_region2',
'id': '205d7c52-5042-46e1-8873-5b5ffb171108',
'resource_type': 'keypair'
}]
mock_db_api.resource_sync_list.return_value = mocked_response
response = self.app.get(get_url, headers=FAKE_HEADERS)
self.assertEqual(response.status_int, 200)
self.assertEqual(response.json['job_set'], mocked_response)
@mock.patch.object(rpc_client, 'EngineClient')
@mock.patch.object(sync_manager, 'db_api')

View File

@ -11,7 +11,7 @@
# under the License.
from collections import Counter
import mock
from Queue import Queue
from queue import Queue
import uuid
from oslo_config import cfg
@ -25,7 +25,7 @@ CONF = cfg.CONF
FAKE_PROJECT = 'fake_project'
FAKE_REGION = 'fake_region'
FAKE_ENGINE_ID = str(uuid.uuid4())
NOVA_USAGE = {'ram': 100, 'cores': '50'}
NOVA_USAGE = {'ram': 100, 'cores': 50}
NEUTRON_USAGE = {'port': 10}
CINDER_USAGE = {'volumes': 18}
FAKE_REGION_DICT = {'region1': {'ram': 100},