Update hacking for Python3

The repo is Python 3 only now, so update hacking to version 3.0, which
supports Python 3.

Fix the problems it found.

Update the local hacking checks for the new flake8 plugin interface.

# To unbreak the gate:
Depends-On: https://review.opendev.org/715835

Change-Id: Icc2f4368cc90689d74510ce36fe77d2346aec625
Andreas Jaeger 2020-03-29 14:39:10 +02:00 committed by Erik Olof Gunnar Andersson
parent d96ed3fa1a
commit 2e3d8ab80d
32 changed files with 124 additions and 111 deletions
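
Most of the per-file fixes below follow one recurring pattern: identity comparisons against literals ("x is 'file'", "port is 53", "len(pools) is 0"), which the pyflakes bundled with flake8 3.x reports as F632 and which Python 3.8+ also flags with a SyntaxWarning. A minimal, repo-independent illustration of why these had to become == comparisons:

# Illustration only (not repo code): identity vs. equality with literals.
# "x is <literal>" checks object identity, an implementation detail;
# pyflakes reports it as F632 and Python 3.8+ emits a SyntaxWarning.
status = int("1000")          # a fresh int object created at runtime
print(status == 1000)         # True  - value comparison, always correct
print(status is 1000)         # False - different objects with equal values

name = "".join(["fi", "le"])
print(name == "file")         # True
print(name is "file")         # False - and a SyntaxWarning on 3.8+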

View File

@@ -39,7 +39,7 @@ class Zone:
         self.to_file(sys.stdout)

     def to_file(self, f):
-        if type(f) is 'file':
+        if type(f) == 'file':
             fd = f
         elif type(f) is str:
             if os.path.isdir(f):
@@ -203,5 +203,6 @@ def main():
     except IOError as e:
         LOG.error(e)

+
 if __name__ == '__main__':
     sys.exit(main())
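
A side note on the first hunk above: the == form silences the lint warning, but type(f) can never equal the string 'file' in Python 3 (the file builtin is gone, and type() returns a class object, not a name), so that branch stays unreachable either way. A hedged sketch of the kind of check that would actually distinguish an open stream from a path; resolve_stream is illustrative only, not a designate helper:

import io
import sys

def resolve_stream(f):
    # Hypothetical helper, not part of the commit: accept either an open
    # file-like object or a path string.
    if isinstance(f, io.IOBase):
        return f
    if isinstance(f, str):
        return open(f, "w")
    raise TypeError("expected a file object or a path, got %r" % type(f))

print(resolve_stream(sys.stdout) is sys.stdout)   # True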

View File

@@ -104,7 +104,7 @@ class BlacklistsController(rest.RestController):
         response = pecan.response

         if request.content_type == 'application/json-patch+json':
-            raise NotImplemented('json-patch not implemented')
+            raise NotImplementedError('json-patch not implemented')

         # Fetch the existing blacklist entry
         blacklist = self.central_api.get_blacklist(context, blacklist_id)

View File

@@ -108,7 +108,7 @@ class PoolsController(rest.RestController):
         response = pecan.response

         if request.content_type == 'application/json-patch+json':
-            raise NotImplemented('json-patch not implemented')
+            raise NotImplementedError('json-patch not implemented')

         # Fetch the existing pool
         pool = self.central_api.get_pool(context, pool_id)

View File

@@ -98,7 +98,7 @@ class TldsController(rest.RestController):
         body = request.body_dict
         response = pecan.response

         if request.content_type == 'application/json-patch+json':
-            raise NotImplemented('json-patch not implemented')
+            raise NotImplementedError('json-patch not implemented')

         # Fetch the existing tld
         tld = self.central_api.get_tld(context, tld_id)

View File

@@ -100,7 +100,7 @@ class TsigKeysController(rest.RestController):
         response = pecan.response

         if request.content_type == 'application/json-patch+json':
-            raise NotImplemented('json-patch not implemented')
+            raise NotImplementedError('json-patch not implemented')

         # Fetch the existing tsigkey entry
         tsigkey = self.central_api.get_tsigkey(context, tsigkey_id)

View File

@@ -155,7 +155,7 @@ class ZonesController(rest.RestController):
             # 1) "Nested" resources? records inside a recordset.
             # 2) What to do when a zone doesn't exist in the first place?
             # 3) ...?
-            raise NotImplemented('json-patch not implemented')
+            raise NotImplementedError('json-patch not implemented')
         else:
             # Update the zone object with the new values
             zone = DesignateAdapter.parse('API_v2', body, zone)

View File

@@ -121,7 +121,7 @@ class TransferRequestsController(rest.RestController):
         response = pecan.response

         if request.content_type == 'application/json-patch+json':
-            raise NotImplemented('json-patch not implemented')
+            raise NotImplementedError('json-patch not implemented')

         # Fetch the existing zone_transfer_request
         zone_transfer_request = self.central_api.get_zone_transfer_request(
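
A stand-alone illustration (not repo code) of why the six controller hunks above swap NotImplemented for NotImplementedError: NotImplemented is a sentinel singleton returned by binary special methods, not an exception class, so the old raise could never produce the intended error.

try:
    raise NotImplemented('json-patch not implemented')        # old pattern
except TypeError as exc:
    print(exc)    # 'NotImplementedType' object is not callable

try:
    raise NotImplementedError('json-patch not implemented')   # fixed pattern
except NotImplementedError as exc:
    print(exc)    # json-patch not implemented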

View File

@@ -149,7 +149,7 @@ class GdnsdBackend(base.AgentBackend):
     def _generate_zone_filename(self, zone_name):
         """Generate a filename for a zone file
         "/" is traslated into "@"
-        Non-valid characters are translated into \ NNN
+        Non-valid characters are translated into \\ NNN
         where NNN is a decimal integer in the range 0 - 255
         The filename is lowercase

View File

@@ -160,7 +160,7 @@ class Knot2Backend(base.AgentBackend):
         try:
             serial = out.split('|')[1].split()[1]
             return int(serial)
-        except Exception as e:
+        except Exception:
             LOG.error("Unable to parse knotc output: %r", out)
             raise exceptions.Backend("Unexpected knotc zone-status output")
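
The dropped "as e" bindings in this and the later object hunks match what recent pyflakes reports as F841 (local variable assigned but never used): the handler logs its own message and raises a different exception, so the caught object is never referenced. A short, self-contained sketch of the same shape; parse_serial is illustrative and ValueError stands in for designate's exceptions.Backend:

import logging

logging.basicConfig()
LOG = logging.getLogger(__name__)

def parse_serial(out):
    try:
        return int(out.split('|')[1].split()[1])
    except Exception:          # no "as e": the exception object is unused
        LOG.error("Unable to parse knotc output: %r", out)
        raise ValueError("Unexpected knotc zone-status output")

print(parse_serial("example.com. type: slave | serial: 42 | transaction: none"))   # 42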

View File

@@ -52,17 +52,17 @@ class NSD4Backend(base.Backend):
         self.pattern = self.options.get('pattern', 'slave')

     def _command(self, command):
         sock = eventlet.wrap_ssl(
             eventlet.connect((self.host, self.port)),
             keyfile=self.keyfile,
             certfile=self.certfile)
         stream = sock.makefile()
         stream.write('%s %s\n' % (self.NSDCT_VERSION, command))
         stream.flush()
         result = stream.read()
         stream.close()
         sock.close()
         return result

     def _execute_nsd4(self, command):
         try:

View File

@@ -50,7 +50,7 @@ class PDNS4Backend(base.Backend):
             self._build_url(zone=zone.name),
             headers=self.headers,
         )

-        return zone.status_code is 200
+        return zone.status_code == 200

     def create_zone(self, context, zone):
         """Create a DNS zone"""

View File

@@ -14,6 +14,7 @@
 # under the License.

 import re

+from hacking import core
 import pycodestyle

 # D701: Default parameter value is a mutable type
@@ -34,17 +35,18 @@ mutable_default_argument_check = re.compile(
 string_translation = re.compile(r"[^_]*_\(\s*('|\")")
 translated_log = re.compile(
     r"(.)*LOG\.(audit|error|info|warn|warning|critical|exception)"
-    "\(\s*_\(\s*('|\")")
+    r"\(\s*_\(\s*('|\")")
 underscore_import_check = re.compile(r"(.)*import _(.)*")
 # We need this for cases where they have created their own _ function.
 custom_underscore_check = re.compile(r"(.)*_\s*=\s*(.)*")
 graduated_oslo_libraries_import_re = re.compile(
     r"^\s*(?:import|from) designate\.openstack\.common\.?.*?"
-    "(gettextutils|rpc)"
-    ".*?")
+    r"(gettextutils|rpc)"
+    r".*?")


-def mutable_default_arguments(logical_line, physical_line, filename):
+@core.flake8ext
+def mutable_default_arguments(physical_line, logical_line, filename):
     if pycodestyle.noqa(physical_line):
         return

@@ -52,6 +54,7 @@ def mutable_default_arguments(logical_line, physical_line, filename):
         yield (0, "D701: Default parameter value is a mutable type")


+@core.flake8ext
 def no_translate_debug_logs(logical_line, filename):
     """Check for 'LOG.debug(_('

     As per our translation policy,
@@ -66,6 +69,7 @@ def no_translate_debug_logs(logical_line, filename):
         yield(0, "D706: Don't translate debug level logs")


+@core.flake8ext
 def check_explicit_underscore_import(logical_line, filename):
     """Check for explicit import of the _ function

@@ -86,6 +90,7 @@ def check_explicit_underscore_import(logical_line, filename):
         yield(0, "D703: Found use of _() without explicit import of _!")


+@core.flake8ext
 def no_import_graduated_oslo_libraries(logical_line, filename):
     """Check that we don't continue to use o.c. oslo libraries after graduation

@@ -105,6 +110,7 @@ def no_import_graduated_oslo_libraries(logical_line, filename):
               "graduated!" % matches.group(1))


+@core.flake8ext
 def use_timeutils_utcnow(logical_line, filename):
     # tools are OK to use the standard datetime module
     if "/tools/" in filename:
@@ -119,6 +125,7 @@ def use_timeutils_utcnow(logical_line, filename):
             yield (pos, msg % f)


+@core.flake8ext
 def check_no_basestring(logical_line):
     if re.search(r"\bbasestring\b", logical_line):
         msg = ("D707: basestring is not Python3-compatible, use "
@@ -126,12 +133,14 @@ def check_no_basestring(logical_line):
         yield(0, msg)


+@core.flake8ext
 def check_python3_xrange(logical_line):
     if re.search(r"\bxrange\s*\(", logical_line):
         yield(0, "D708: Do not use xrange. Use range, or six.moves.range for "
               "large loops.")


+@core.flake8ext
 def check_no_log_audit(logical_line):
     """Ensure that we are not using LOG.audit messages

     Plans are in place going forward as discussed in the following
@@ -141,14 +150,3 @@ def check_no_log_audit(logical_line):
     """
     if "LOG.audit(" in logical_line:
         yield(0, "D709: LOG.audit is deprecated, please use LOG.info!")
-
-
-def factory(register):
-    register(mutable_default_arguments)
-    register(no_translate_debug_logs)
-    register(check_explicit_underscore_import)
-    register(no_import_graduated_oslo_libraries)
-    register(use_timeutils_utcnow)
-    register(check_no_basestring)
-    register(check_python3_xrange)
-    register(check_no_log_audit)
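
hacking 3.x is built on flake8 3.x, which no longer supports the local-check-factory hook that hacking 1.x used, so the factory() registration above is replaced by the hacking.core.flake8ext decorator plus a [flake8:local-plugins] mapping in tox.ini (shown in the last hunk of this diff). A rough, hypothetical illustration of the pattern; D7XX and the check itself are made up, not designate checks:

import re

from hacking import core


@core.flake8ext
def check_no_print(logical_line):
    """D7XX: flag bare print() calls (hypothetical example check)."""
    if re.match(r"\s*print\(", logical_line):
        yield (0, "D7XX: use LOG instead of print()")

flake8 then discovers the function through the extension = and paths = entries in tox.ini rather than through a factory call.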

View File

@@ -52,6 +52,7 @@ class HookLog(object):
         if name in self.LVLS:
             return functools.partial(self.capture, self.LVLS[name])

+
 LOG = HookLog()

View File

@@ -20,7 +20,8 @@ from designate import objects
 class NotSpecifiedSential:
     pass

-REQUIRED_RE = re.compile("\'([\w]*)\' is a required property")
+
+REQUIRED_RE = re.compile(r"\'([\w]*)\' is a required property")


 class ValidationErrorAPIv2Adapter(base.APIv2Adapter):
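
This hunk and the similar regex changes elsewhere in the diff (the translated_log pattern and the assertRaisesRegex call) are the W605 fix: since Python 3.6, unrecognized escape sequences such as \w or \[ in a non-raw string emit a DeprecationWarning, and pycodestyle flags them as an invalid escape sequence. The compiled pattern is unchanged; only the literal gains an r prefix. A stand-alone illustration, where 'email' is just a sample message:

import re

# Raw string: the backslashes reach the regex engine untouched, with no
# DeprecationWarning from the Python compiler.
REQUIRED_RE = re.compile(r"\'([\w]*)\' is a required property")

match = REQUIRED_RE.search("'email' is a required property")
print(match.group(1))    # email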

View File

@@ -36,7 +36,7 @@ class ZoneMasterAPIv2Adapter(base.APIv2Adapter):

     @classmethod
     def _render_object(cls, object, *arg, **kwargs):
-        if object.port is 53:
+        if object.port == 53:
             return object.host
         else:
             return "%(host)s:%(port)d" % object.to_dict()

View File

@@ -288,7 +288,7 @@ class DesignateObject(base.VersionedObject):
                 else:
                     try:
                         field.coerce(self, name, value)  # Check value
-                    except Exception as e:
+                    except Exception:
                         raise exceptions.InvalidObject(
                             "{} is invalid".format(name))
             elif not field.nullable:

View File

@@ -121,7 +121,7 @@ class RecordSet(base.DesignateObject, base.DictObjectMixin,
         try:
             record_list_cls = self.obj_cls_from_name('%sList' % self.type)
             record_cls = self.obj_cls_from_name(self.type)
-        except (KeyError, ovo_exc.UnsupportedObjectError) as e:
+        except (KeyError, ovo_exc.UnsupportedObjectError):
             err_msg = ("'%(type)s' is not a valid record type"
                        % {'type': self.type})
             self._validate_fail(errors, err_msg)

View File

@@ -61,7 +61,7 @@ class Scheduler(object):
         """
         pools = self.storage.find_pools(context)

-        if len(self.filters) is 0:
+        if len(self.filters) == 0:
             raise exceptions.NoFiltersConfigured('There are no scheduling '
                                                  'filters configured')
@@ -75,7 +75,7 @@ class Scheduler(object):
         if len(pools) > 1:
             raise exceptions.MultiplePoolsFound()

-        if len(pools) is 0:
+        if len(pools) == 0:
             raise exceptions.NoValidPoolFound('There are no pools that '
                                               'matched your request')

         return pools[0].id
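
A small style aside on the two hunks above (an observation, not part of the change): the fix keeps the explicit length test and only switches is to ==; the usual Python idiom relies on an empty sequence being falsy.

pools = []

if len(pools) == 0:       # what the diff now does
    print("no pools matched")

if not pools:             # equivalent truthiness idiom
    print("no pools matched")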

View File

@@ -40,7 +40,7 @@ class FallbackFilter(base.Filter):
     """

     def filter(self, context, pools, zone):
-        if len(pools) is 0:
+        if len(pools) == 0:
             pools = objects.PoolList()
             pools.append(
                 objects.Pool(id=cfg.CONF['service:central'].default_pool_id))

View File

@@ -108,7 +108,7 @@ class Schema(object):
                 schema = schema['properties']

             with self.resolver.resolving(schema['$ref']) as ischema:
                 schema = ischema

         return [self.filter(i, schema) for i in instance]

View File

@@ -46,25 +46,25 @@ def type_draft3(validator, types, instance, schema):

 def oneOf_draft3(validator, oneOf, instance, schema):
     # Backported from Draft4 to Draft3
     subschemas = iter(oneOf)
     first_valid = next(
         (s for s in subschemas if validator.is_valid(instance, s)), None,
     )

     if first_valid is None:
         yield jsonschema.ValidationError(
             "%r is not valid under any of the given schemas." % (instance,)
         )
     else:
         more_valid = [s for s in subschemas
                       if validator.is_valid(instance, s)]
         if more_valid:
             more_valid.append(first_valid)
             reprs = ", ".join(repr(schema) for schema in more_valid)
             yield jsonschema.ValidationError(
                 "%r is valid under each of %s" % (instance, reprs)
             )


 def type_draft4(validator, types, instance, schema):

View File

@@ -49,18 +49,18 @@ def _set_object_from_model(obj, model, **extra):

 def _set_listobject_from_models(obj, models, map_=None):
     for model in models:
         extra = {}
         if map_ is not None:
             extra = map_(model)

         obj.objects.append(
             _set_object_from_model(obj.LIST_ITEM_TYPE(), model, **extra))

     obj.obj_reset_changes()

     return obj


 @six.add_metaclass(abc.ABCMeta)

View File

@@ -1798,5 +1798,5 @@ class SQLAlchemyStorage(sqlalchemy_base.SQLAlchemy, storage_base.Storage):
     def _rname_check(self, criterion):
         # If the criterion has 'name' in it, switch it out for reverse_name
         if criterion is not None and criterion.get('name', "").startswith('*'):
             criterion['reverse_name'] = criterion.pop('name')[::-1]
         return criterion

View File

@@ -248,8 +248,8 @@ class CentralServiceTest(CentralTestCase):
         values['ttl'] = 0

         with testtools.ExpectedException(exceptions.InvalidTTL):
             self.central_service._is_valid_ttl(
                 context, values['ttl'])

     # TLD Tests
     def test_create_tld(self):
@@ -721,8 +721,8 @@ class CentralServiceTest(CentralTestCase):
         values['ttl'] = 0

         with testtools.ExpectedException(ValueError):
             self.central_service.create_zone(
                 context, objects.Zone.from_dict(values))

     def test_create_zone_below_zero_ttl(self):
         self.policy({'use_low_ttl': '!'})

View File

@@ -148,15 +148,15 @@ class NovaFixedHandlerTest(TestCase, NotificationHandlerMixin):
         fixture = self.get_notification_fixture('nova', event_type)

         with mock.patch.object(
                 self.plugin, '_create_or_update_recordset') as finder:
             with mock.patch.object(self.plugin.central_api,
                                    'create_record'):
                 finder.return_value = {'id': 'fakeid'}
                 self.plugin.process_notification(
                     self.admin_context.to_dict(),
                     event_type, fixture['payload'])

             finder.assert_called_once_with(
                 mock.ANY, mock.ANY, type='A', zone_id=self.zone_id,
                 name='private.example.com.')

     def test_formatv4(self):
         event_type = 'compute.instance.create.end'
@@ -165,15 +165,15 @@ class NovaFixedHandlerTest(TestCase, NotificationHandlerMixin):
         fixture = self.get_notification_fixture('nova', event_type)

         with mock.patch.object(
                 self.plugin, '_create_or_update_recordset') as finder:
             with mock.patch.object(self.plugin.central_api,
                                    'create_record'):
                 finder.return_value = {'id': 'fakeid'}
                 self.plugin.process_notification(
                     self.admin_context.to_dict(),
                     event_type, fixture['payload'])

             finder.assert_called_once_with(
                 mock.ANY, mock.ANY, type='A', zone_id=self.zone_id,
                 name='private-v4.example.com.')

     def test_formatv6(self):
         event_type = 'compute.instance.create.end'
@@ -182,12 +182,12 @@ class NovaFixedHandlerTest(TestCase, NotificationHandlerMixin):
         fixture = self.get_notification_fixture('nova', event_type)

         with mock.patch.object(
                 self.plugin, '_create_or_update_recordset') as finder:
             with mock.patch.object(self.plugin.central_api,
                                    'create_record'):
                 finder.return_value = {'id': 'fakeid'}
                 self.plugin.process_notification(
                     self.admin_context.to_dict(),
                     event_type, fixture['payload_v6'])

             finder.assert_called_once_with(
                 mock.ANY, mock.ANY, type='AAAA', zone_id=self.zone_id,
                 name='private-v6.example.com.')

View File

@@ -67,18 +67,18 @@ class NSD4BackendTestCase(designate.tests.TestCase):
         mock_connect.return_value = mock.sentinel.client
         mock_ssl.return_value = sock
         sock.makefile.return_value = stream

-        if command_context is 'create_fail':
+        if command_context == 'create_fail':
             stream.read.return_value = 'goat'
         else:
             stream.read.return_value = 'ok'

-        if command_context is 'create':
+        if command_context == 'create':
             self.backend.create_zone(self.context, self.zone)
             command = 'NSDCT1 addzone %s test-pattern\n' % self.zone.name
-        elif command_context is 'delete':
+        elif command_context == 'delete':
             self.backend.delete_zone(self.context, self.zone)
             command = 'NSDCT1 delzone %s\n' % self.zone.name
-        elif command_context is 'create_fail':
+        elif command_context == 'create_fail':
             self.assertRaises(exceptions.Backend,
                               self.backend.create_zone,
                               self.context, self.zone)

View File

@@ -198,8 +198,8 @@ class MockRecord(object):
 class MockPool(object):
     ns_records = [MockRecord(), ]

-# Fixtures

+# Fixtures
 fx_mdns_api = fixtures.MockPatch('designate.central.service.mdns_rpcapi')
 mdns_api = mock.PropertyMock(

View File

@@ -243,7 +243,7 @@ class TestDoAfxr(oslotest.base.BaseTestCase):

         self.assertRaisesRegex(
             exceptions.XFRFailure,
-            'XFR failed for example.com. No servers in \[\] was reached.',
+            r'XFR failed for example.com. No servers in \[\] was reached.',
             dnsutils.do_axfr, 'example.com', masters,
         )

View File

@@ -158,7 +158,7 @@ def deep_dict_merge(a, b):

     for k, v in b.items():
         if k in result and isinstance(result[k], dict):
             result[k] = deep_dict_merge(result[k], v)
         else:
             result[k] = copy.deepcopy(v)

View File

@@ -57,5 +57,6 @@ def main():
     )
     server.serve_forever()

+
 if __name__ == "__main__":
     sys.exit(main())

View File

@@ -3,7 +3,7 @@
 # process, which may cause wedges in the gate later.
 # Hacking already pins down pep8, pyflakes and flake8
-hacking>=1.1.0,<1.2.0 # Apache-2.0
+hacking>=3.0,<3.1.0 # Apache-2.0
 coverage!=4.4,>=4.0 # Apache-2.0
 fixtures>=3.0.0 # Apache-2.0/BSD
 mock>=2.0.0 # BSD

tox.ini
View File

@@ -173,14 +173,25 @@ ignore-path = .venv,.git,.tox,*designate/locale*,*lib/python*,*designate.egg*,ap
 # H904 Wrap long lines in parentheses instead of a backslash
 # E126 continuation line over-indented for hanging indent
 # E128 continuation line under-indented for visual indent
-ignore = H105,H302,H306,H238,H402,H404,H405,H501,H904,E126,E128
+# W504 line break after binary operator
+ignore = H105,H302,H306,H238,H402,H404,H405,H501,H904,E126,E128,W504
 exclude = .venv,.git,.tox,dist,doc,*lib/python*,*egg,build,tools,.ropeproject

 [hacking]
-local-check-factory = designate.hacking.checks.factory
 import_exceptions = designate.i18n

+[flake8:local-plugins]
+extension =
+    D701 = checks:mutable_default_arguments
+    D703 = checks:check_explicit_underscore_import
+    D704 = checks:no_import_graduated_oslo_libraries
+    D705 = checks:use_timeutils_utcnow
+    D706 = checks:no_translate_debug_logs
+    D707 = checks:check_no_basestring
+    D708 = checks:check_python3_xrange
+    D709 = checks:check_no_log_audit
+paths = ./designate/hacking
+
 [testenv:lower-constraints]
 deps =
   -c{toxinidir}/lower-constraints.txt