style(pep8): remove indentation ignores

This patch set removes a few pep8/flake8 ignored rules and implements
the fixes in the code needed to satisfy those rules.
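
For illustration, the cleanups re-indent continuation lines so that flake8 no longer reports E127/E128 (continuation line over-/under-indented for visual indent), E129 (visually indented line with same indent as next logical line) or E131 (continuation line unaligned for hanging indent). A minimal sketch of the kind of change applied, using a placeholder function name rather than code from this repository:

    # Before: E128, continuation line under-indented for visual indent
    result = some_function(first_arg,
        second_arg)

    # After: continuation aligned with the opening parenthesis ...
    result = some_function(first_arg,
                           second_arg)

    # ... or a plain hanging indent, which also satisfies the checks
    result = some_function(
        first_arg, second_arg)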

Change-Id: I2e613acd760818a6e18288d284f6224c38c4353a
Signed-off-by: Tin Lam <tin@irrational.io>
Tin Lam 2018-05-18 15:40:16 -05:00
parent a552bf2a0f
commit 33e2203f5e
15 changed files with 53 additions and 43 deletions


@@ -191,7 +191,7 @@ class Manager(object):
try:
return (
list(yaml.safe_load_all(body))
- if many else yaml.safe_load(body)
+ if many else yaml.safe_load(body)
)
except yaml.YAMLError:
return None


@@ -56,10 +56,9 @@ def register_opts(conf):
def list_opts():
opts = {None: default_opts,
barbican_group: barbican_opts +
- ks_loading.get_session_conf_options() +
- ks_loading.get_auth_common_conf_options() +
- ks_loading.get_auth_plugin_conf_options(
- 'v3password')}
+ ks_loading.get_session_conf_options() +
+ ks_loading.get_auth_common_conf_options() +
+ ks_loading.get_auth_plugin_conf_options('v3password')}
return opts


@@ -145,7 +145,7 @@ class YAMLTranslator(HookableMiddlewareMixin, object):
if requires_content_type:
content_type = (req.content_type.split(';', 1)[0].strip()
- if req.content_type else '')
+ if req.content_type else '')
if not content_type:
raise falcon.HTTPMissingHeader('Content-Type')


@@ -127,7 +127,7 @@ def require_unique_document_schema(schema=None):
conflicting_names = [
x.meta for x in documents
if x.meta not in existing_document_names and
- x.schema.startswith(schema)
+ x.schema.startswith(schema)
]
if existing_document_names and conflicting_names:
raise errors.SingletonDocumentConflict(
@@ -976,7 +976,7 @@ def revision_rollback(revision_id, latest_revision, session=None):
doc_diff = {}
for orig_doc in orig_revision['documents']:
if ((orig_doc['data_hash'], orig_doc['metadata_hash'])
- not in latest_revision_hashes):
+ not in latest_revision_hashes):
doc_diff[orig_doc['id']] = True
else:
doc_diff[orig_doc['id']] = False
@@ -1036,7 +1036,7 @@ def _get_validation_policies_for_revision(revision_id, session=None):
# Otherwise return early.
LOG.debug('Failed to find a ValidationPolicy for revision ID %s. '
'Only the "%s" results will be included in the response.',
- revision_id, types.DECKHAND_SCHEMA_VALIDATION)
+ revision_id, types.DECKHAND_SCHEMA_VALIDATION)
validation_policies = []
return validation_policies


@@ -155,8 +155,9 @@ def __build_tables(blob_type_obj, blob_type_list):
ondelete='CASCADE'),
nullable=False)
revision_id = Column(
- Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
- nullable=False)
+ Integer,
+ ForeignKey('revisions.id', ondelete='CASCADE'),
+ nullable=False)
# Used for documents that haven't changed across revisions but still
# have been carried over into newer revisions. This is necessary in
# order to roll back to previous revisions or to generate a revision
@@ -167,8 +168,9 @@ def __build_tables(blob_type_obj, blob_type_list):
# still being able to roll back to all the documents that exist in a
# specific revision or generate an accurate revision diff report.
orig_revision_id = Column(
- Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
- nullable=True)
+ Integer,
+ ForeignKey('revisions.id', ondelete='CASCADE'),
+ nullable=True)
@hybrid_property
def bucket_name(self):
@@ -201,8 +203,9 @@ def __build_tables(blob_type_obj, blob_type_list):
validator = Column(blob_type_obj, nullable=False)
errors = Column(blob_type_list, nullable=False, default=[])
revision_id = Column(
- Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
- nullable=False)
+ Integer,
+ ForeignKey('revisions.id', ondelete='CASCADE'),
+ nullable=False)
this_module = sys.modules[__name__]
tables = [Bucket, Document, Revision, RevisionTag, Validation]


@@ -186,8 +186,8 @@ class DataSchemaValidator(GenericValidator):
continue
if 'data' not in data_schema:
continue
- schema_prefix, schema_version = _get_schema_parts(data_schema,
- 'metadata.name')
+ schema_prefix, schema_version = _get_schema_parts(
+ data_schema, 'metadata.name')
schema_map[schema_version].setdefault(schema_prefix,
data_schema.data)
@@ -308,7 +308,7 @@ class DataSchemaValidator(GenericValidator):
if not schemas_to_use:
LOG.debug('Document schema %s not recognized by %s. No further '
'validation required.', document.schema,
- self.__class__.__name__)
+ self.__class__.__name__)
for schema in schemas_to_use:
is_builtin_schema = schema not in self._external_data_schemas


@@ -146,7 +146,7 @@ class DocumentLayering(object):
current_parent = self._documents_by_index.get(parent_meta, None)
if current_parent:
if (self._layer_order.index(parent.layer) >
- self._layer_order.index(current_parent.layer)):
+ self._layer_order.index(current_parent.layer)):
self._parents[child.meta] = parent.meta
all_children[child] -= 1
else:
@@ -304,7 +304,7 @@ class DocumentLayering(object):
# document itself then return the parent.
use_replacement = (
parent and parent.has_replacement and
- parent.replaced_by is not doc
+ parent.replaced_by is not doc
)
if use_replacement:
parent = parent.replaced_by
@@ -481,7 +481,7 @@ class DocumentLayering(object):
else:
substitution_sources = [
d for d in self._documents_by_index.values()
- if not d.is_abstract
+ if not d.is_abstract
]
substitution_sources = self._calc_replacements_and_substitutions(
@@ -709,7 +709,7 @@ class DocumentLayering(object):
# Return only concrete documents and non-replacements.
return [d for d in self._sorted_documents
- if d.is_abstract is False and d.has_replacement is False]
+ if d.is_abstract is False and d.has_replacement is False]
@property
def documents(self):


@@ -61,9 +61,9 @@ class SecretsManager(object):
secret_uuid = None
return (
isinstance(secret_ref, six.string_types) and
- cls._url_re.match(secret_ref) and
- 'secrets' in secret_ref and
- uuidutils.is_uuid_like(secret_uuid)
+ cls._url_re.match(secret_ref) and
+ 'secrets' in secret_ref and
+ uuidutils.is_uuid_like(secret_uuid)
)
@classmethod


@@ -214,7 +214,7 @@ class TestDocuments(base.TestDbBase):
# Verify that all the expected documents were deleted.
self.assertEqual(
sorted([(d['metadata']['name'], d['schema'])
- for d in created_documents]),
+ for d in created_documents]),
sorted([(d['name'], d['schema']) for d in deleted_documents]))
# Verify that all their attributes have been cleared and that the


@@ -916,8 +916,8 @@ class TestDocumentLayering3LayersAbstractConcrete(TestDocumentLayering):
"actions": [{"method": "replace", "path": ".b"}]}
}
doc_factory = factories.DocumentFactory(3, [1, 1, 1])
- documents = doc_factory.gen_test(mapping, site_abstract=False,
- region_abstract=False)
+ documents = doc_factory.gen_test(
+ mapping, site_abstract=False, region_abstract=False)
site_expected = {"a": {"x": 1, "y": 2, "z": 3}, "b": 4}
region_expected = {"a": {"x": 1, "y": 2, "z": 3}, "b": 5}


@@ -34,7 +34,7 @@ class TestDocumentLayeringReplacementNegative(
documents[2]['metadata']['name'] = 'bar'
error_re = (r'.*Document replacement requires that both documents '
- 'have the same `schema` and `metadata.name`.')
+ 'have the same `schema` and `metadata.name`.')
self.assertRaisesRegexp(errors.InvalidDocumentReplacement, error_re,
self._test_layering, documents)
@@ -44,7 +44,7 @@ class TestDocumentLayeringReplacementNegative(
documents[2]['metadata']['schema'] = 'example/Other/v1'
error_re = (r'Document replacement requires that both documents '
- 'have the same `schema` and `metadata.name`.')
+ 'have the same `schema` and `metadata.name`.')
self.assertRaisesRegexp(errors.InvalidDocumentReplacement, error_re,
self._test_layering, documents)
@@ -76,7 +76,7 @@ class TestDocumentLayeringReplacementNegative(
documents[2]['metadata']['layeringDefinition'].pop('parentSelector')
error_re = (r'Document replacement requires that the document with '
- '`replacement: true` have a parent.')
+ '`replacement: true` have a parent.')
self.assertRaisesRegexp(errors.InvalidDocumentReplacement, error_re,
self._test_layering, documents)
@@ -96,6 +96,6 @@ class TestDocumentLayeringReplacementNegative(
documents[3]['metadata']['replacement'] = True
error_re = (r'A replacement document cannot itself be replaced by '
- 'another document.')
+ 'another document.')
self.assertRaisesRegexp(errors.InvalidDocumentReplacement, error_re,
self._test_layering, documents)


@@ -114,9 +114,10 @@ class TestDocumentLayeringNegative(
'parentSelector'] = parent_selector
layering.DocumentLayering(documents, validate=False)
- self.assertTrue(any('Could not find parent for document' in
- mock_log.debug.mock_calls[x][1][0])
- for x in range(len(mock_log.debug.mock_calls)))
+ self.assertTrue(
+ any('Could not find parent for document' in
+ mock_log.debug.mock_calls[x][1][0])
+ for x in range(len(mock_log.debug.mock_calls)))
mock_log.debug.reset_mock()
@mock.patch.object(layering, 'LOG', autospec=True)
@@ -129,9 +130,10 @@ class TestDocumentLayeringNegative(
documents[1]['metadata']['labels'] = parent_label
layering.DocumentLayering(documents, validate=False)
- self.assertTrue(any('Could not find parent for document' in
- mock_log.debug.mock_calls[x][1][0])
- for x in range(len(mock_log.debug.mock_calls)))
+ self.assertTrue(
+ any('Could not find parent for document' in
+ mock_log.debug.mock_calls[x][1][0])
+ for x in range(len(mock_log.debug.mock_calls)))
mock_log.debug.reset_mock()
def test_layering_duplicate_parent_selector_2_layer(self):
@@ -168,9 +170,10 @@ class TestDocumentLayeringNegative(
'parentSelector'] = self_ref
layering.DocumentLayering(documents, validate=False)
- self.assertTrue(any('Could not find parent for document' in
- mock_log.debug.mock_calls[x][1][0])
- for x in range(len(mock_log.debug.mock_calls)))
+ self.assertTrue(
+ any('Could not find parent for document' in
+ mock_log.debug.mock_calls[x][1][0])
+ for x in range(len(mock_log.debug.mock_calls)))
def test_layering_without_layering_policy_raises_exc(self):
doc_factory = factories.DocumentFactory(1, [1])


@@ -108,7 +108,8 @@ class RealPolicyFixture(fixtures.Fixture):
"""
if not (set(self.expected_policy_actions) ==
set(self.actual_policy_actions)):
- error_msg = ('The expected policy actions passed to '
+ error_msg = (
+ 'The expected policy actions passed to '
'`self.policy.set_rules` do not match the policy actions '
'that were actually enforced by Deckhand. Set of expected '
'policies %s should be equal to set of actual policies: %s. '


@@ -0,0 +1,4 @@
+ ---
+ fixes:
+   - |
+     Removed indentation rules E127, E128, E129 and E131 from pep8 exclusion.


@@ -96,7 +96,7 @@ commands = flake8 {posargs}
# [H210] Require autospec, spec, or spec_set in mock.patch/mock.patch.object calls
# [H904] Delay string interpolations at logging calls.
enable-extensions = H106,H203,H204,H205,H210,H904
- ignore = E127,E128,E129,E131,H405
+ ignore = H405
exclude = .venv,.git,.tox,dist,*lib/python*,*egg,build,releasenotes,docs,alembic/versions
[testenv:docs]