Fix: Lint errors sometimes caused exceptions

* Fix inconsistencies of lint errors with respect to string vs tuple
* Add error codes to all lint errors
* Update Deckhand version

Change-Id: Ib672be7f1d8805e5e6afbb8f942693995aa45591

parent eb8c4a2b17
commit 26d3174d33
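To illustrate the convention this change standardizes on, here is a minimal, hypothetical sketch (not code from this commit) of lint messages carried as (code, message) tuples and then sorted into warnings versus errors; the helper name and sample messages are illustrative only.

    # Hypothetical sketch of the (code, message) convention; names are illustrative.
    P001 = 'P001'  # schema/storagePolicy mismatch
    P007 = 'P007'  # file contains invalid YAML

    def classify(messages, exclude_lint, warn_lint):
        """Split (code, message) tuples into warnings and errors."""
        errors, warns = [], []
        for code, message in messages:
            if code in warn_lint:
                warns.append('%s: %s' % (code, message))
            elif code not in exclude_lint:
                errors.append('%s: %s' % (code, message))
        return errors, warns

    # Example: P001 is demoted to a warning, P007 stays fatal.
    msgs = [(P001, 'secret has storagePolicy: cleartext'),
            (P007, 'site.yaml is not valid yaml')]
    errors, warns = classify(msgs, exclude_lint=[], warn_lint=[P001])
    assert len(errors) == 1 and len(warns) == 1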
@@ -1,5 +1,3 @@
-from copy import copy
-
 try:
     if GLOBAL_CONTEXT:
         pass
@@ -15,15 +13,15 @@ def get_primary_repo():


 def set_primary_repo(r):
-    GLOBAL_CONTEXT['primary_repo'] = r
+    GLOBAL_CONTEXT['primary_repo'] = r.rstrip('/') + '/'


 def set_auxiliary_repo_list(a):
-    GLOBAL_CONTEXT['aux_repos'] = copy(a)
+    GLOBAL_CONTEXT['aux_repos'] = [r.rstrip('/') + '/' for r in a]


 def add_auxiliary_repo(a):
-    GLOBAL_CONTEXT['aux_repos'].append(a)
+    GLOBAL_CONTEXT['aux_repos'].append(a.rstrip('/') + '/')


 def get_auxiliary_repo_list():
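The trailing-slash normalization above pairs with the startswith()/slicing changes later in this commit. A small illustrative sketch, assuming nothing beyond what the diff shows, of why a canonical trailing slash keeps prefix matching and slicing consistent:

    # Illustrative only: normalize a repo root the way set_primary_repo() now does.
    repo = '/tmp/repos/site-definitions'.rstrip('/') + '/'

    path = '/tmp/repos/site-definitions/site/demo/secrets/passphrase.yaml'
    if path.startswith(repo):
        # Because repo always ends with '/', slicing by len(repo) yields the
        # relative path without clipping its first character.
        relative = path[len(repo):]          # 'site/demo/secrets/passphrase.yaml'
        top_level = relative.split('/')[0]   # 'site'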
src/bin/pegleg/pegleg/engine/errorcodes.py (new file, 9 lines)
@@ -0,0 +1,9 @@
+SCHEMA_STORAGE_POLICY_MISMATCH_FLAG = 'P001'
+DECKHAND_RENDERING_INCOMPLETE_FLAG = 'P002'
+REPOS_MISSING_DIRECTORIES_FLAG = 'P003'
+DECKHAND_DUPLICATE_SCHEMA = 'P004'
+DECKHAND_RENDER_EXCEPTION = 'P005'
+FILE_MISSING_YAML_DOCUMENT_HEADER = 'P006'
+FILE_CONTAINS_INVALID_YAML = 'P007'
+DOCUMENT_LAYER_MISMATCH = 'P008'
+SECRET_NOT_ENCRYPTED_POLICY = 'P009'
@@ -6,10 +6,12 @@ import yaml

 from pegleg import config
 from pegleg.engine import util
-
-SCHEMA_STORAGE_POLICY_MISMATCH_FLAG = 'P001'
-DECKHAND_RENDERING_INCOMPLETE_FLAG = 'P002'
-REPOS_MISSING_DIRECTORIES_FLAG = 'P003'
+from pegleg.engine.errorcodes import DOCUMENT_LAYER_MISMATCH
+from pegleg.engine.errorcodes import FILE_CONTAINS_INVALID_YAML
+from pegleg.engine.errorcodes import FILE_MISSING_YAML_DOCUMENT_HEADER
+from pegleg.engine.errorcodes import REPOS_MISSING_DIRECTORIES_FLAG
+from pegleg.engine.errorcodes import SCHEMA_STORAGE_POLICY_MISMATCH_FLAG
+from pegleg.engine.errorcodes import SECRET_NOT_ENCRYPTED_POLICY

 __all__ = ['full']
@@ -23,37 +25,25 @@ DECKHAND_SCHEMAS = {


 def full(fail_on_missing_sub_src=False, exclude_lint=None, warn_lint=None):
-    errors = []
-    warns = []
-
-    messages = _verify_file_contents()
-    # If policy is cleartext and error is added this will put
-    # that particular message into the warns list and all other will
-    # be added to the error list if SCHMEA_STORAGE_POLICY_MITCHMATCH_FLAG
-    for msg in messages:
-        if (SCHEMA_STORAGE_POLICY_MISMATCH_FLAG in warn_lint
-                and SCHEMA_STORAGE_POLICY_MISMATCH_FLAG == msg[0]):
-            warns.append(msg)
-        else:
-            errors.append(msg)
+    messages = []
+    messages.extend(_verify_file_contents())

     # Deckhand Rendering completes without error
-    if DECKHAND_RENDERING_INCOMPLETE_FLAG in warn_lint:
-        warns.extend(
-            [(DECKHAND_RENDERING_INCOMPLETE_FLAG, x)
-             for x in _verify_deckhand_render(fail_on_missing_sub_src)])
-    elif DECKHAND_RENDERING_INCOMPLETE_FLAG not in exclude_lint:
-        errors.extend(
-            [(DECKHAND_RENDERING_INCOMPLETE_FLAG, x)
-             for x in _verify_deckhand_render(fail_on_missing_sub_src)])
+    messages.extend(_verify_deckhand_render(fail_on_missing_sub_src))

     # All repos contain expected directories
-    if REPOS_MISSING_DIRECTORIES_FLAG in warn_lint:
-        warns.extend([(REPOS_MISSING_DIRECTORIES_FLAG, x)
-                      for x in _verify_no_unexpected_files()])
-    elif REPOS_MISSING_DIRECTORIES_FLAG not in exclude_lint:
-        errors.extend([(REPOS_MISSING_DIRECTORIES_FLAG, x)
-                       for x in _verify_no_unexpected_files()])
+    messages.extend(_verify_no_unexpected_files())
+
+    errors = []
+    warns = []
+    for code, message in messages:
+        if code in warn_lint:
+            warns.append('%s: %s' % (code, message))
+        elif code not in exclude_lint:
+            errors.append('%s: %s' % (code, message))

     if errors:
         raise click.ClickException('\n'.join(
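A hedged usage sketch of the reworked flow above: every checker now contributes (code, message) tuples to one messages list, and only the final loop decides whether a code becomes a warning or an error. Assuming the module is importable as pegleg.engine.lint (an assumption based on the imports elsewhere in this diff), a caller might look like this:

    # Hypothetical caller; the import path is an assumption, not from the commit.
    import click
    from pegleg.engine import lint

    try:
        # Treat P001 (cleartext storagePolicy) as a warning, skip P003 entirely.
        lint.full(fail_on_missing_sub_src=False,
                  exclude_lint=['P003'],
                  warn_lint=['P001'])
    except click.ClickException as e:
        # Remaining coded messages are joined into a single failure, e.g.
        # "P007: site.yaml is not valid yaml: ..."
        print('lint failed:\n%s' % e)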
@@ -99,15 +89,18 @@ def _verify_single_file(filename, schemas):
     LOG.debug("Validating file %s." % filename)
     with open(filename) as f:
         if not f.read(4) == '---\n':
-            errors.append('%s does not begin with YAML beginning of document '
-                          'marker "---".' % filename)
+            errors.append((FILE_MISSING_YAML_DOCUMENT_HEADER,
+                           '%s does not begin with YAML beginning of document '
+                           'marker "---".' % filename))
         f.seek(0)
         try:
-            documents = yaml.safe_load_all(f)
-            for document in documents:
-                errors.extend(_verify_document(document, schemas, filename))
+            documents = list(yaml.safe_load_all(f))
         except Exception as e:
-            errors.append('%s is not valid yaml: %s' % (filename, e))
+            errors.append((FILE_CONTAINS_INVALID_YAML,
+                           '%s is not valid yaml: %s' % (filename, e)))
+
+    for document in documents:
+        errors.extend(_verify_document(document, schemas, filename))

     return errors
@@ -130,7 +123,7 @@ def _verify_document(document, schemas, filename):
     layer = _layer(document)
     if layer is not None and layer != _expected_layer(filename):
         errors.append(
-            ('N/A',
+            (DOCUMENT_LAYER_MISMATCH,
              '%s (document %s) had unexpected layer "%s", expected "%s"' %
              (filename, name, layer, _expected_layer(filename))))
@@ -146,7 +139,7 @@ def _verify_document(document, schemas, filename):
                 storage_policy)))

         if not _filename_in_section(filename, 'secrets/'):
-            errors.append(('N/A',
+            errors.append((SECRET_NOT_ENCRYPTED_POLICY,
                            '%s (document %s) is a secret, is not stored in a '
                            'secrets path' % (filename, name)))
     return errors
@@ -176,8 +169,8 @@ def _layer(data):

 def _expected_layer(filename):
     for r in config.all_repos():
-        if filename.startswith(r + "/"):
-            partial_name = filename[len(r) + 1:]
+        if filename.startswith(r):
+            partial_name = filename[len(r):]
             parts = os.path.normpath(partial_name).split(os.sep)
             return parts[0]
@@ -192,5 +185,8 @@ def _load_schemas():

 def _filename_in_section(filename, section):
     directory = util.files.directory_for(path=filename)
-    rest = filename[len(directory) + 1:]
-    return rest is not None and rest.startswith(section)
+    if directory is not None:
+        rest = filename[len(directory) + 1:]
+        return rest is not None and rest.startswith(section)
+    else:
+        return False
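This guard is where the exceptions named in the commit title most plainly arise: directory_for() can return None for a path that sits outside every configured repo, and len(None) previously raised a TypeError. A standalone, simplified reproduction of the pattern (illustrative, not the repo's code):

    # Illustrative reproduction of the failure mode the guard above prevents.
    def directory_for(path, repos):
        for r in repos:
            if path.startswith(r):
                return r
        return None  # path belongs to no known repo

    def filename_in_section(path, section, repos):
        directory = directory_for(path, repos)
        if directory is not None:
            return path[len(directory):].startswith(section)
        return False  # previously: len(None) -> TypeError

    assert filename_in_section('/repo/secrets/key.yaml', 'secrets/', ['/repo/'])
    assert not filename_in_section('/elsewhere/key.yaml', 'secrets/', ['/repo/'])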
@@ -1,3 +1,5 @@
+from pegleg.engine.errorcodes import DECKHAND_DUPLICATE_SCHEMA
+from pegleg.engine.errorcodes import DECKHAND_RENDER_EXCEPTION
 from deckhand.engine import layering
 from deckhand import errors as dh_errors

@@ -15,7 +17,8 @@ def load_schemas_from_docs(documents):
         if document.get('schema', '') == SCHEMA_SCHEMA:
             name = document['metadata']['name']
             if name in schema_set:
-                errors.append('Duplicate schema specified for: %s' % name)
+                errors.append((DECKHAND_DUPLICATE_SCHEMA,
+                               'Duplicate schema specified for: %s' % name))

             schema_set[name] = document['data']
@@ -40,7 +43,8 @@ def deckhand_render(documents=[],
             validate=validate)
         rendered_documents = [dict(d) for d in deckhand_eng.render()]
     except dh_errors.DeckhandException as e:
-        errors.append('An unknown Deckhand exception occurred while trying'
-                      ' to render documents: %s' % str(e))
+        errors.append((DECKHAND_RENDER_EXCEPTION,
+                       'An unknown Deckhand exception occurred while trying'
+                       ' to render documents: %s' % str(e)))

     return rendered_documents, errors
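With this change, deckhand_render() reports failures as (DECKHAND_RENDER_EXCEPTION, message) tuples rather than bare strings, so its output can flow straight into the code-based filtering in lint.full(). A hedged sketch of a consumer; the module path is an assumption based on the util.files usage elsewhere in this diff, and the input documents are a placeholder:

    # Hypothetical consumer; module path and input documents are assumptions.
    from pegleg.engine import util

    documents = []  # YAML documents loaded elsewhere
    rendered, errors = util.deckhand.deckhand_render(documents=documents)
    for code, message in errors:  # each entry now carries a P00x code
        print('%s: %s' % (code, message))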
@@ -158,8 +158,8 @@ def list_sites(primary_repo_base=None):

 def directory_for(*, path):
     for r in config.all_repos():
-        if path.startswith(r + "/"):
-            partial_path = path[len(r) + 1:]
+        if path.startswith(r):
+            partial_path = path[len(r):]
             parts = os.path.normpath(partial_path).split(os.sep)
             depth = DIR_DEPTHS.get(parts[0])
             if depth is not None:
@@ -1,4 +1,4 @@
 click==6.7
 jsonschema==2.6.0
 pyyaml==3.12
-git+https://github.com/att-comdev/deckhand.git@b9845fa72c079dadb05e741aa9134cf03bf3ce48#egg=deckhand
+git+https://github.com/att-comdev/deckhand.git@99e3064eda9da0227780b57ee30baeb264b3040d#egg=deckhand
@@ -26,14 +26,16 @@ def test_lint_excludes_P001(*args):
     exclude_lint = ['P001']
     config.set_primary_repo('../pegleg/site_yamls/')

+    code_1 = 'X001'
     msg_1 = 'is a secret, but has unexpected storagePolicy: "cleartext"'
+    code_2 = 'X002'
     msg_2 = 'test msg'
-    msgs = [msg_1, msg_2]
+    msgs = [(code_1, msg_1), (code_2, msg_2)]

-    with mock.patch.object(lint, '_verify_file_contents', return_value=msgs) as mock_methed:
+    with mock.patch.object(
+            lint, '_verify_file_contents', return_value=msgs) as mock_methed:
         with pytest.raises(click.ClickException) as expected_exc:
             results = lint.full(False, exclude_lint, [])
             e = str(expected_exc)
             assert msg_1 in expected_exc
             assert msg_2 in expected_exc
@@ -42,17 +44,23 @@ def test_lint_excludes_P001(*args):
 def test_lint_excludes_P002(*args):
     exclude_lint = ['P002']
     config.set_primary_repo('../pegleg/site_yamls/')
-    with mock.patch.object(lint, '_verify_deckhand_render') as mock_method:
+    with mock.patch.object(
+            lint,
+            '_verify_deckhand_render',
+            return_value=[('P002', 'test message')]) as mock_method:
         lint.full(False, exclude_lint, [])
-    mock_method.assert_not_called()
+    mock_method.assert_called()


 @mock.patch.object(lint, '_verify_deckhand_render', return_value=[])
 def test_lint_excludes_P003(*args):
     exclude_lint = ['P003']
-    with mock.patch.object(lint, '_verify_no_unexpected_files') as mock_method:
+    with mock.patch.object(
+            lint,
+            '_verify_no_unexpected_files',
+            return_value=[('P003', 'test message')]) as mock_method:
         lint.full(False, exclude_lint, [])
-    mock_method.assert_not_called()
+    mock_method.assert_called()


 @mock.patch.object(lint, '_verify_deckhand_render', return_value=[])
@@ -61,14 +69,16 @@ def test_lint_warns_P001(*args):
     warn_lint = ['P001']
     config.set_primary_repo('../pegleg/site_yamls/')

+    code_1 = 'X001'
     msg_1 = 'is a secret, but has unexpected storagePolicy: "cleartext"'
+    code_2 = 'X002'
     msg_2 = 'test msg'
-    msgs = [msg_1, msg_2]
+    msgs = [(code_1, msg_1), (code_2, msg_2)]

-    with mock.patch.object(lint, '_verify_file_contents', return_value=msgs) as mock_methed:
+    with mock.patch.object(
+            lint, '_verify_file_contents', return_value=msgs) as mock_methed:
         with pytest.raises(click.ClickException) as expected_exc:
             lint.full(False, [], warn_lint)
             e = str(expected_exc)
             assert msg_1 not in expected_exc
             assert msg_2 in expected_exc
@@ -1,5 +1,6 @@
 [tox]
 envlist = py35, lint
+skipsdist = True

 [testenv]
 deps =
@@ -16,14 +17,14 @@ commands =
 [testenv:fmt]
 deps = yapf==0.20.0
 commands =
-    yapf --style=pep8 -ir {toxinidir}/pegleg
+    yapf --style=pep8 -ir {toxinidir}/pegleg {toxinidir}/tests

 [testenv:lint]
 deps =
     yapf==0.20.0
     flake8==3.5.0
 commands =
-    yapf -rd {toxinidir}/pegleg
+    yapf -rd {toxinidir}/pegleg {toxinidir}/tests
     flake8 {toxinidir}/pegleg

 [flake8]