[focal] Python modules sync with Airship project

- uplifted/downgraded some python modules
- fixed falcon.API deprecation: falcon.API -> falcon.App
- uplifted deckhand reference for python deps
- fixed formatting style  using yapf linter
- added bindep role and bindep.txt file with required deps
- fixed quay docker image publishing
- re-enabled openstack-tox-py38 gate job

Change-Id: I0e248182efad75630721a1291bc86a5edc79c22a
This commit is contained in:
Sergiy Markin 2023-04-07 15:33:51 +00:00
parent 7e4bf90233
commit 32ad8a96b0
37 changed files with 440 additions and 338 deletions

View File

@ -19,7 +19,7 @@ formats:
# Optionally set the version of Python and requirements required to build your docs # Optionally set the version of Python and requirements required to build your docs
python: python:
version: 3.7 version: 3.8
install: install:
- requirements: doc/requirements.txt - requirements: doc/requirements.txt
- requirements: requirements.txt - requirements: requirements.txt

View File

@ -46,6 +46,7 @@ tests-unit: external-deps
tox -e py38 tox -e py38
external-deps: external-deps:
export DEBIAN_FRONTEND=noninteractive
./tools/install-external-deps.sh ./tools/install-external-deps.sh
tests-pep8: tests-pep8:
@ -69,10 +70,12 @@ helm-init-%: helm-toolkit
lint: helm-lint gate-lint lint: helm-lint gate-lint
gate-lint: gate-lint-deps gate-lint: gate-lint-deps
export DEBIAN_FRONTEND=noninteractive
tox -e gate-lint tox -e gate-lint
gate-lint-deps: gate-lint-deps:
sudo apt-get install -y --no-install-recommends shellcheck tox sudo apt install -y --no-install-recommends shellcheck
sudo pip3 install tox
helm-lint: $(addprefix helm-lint-,$(CHARTS)) helm-lint: $(addprefix helm-lint-,$(CHARTS))

13
bindep.txt Normal file
View File

@ -0,0 +1,13 @@
# This file contains runtime (non-python) dependencies
# More info at: https://docs.openstack.org/infra/bindep/readme.html
# PlantUML is used for documentation builds; graphviz is its soft dependency
plantuml
graphviz
libffi-dev [test platform:dpkg]
libkrb5-dev [platform:dpkg]
libpq-dev [platform:dpkg]
libsasl2-dev [platform:dpkg]
libssl-dev [platform:dpkg]
libre2-dev [platform:dpkg]
apt-utils [platform:dpkg]

View File

@ -1,5 +1,5 @@
apiVersion: v1 apiVersion: v1
description: The Promenade API description: The Promenade API
name: promenade name: promenade
version: 0.1.0 version: 0.1.3
appVersion: 1.1.0 appVersion: 1.1.0

View File

@ -1,5 +1,4 @@
sphinx>=1.6.2 sphinx
sphinx-rtd-theme==1.1.1 sphinx-rtd-theme==0.5.0
falcon>=1.4.1 oslo.config<=8.7.1
oslo.config==8.7.1 MarkupSafe<2.1.0
markupsafe==2.0.1

View File

@ -29,6 +29,7 @@ CACHE = CacheManager(**parse_cache_config_options(CACHE_OPTS))
class Builder: class Builder:
def __init__(self, config, *, validators=False): def __init__(self, config, *, validators=False):
self.config = config self.config = config
self.validators = validators self.validators = validators
@ -64,9 +65,8 @@ class Builder:
@property @property
def _file_specs(self): def _file_specs(self):
return itertools.chain( return itertools.chain(self.config.get_path('HostSystem:files', []),
self.config.get_path('HostSystem:files', []), self.config.get_path('Genesis:files', []))
self.config.get_path('Genesis:files', []))
def build_all(self, *, output_dir): def build_all(self, *, output_dir):
self.build_genesis(output_dir=output_dir) self.build_genesis(output_dir=output_dir)
@ -99,21 +99,23 @@ class Builder:
(encrypted_tarball, decrypt_setup_command, decrypt_command, (encrypted_tarball, decrypt_setup_command, decrypt_command,
decrypt_teardown_command) = _encrypt_genesis(sub_config, tarball) decrypt_teardown_command) = _encrypt_genesis(sub_config, tarball)
return renderer.render_template( return renderer.render_template(sub_config,
sub_config, template='scripts/genesis.sh',
template='scripts/genesis.sh', context={
context={ 'decrypt_command': decrypt_command,
'decrypt_command': decrypt_command, 'decrypt_setup_command':
'decrypt_setup_command': decrypt_setup_command, decrypt_setup_command,
'decrypt_teardown_command': decrypt_teardown_command, 'decrypt_teardown_command':
'encrypted_tarball': encrypted_tarball, decrypt_teardown_command,
}, 'encrypted_tarball':
roles=genesis_roles) encrypted_tarball,
},
roles=genesis_roles)
def _build_genesis_validate_script(self): def _build_genesis_validate_script(self):
sub_config = self.config.extract_genesis_config() sub_config = self.config.extract_genesis_config()
return renderer.render_template( return renderer.render_template(sub_config,
sub_config, template='scripts/validate-genesis.sh') template='scripts/validate-genesis.sh')
def build_node(self, node_document, *, output_dir): def build_node(self, node_document, *, output_dir):
node_name = node_document['metadata']['name'] node_name = node_document['metadata']['name']
@ -134,27 +136,30 @@ class Builder:
f['path'] for f in self.config.get_path('HostSystem:files', []) f['path'] for f in self.config.get_path('HostSystem:files', [])
] ]
file_specs = [self.file_cache[p] for p in file_spec_paths] file_specs = [self.file_cache[p] for p in file_spec_paths]
tarball = renderer.build_tarball_from_roles( tarball = renderer.build_tarball_from_roles(config=sub_config,
config=sub_config, roles=build_roles, file_specs=file_specs) roles=build_roles,
file_specs=file_specs)
(encrypted_tarball, decrypt_setup_command, decrypt_command, (encrypted_tarball, decrypt_setup_command, decrypt_command,
decrypt_teardown_command) = _encrypt_node(sub_config, tarball) decrypt_teardown_command) = _encrypt_node(sub_config, tarball)
return renderer.render_template( return renderer.render_template(sub_config,
sub_config, template='scripts/join.sh',
template='scripts/join.sh', context={
context={ 'decrypt_command': decrypt_command,
'decrypt_command': decrypt_command, 'decrypt_setup_command':
'decrypt_setup_command': decrypt_setup_command, decrypt_setup_command,
'decrypt_teardown_command': decrypt_teardown_command, 'decrypt_teardown_command':
'encrypted_tarball': encrypted_tarball, decrypt_teardown_command,
}, 'encrypted_tarball':
roles=build_roles) encrypted_tarball,
},
roles=build_roles)
def _build_node_validate_script(self, node_name): def _build_node_validate_script(self, node_name):
sub_config = self.config.extract_node_config(node_name) sub_config = self.config.extract_node_config(node_name)
return renderer.render_template( return renderer.render_template(sub_config,
sub_config, template='scripts/validate-join.sh') template='scripts/validate-join.sh')
def _encrypt_genesis(config, data): def _encrypt_genesis(config, data):

View File

@ -18,19 +18,19 @@ def promenade(*, verbose):
@promenade.command('build-all', help='Construct all scripts') @promenade.command('build-all', help='Construct all scripts')
@click.option( @click.option('-o',
'-o', '--output-dir',
'--output-dir', default='.',
default='.', type=click.Path(exists=True,
type=click.Path( file_okay=False,
exists=True, file_okay=False, dir_okay=True, resolve_path=True), dir_okay=True,
required=True, resolve_path=True),
help='Location to write complete cluster configuration.') required=True,
help='Location to write complete cluster configuration.')
@click.option('--validators', is_flag=True, help='Generate validation scripts') @click.option('--validators', is_flag=True, help='Generate validation scripts')
@click.option( @click.option('--leave-kubectl',
'--leave-kubectl', is_flag=True,
is_flag=True, help='Leave behind kubectl on joined nodes')
help='Leave behind kubectl on joined nodes')
@click.argument('config_files', nargs=-1, type=click.File('rb')) @click.argument('config_files', nargs=-1, type=click.File('rb'))
def build_all(*, config_files, leave_kubectl, output_dir, validators): def build_all(*, config_files, leave_kubectl, output_dir, validators):
debug = _debug() debug = _debug()
@ -49,23 +49,23 @@ def build_all(*, config_files, leave_kubectl, output_dir, validators):
@promenade.command('generate-certs', help='Generate a certs for a site') @promenade.command('generate-certs', help='Generate a certs for a site')
@click.option( @click.option('-o',
'-o', '--output-dir',
'--output-dir', type=click.Path(exists=True,
type=click.Path( file_okay=False,
exists=True, file_okay=False, dir_okay=True, resolve_path=True), dir_okay=True,
required=True, resolve_path=True),
help='Location to write *-certificates.yaml') required=True,
help='Location to write *-certificates.yaml')
@click.argument('config_files', nargs=-1, type=click.File('rb')) @click.argument('config_files', nargs=-1, type=click.File('rb'))
def generate_certs(*, config_files, output_dir): def generate_certs(*, config_files, output_dir):
debug = _debug() debug = _debug()
try: try:
c = config.Configuration.from_streams( c = config.Configuration.from_streams(debug=debug,
debug=debug, streams=config_files,
streams=config_files, substitute=True,
substitute=True, allow_missing_substitutions=True,
allow_missing_substitutions=True, validate=False)
validate=False)
g = generator.Generator(c) g = generator.Generator(c)
g.generate(output_dir) g.generate(output_dir)
except exceptions.PromenadeException as e: except exceptions.PromenadeException as e:

View File

@ -13,6 +13,7 @@ LOG = logging.getLogger(__name__)
class Configuration: class Configuration:
def __init__(self, def __init__(self,
*, *,
documents, documents,
@ -63,11 +64,10 @@ class Configuration:
def from_design_ref(cls, design_ref, ctx=None, **kwargs): def from_design_ref(cls, design_ref, ctx=None, **kwargs):
documents, use_dh_engine = dr.get_documents(design_ref, ctx) documents, use_dh_engine = dr.get_documents(design_ref, ctx)
return cls( return cls(documents=documents,
documents=documents, substitute=use_dh_engine,
substitute=use_dh_engine, validate=use_dh_engine,
validate=use_dh_engine, **kwargs)
**kwargs)
def __getitem__(self, path): def __getitem__(self, path):
return self.get_path( return self.get_path(
@ -105,8 +105,10 @@ class Configuration:
schema = 'promenade/%s/v1' % kind schema = 'promenade/%s/v1' % kind
for document in self.documents: for document in self.documents:
if _matches_filter( if _matches_filter(document,
document, schema=schema, labels=labels, name=name): schema=schema,
labels=labels,
name=name):
yield document yield document
def find(self, *args, **kwargs): def find(self, *args, **kwargs):
@ -122,12 +124,11 @@ class Configuration:
else: else:
LOG.debug('Excluding schema=%s metadata.name=%s', LOG.debug('Excluding schema=%s metadata.name=%s',
document['schema'], _mg(document, 'name')) document['schema'], _mg(document, 'name'))
return Configuration( return Configuration(debug=self.debug,
debug=self.debug, documents=documents,
documents=documents, leave_kubectl=self.leave_kubectl,
leave_kubectl=self.leave_kubectl, substitute=False,
substitute=False, validate=False)
validate=False)
def extract_node_config(self, name): def extract_node_config(self, name):
LOG.debug('Extracting node config for %s.', name) LOG.debug('Extracting node config for %s.', name)
@ -145,12 +146,11 @@ class Configuration:
continue continue
else: else:
documents.append(document) documents.append(document)
return Configuration( return Configuration(debug=self.debug,
debug=self.debug, documents=documents,
documents=documents, leave_kubectl=self.leave_kubectl,
leave_kubectl=self.leave_kubectl, substitute=False,
substitute=False, validate=False)
validate=False)
@property @property
def kubelet_name(self): def kubelet_name(self):

View File

@ -33,8 +33,8 @@ def start_api():
ContextMiddleware(), ContextMiddleware(),
LoggingMiddleware(), LoggingMiddleware(),
] ]
control_api = falcon.API( control_api = falcon.App(request_type=PromenadeRequest,
request_type=PromenadeRequest, middleware=middlewares) middleware=middlewares)
# v1.0 of Promenade API # v1.0 of Promenade API
v1_0_routes = [ v1_0_routes = [
@ -72,10 +72,9 @@ class VersionsResource(BaseResource):
""" """
def on_get(self, req, resp): def on_get(self, req, resp):
resp.body = self.to_json({ resp.body = self.to_json(
'v1.0': { {'v1.0': {
'path': '/api/v1.0', 'path': '/api/v1.0',
'status': 'stable' 'status': 'stable'
} }})
})
resp.status = falcon.HTTP_200 resp.status = falcon.HTTP_200

View File

@ -28,6 +28,7 @@ LOG = logging.getLogger(__name__)
class BaseResource(object): class BaseResource(object):
def on_options(self, req, resp, **kwargs): def on_options(self, req, resp, **kwargs):
""" """
Handle options requests Handle options requests
@ -56,8 +57,8 @@ class BaseResource(object):
LOG.info('Input message body: %s \nContext: %s' % LOG.info('Input message body: %s \nContext: %s' %
(raw_body, req.context)) (raw_body, req.context))
else: else:
LOG.info( LOG.info('No message body specified. \nContext: %s' %
'No message body specified. \nContext: %s' % req.context) req.context)
if has_input: if has_input:
# read the json and validate if necessary # read the json and validate if necessary
try: try:
@ -72,8 +73,8 @@ class BaseResource(object):
(raw_body, req.context)) (raw_body, req.context))
raise exc.InvalidFormatError( raise exc.InvalidFormatError(
title='JSON could not be decoded', title='JSON could not be decoded',
description='%s: Invalid JSON in body: %s' % (req.path, description='%s: Invalid JSON in body: %s' %
jex)) (req.path, jex))
else: else:
# No body passed as input. Fail validation if it was asekd for # No body passed as input. Fail validation if it was asekd for
if validate_json_schema is not None: if validate_json_schema is not None:

View File

@ -89,9 +89,10 @@ class ContextMiddleware(object):
""" """
def _format_uuid_string(self, string): def _format_uuid_string(self, string):
return (string.replace('urn:', '').replace('uuid:', return (string.replace('urn:',
'').strip('{}').replace( '').replace('uuid:',
'-', '').lower()) '').strip('{}').replace('-',
'').lower())
def _is_uuid_like(self, val): def _is_uuid_like(self, val):
try: try:
@ -115,16 +116,16 @@ class ContextMiddleware(object):
class LoggingMiddleware(object): class LoggingMiddleware(object):
def process_request(self, req, resp): def process_request(self, req, resp):
# don't log health checks # don't log health checks
if not req.url.endswith('/health'): if not req.url.endswith('/health'):
ctx = req.context ctx = req.context
LOG.info( LOG.info("Request: %s %s %s",
"Request: %s %s %s", req.method,
req.method, req.uri,
req.uri, req.query_string,
req.query_string, ctx=ctx)
ctx=ctx)
def process_response(self, req, resp, resource, req_succeeded): def process_response(self, req, resp, resource, req_succeeded):
ctx = req.context ctx = req.context
@ -132,10 +133,9 @@ class LoggingMiddleware(object):
if req.url.endswith('/health'): if req.url.endswith('/health'):
resp_code = self._get_resp_code(resp) resp_code = self._get_resp_code(resp)
if not resp_code == 204: if not resp_code == 204:
LOG.error( LOG.error('Health check has failed with response status %s',
'Health check has failed with response status %s', resp.status,
resp.status, ctx=ctx)
ctx=ctx)
else: else:
context_marker = getattr(ctx, 'context_marker', None) context_marker = getattr(ctx, 'context_marker', None)
request_id = getattr(ctx, 'request_id', None) request_id = getattr(ctx, 'request_id', None)
@ -149,12 +149,11 @@ class LoggingMiddleware(object):
resp.append_header('X-END-USER', end_user) resp.append_header('X-END-USER', end_user)
if user is not None: if user is not None:
resp.append_header('X-USER-NAME', user) resp.append_header('X-USER-NAME', user)
LOG.info( LOG.info("Response: %s %s %s",
"Response: %s %s %s", req.method,
req.method, req.uri,
req.uri, resp.status,
resp.status, ctx=ctx)
ctx=ctx)
def _get_resp_code(self, resp): def _get_resp_code(self, resp):
# Falcon response object doesn't have a raw status code. # Falcon response object doesn't have a raw status code.

View File

@ -24,6 +24,7 @@ LOG = logging.getLogger(__name__)
class ValidateDesignResource(base.BaseResource): class ValidateDesignResource(base.BaseResource):
@policy.ApiEnforcer('kubernetes_provisioner:post_validatedesign') @policy.ApiEnforcer('kubernetes_provisioner:post_validatedesign')
def on_post(self, req, resp): def on_post(self, req, resp):
result = ValidationMessage() result = ValidationMessage()

View File

@ -46,7 +46,7 @@ def _get_from_deckhand(design_ref, ctx=None):
else: else:
addl_headers = {} addl_headers = {}
auth = keystoneauth1.identity.v3.Password(**keystone_args) auth = keystoneauth1.identity.v3.Password(**keystone_args)
session = keystoneauth1.session.Session( session = keystoneauth1.session.Session(auth=auth,
auth=auth, additional_headers=addl_headers) additional_headers=addl_headers)
return session.get(design_ref[len(_DECKHAND_PREFIX):], timeout=DH_TIMEOUT) return session.get(design_ref[len(_DECKHAND_PREFIX):], timeout=DH_TIMEOUT)

View File

@ -12,6 +12,7 @@ LOG = logging.getLogger(__name__)
class EncryptionMethod(metaclass=abc.ABCMeta): class EncryptionMethod(metaclass=abc.ABCMeta):
@abc.abstractmethod @abc.abstractmethod
def encrypt(self, data): def encrypt(self, data):
pass pass
@ -50,6 +51,7 @@ class EncryptionMethod(metaclass=abc.ABCMeta):
class NullEncryptionMethod(EncryptionMethod): class NullEncryptionMethod(EncryptionMethod):
def encrypt(self, data): def encrypt(self, data):
LOG.debug('Performing NOOP encryption') LOG.debug('Performing NOOP encryption')
return data return data

View File

@ -114,18 +114,17 @@ def default_error_serializer(req, resp, exception):
""" """
Writes the default error message body, when we don't handle it otherwise Writes the default error message body, when we don't handle it otherwise
""" """
format_error_resp( format_error_resp(req,
req, resp,
resp, status_code=exception.status,
status_code=exception.status, message=exception.description,
message=exception.description, reason=exception.title,
reason=exception.title, error_type=exception.__class__.__name__,
error_type=exception.__class__.__name__, error_list=[{
error_list=[{ 'message': exception.description,
'message': exception.description, 'error': True
'error': True }],
}], info_list=None)
info_list=None)
def default_exception_handler(ex, req, resp, params): def default_exception_handler(ex, req, resp, params):
@ -140,13 +139,12 @@ def default_exception_handler(ex, req, resp, params):
# take care of the uncaught stuff # take care of the uncaught stuff
exc_string = traceback.format_exc() exc_string = traceback.format_exc()
LOG.error('Unhanded Exception being handled: \n%s', exc_string) LOG.error('Unhanded Exception being handled: \n%s', exc_string)
format_error_resp( format_error_resp(req,
req, resp,
resp, falcon.HTTP_500,
falcon.HTTP_500, error_type=ex.__class__.__name__,
error_type=ex.__class__.__name__, message="Unhandled Exception raised: %s" % str(ex),
message="Unhandled Exception raised: %s" % str(ex), retry=True)
retry=True)
class PromenadeException(Exception): class PromenadeException(Exception):
@ -190,8 +188,8 @@ class PromenadeException(Exception):
self.info_list = info_list self.info_list = info_list
self.retry = retry self.retry = retry
self.trace = trace self.trace = trace
super().__init__( super().__init__(PromenadeException._gen_ex_message(
PromenadeException._gen_ex_message(title, description)) title, description))
@staticmethod @staticmethod
def _gen_ex_message(title, description): def _gen_ex_message(title, description):
@ -204,16 +202,15 @@ class PromenadeException(Exception):
""" """
The handler used for app errors and child classes The handler used for app errors and child classes
""" """
format_error_resp( format_error_resp(req,
req, resp,
resp, ex.status,
ex.status, message=ex.title,
message=ex.title, reason=ex.description,
reason=ex.description, error_list=ex.error_list,
error_list=ex.error_list, info_list=ex.info_list,
info_list=ex.info_list, error_type=ex.__class__.__name__,
error_type=ex.__class__.__name__, retry=ex.retry)
retry=ex.retry)
def display(self, debug=False): def display(self, debug=False):
if self.trace or debug: if self.trace or debug:
@ -300,8 +297,9 @@ class InvalidFormatError(PromenadeException):
title = self.title title = self.title
if not description: if not description:
description = self.title description = self.title
super(InvalidFormatError, self).__init__( super(InvalidFormatError, self).__init__(title,
title, description, status=self.status) description,
status=self.status)
class ValidationException(PromenadeException): class ValidationException(PromenadeException):

View File

@ -10,6 +10,7 @@ LOG = logging.getLogger(__name__)
class Generator: class Generator:
def __init__(self, config, block_strings=True): def __init__(self, config, block_strings=True):
self.config = config self.config = config
self.keys = pki.PKI(block_strings=block_strings) self.keys = pki.PKI(block_strings=block_strings)
@ -69,8 +70,10 @@ class Generator:
def gen_cert(self, document_name, *, ca_cert, ca_key, **kwargs): def gen_cert(self, document_name, *, ca_cert, ca_key, **kwargs):
ca_cert_data = ca_cert['data'] ca_cert_data = ca_cert['data']
ca_key_data = ca_key['data'] ca_key_data = ca_key['data']
return self.keys.generate_certificate( return self.keys.generate_certificate(document_name,
document_name, ca_cert=ca_cert_data, ca_key=ca_key_data, **kwargs) ca_cert=ca_cert_data,
ca_key=ca_key_data,
**kwargs)
def gen_keypair(self, document_name): def gen_keypair(self, document_name):
return self.keys.generate_keypair(document_name) return self.keys.generate_keypair(document_name)
@ -95,9 +98,9 @@ class Generator:
document_name, kinds) document_name, kinds)
return docs return docs
else: else:
raise exceptions.IncompletePKIPairError( raise exceptions.IncompletePKIPairError('Incomplete set %s '
'Incomplete set %s ' 'for name: %s' %
'for name: %s' % (kinds, document_name)) (kinds, document_name))
else: else:
docs = self._find_in_outputs(schemas, document_name) docs = self._find_in_outputs(schemas, document_name)
@ -129,17 +132,16 @@ class Generator:
documents = self.get_documents() documents = self.get_documents()
with open(os.path.join(output_dir, 'certificates.yaml'), 'w') as f: with open(os.path.join(output_dir, 'certificates.yaml'), 'w') as f:
# Don't use safe_dump_all so we can block format certificate data. # Don't use safe_dump_all so we can block format certificate data.
yaml.dump_all( yaml.dump_all(documents,
documents, stream=f,
stream=f, default_flow_style=False,
default_flow_style=False, explicit_start=True,
explicit_start=True, indent=2)
indent=2)
def get_documents(self): def get_documents(self):
return list( return list(
itertools.chain.from_iterable( itertools.chain.from_iterable(v.values()
v.values() for v in self.outputs.values())) for v in self.outputs.values()))
def get_host_list(service_names): def get_host_list(service_names):

View File

@ -115,9 +115,10 @@ def _get_update_labels(existing_labels, input_labels):
# no existing labels found # no existing labels found
if not existing_labels: if not existing_labels:
# filter delete label request since there is no labels set on a node # filter delete label request since there is no labels set on a node
update_labels.update( update_labels.update({
{k: v k: v
for k, v in input_labels.items() if v is not None}) for k, v in input_labels.items() if v is not None
})
return update_labels return update_labels
# new labels or overriding labels # new labels or overriding labels

View File

@ -58,6 +58,7 @@ DEFAULT_CONFIG = {
class BlankContextFilter(logging.Filter): class BlankContextFilter(logging.Filter):
def filter(self, record): def filter(self, record):
for key in BLANK_CONTEXT_VALUES: for key in BLANK_CONTEXT_VALUES:
if getattr(record, key, None) is None: if getattr(record, key, None) is None:
@ -66,6 +67,7 @@ class BlankContextFilter(logging.Filter):
class Adapter(logging.LoggerAdapter): class Adapter(logging.LoggerAdapter):
def process(self, msg, kwargs): def process(self, msg, kwargs):
extra = kwargs.get('extra', {}) extra = kwargs.get('extra', {})

View File

@ -10,11 +10,10 @@ def setup(disable_keystone=False):
log_group = cfg.OptGroup(name='logging', title='Logging options') log_group = cfg.OptGroup(name='logging', title='Logging options')
cfg.CONF.register_group(log_group) cfg.CONF.register_group(log_group)
logging_options = [ logging_options = [
cfg.StrOpt( cfg.StrOpt('log_level',
'log_level', choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], default='DEBUG',
default='DEBUG', help='Global log level for PROMENADE')
help='Global log level for PROMENADE')
] ]
cfg.CONF.register_opts(logging_options, group=log_group) cfg.CONF.register_opts(logging_options, group=log_group)
if disable_keystone is False: if disable_keystone is False:

View File

@ -13,6 +13,7 @@ LOG = logging.getLogger(__name__)
class PKI: class PKI:
def __init__(self, *, block_strings=True): def __init__(self, *, block_strings=True):
self.block_strings = block_strings self.block_strings = block_strings
self._ca_config_string = None self._ca_config_string = None
@ -193,8 +194,9 @@ class block_literal(str):
def block_literal_representer(dumper, data): def block_literal_representer(dumper, data):
return dumper.represent_scalar( return dumper.represent_scalar('tag:yaml.org,2002:str',
'tag:yaml.org,2002:str', str(data), style='|') str(data),
style='|')
yaml.add_representer(block_literal, block_literal_representer) yaml.add_representer(block_literal, block_literal_representer)

View File

@ -25,10 +25,9 @@ LOG = logging.getLogger(__name__)
policy_engine = None policy_engine = None
POLICIES = [ POLICIES = [
op.RuleDefault( op.RuleDefault('admin_required',
'admin_required', 'role:admin or is_admin:1',
'role:admin or is_admin:1', description='Actions requiring admin authority'),
description='Actions requiring admin authority'),
op.DocumentedRuleDefault('kubernetes_provisioner:get_join_scripts', op.DocumentedRuleDefault('kubernetes_provisioner:get_join_scripts',
'role:admin', 'Get join script for node', 'role:admin', 'Get join script for node',
[{ [{
@ -51,6 +50,7 @@ POLICIES = [
class PromenadePolicy: class PromenadePolicy:
def __init__(self): def __init__(self):
self.enforcer = op.Enforcer(cfg.CONF) self.enforcer = op.Enforcer(cfg.CONF)
@ -72,18 +72,18 @@ class ApiEnforcer(object):
self.action = action self.action = action
def __call__(self, f): def __call__(self, f):
@functools.wraps(f) @functools.wraps(f)
def secure_handler(slf, req, resp, *args, **kwargs): def secure_handler(slf, req, resp, *args, **kwargs):
ctx = req.context ctx = req.context
policy_eng = ctx.policy_engine policy_eng = ctx.policy_engine
# policy engine must be configured # policy engine must be configured
if policy_eng is not None: if policy_eng is not None:
LOG.debug( LOG.debug('Enforcing policy %s on request %s using engine %s',
'Enforcing policy %s on request %s using engine %s', self.action,
self.action, ctx.request_id,
ctx.request_id, policy_eng.__class__.__name__,
policy_eng.__class__.__name__, ctx=ctx)
ctx=ctx)
else: else:
LOG.error('No policy engine configured', ctx=ctx) LOG.error('No policy engine configured', ctx=ctx)
raise ex.PromenadeException( raise ex.PromenadeException(
@ -97,34 +97,30 @@ class ApiEnforcer(object):
LOG.debug('Request is authorized', ctx=ctx) LOG.debug('Request is authorized', ctx=ctx)
authorized = True authorized = True
except Exception: except Exception:
LOG.exception( LOG.exception('Error authorizing request for action %s',
'Error authorizing request for action %s', self.action,
self.action, ctx=ctx)
ctx=ctx) raise ex.ApiError(title="Expectation Failed",
raise ex.ApiError( status=falcon.HTTP_417,
title="Expectation Failed", retry=False)
status=falcon.HTTP_417,
retry=False)
if authorized: if authorized:
return f(slf, req, resp, *args, **kwargs) return f(slf, req, resp, *args, **kwargs)
else: else:
# raise the appropriate response exeception # raise the appropriate response exeception
if ctx.authenticated: if ctx.authenticated:
LOG.error( LOG.error('Unauthorized access attempted for action %s',
'Unauthorized access attempted for action %s', self.action,
self.action, ctx=ctx)
ctx=ctx)
raise ex.ApiError( raise ex.ApiError(
title="Forbidden", title="Forbidden",
status=falcon.HTTP_403, status=falcon.HTTP_403,
description="Credentials do not permit access", description="Credentials do not permit access",
retry=False) retry=False)
else: else:
LOG.error( LOG.error('Unathenticated access attempted for action %s',
'Unathenticated access attempted for action %s', self.action,
self.action, ctx=ctx)
ctx=ctx)
raise ex.ApiError( raise ex.ApiError(
title="Unauthenticated", title="Unauthenticated",
status=falcon.HTTP_401, status=falcon.HTTP_401,

View File

@ -30,8 +30,8 @@ def build_tarball_from_roles(config, *, roles, file_specs):
def insert_charts_into_bundler(bundler): def insert_charts_into_bundler(bundler):
for root, _dirnames, filenames in os.walk( for root, _dirnames, filenames in os.walk('/opt/promenade/charts',
'/opt/promenade/charts', followlinks=True): followlinks=True):
for source_filename in filenames: for source_filename in filenames:
if _source_file_is_excluded(source_filename): if _source_file_is_excluded(source_filename):
continue continue
@ -43,8 +43,9 @@ def insert_charts_into_bundler(bundler):
LOG.debug('Copying asset file %s (mode=%o)', source_path, LOG.debug('Copying asset file %s (mode=%o)', source_path,
stat.st_mode) stat.st_mode)
with open(source_path) as f: with open(source_path) as f:
bundler.add( bundler.add(path=destination_path,
path=destination_path, data=f.read(), mode=stat.st_mode) data=f.read(),
mode=stat.st_mode)
def render_role_into_bundler(*, bundler, config, role): def render_role_into_bundler(*, bundler, config, role):
@ -57,12 +58,11 @@ def render_role_into_bundler(*, bundler, config, role):
stat = os.stat(source_path) stat = os.stat(source_path)
LOG.debug('Rendering file %s (mode=%o)', source_path, stat.st_mode) LOG.debug('Rendering file %s (mode=%o)', source_path, stat.st_mode)
destination_path = os.path.join(destination_base, source_filename) destination_path = os.path.join(destination_base, source_filename)
render_template_into_bundler( render_template_into_bundler(bundler=bundler,
bundler=bundler, config=config,
config=config, destination_path=destination_path,
destination_path=destination_path, source_path=source_path,
source_path=source_path, mode=stat.st_mode)
mode=stat.st_mode)
def render_template_into_bundler(*, bundler, config, destination_path, def render_template_into_bundler(*, bundler, config, destination_path,
@ -126,8 +126,8 @@ def _base64_encode(s):
def _fill_no_proxy(network_config): def _fill_no_proxy(network_config):
proxy = network_config.get('proxy', {}).get('url') proxy = network_config.get('proxy', {}).get('url')
if proxy: if proxy:
additional = network_config.get('proxy', {}).get( additional = network_config.get('proxy',
'additional_no_proxy', []) {}).get('additional_no_proxy', [])
if additional: if additional:
return ','.join(additional) + ',' + _default_no_proxy( return ','.join(additional) + ',' + _default_no_proxy(
network_config) network_config)
@ -145,8 +145,8 @@ def _default_no_proxy(network_config):
'kubernetes', 'kubernetes',
'kubernetes.default', 'kubernetes.default',
'kubernetes.default.svc', 'kubernetes.default.svc',
'kubernetes.default.svc.%s' % network_config.get('dns', {}).get( 'kubernetes.default.svc.%s' %
'cluster_domain', 'cluster.local'), network_config.get('dns', {}).get('cluster_domain', 'cluster.local'),
] ]
return ','.join(include) return ','.join(include)

View File

@ -11,6 +11,7 @@ LOG = logging.getLogger(__name__)
class TarBundler: class TarBundler:
def __init__(self): def __init__(self):
self._tar_blob = io.BytesIO() self._tar_blob = io.BytesIO()
self._tf = tarfile.open(fileobj=self._tar_blob, mode='w|gz') self._tf = tarfile.open(fileobj=self._tar_blob, mode='w|gz')

View File

@ -140,8 +140,8 @@ def _load_schemas():
for schema in yaml.safe_load_all(f): for schema in yaml.safe_load_all(f):
name = schema['metadata']['name'] name = schema['metadata']['name']
if name in SCHEMAS: if name in SCHEMAS:
raise RuntimeError( raise RuntimeError('Duplicate schema specified for: %s' %
'Duplicate schema specified for: %s' % name) name)
SCHEMAS[name] = schema['data'] SCHEMAS[name] = schema['data']

View File

@ -1,19 +1,35 @@
Beaker==1.12.0 #
click==8.1.3 Beaker<=1.12.0
falcon==3.1.1 click
Jinja2==3.1.2 ConfigParser
jsonpath-ng==1.5.3 Deckhand @ git+https://opendev.org/airship/deckhand.git@ac4edb0c64c9f9af62e7cb63f049508596d25747#egg=deckhand
jsonschema==3.2.0 docutils
keystoneauth1==5.1.1 falcon
keystonemiddleware==10.2.0 fixtures
setuptools==67.0.0 importlib_metadata
Jinja2
jsonpath_ng
jsonschema<=3.2.0
keystoneauth1<=5.1.1
kubernetes==26.1.0 kubernetes==26.1.0
oslo.context==5.0.0 MarkupSafe<2.1.0, >=0.9.2
oslo.policy==4.0.0 mock
PasteDeploy==3.0.1 nose
oslo.config<=8.7.1
oslo.context<=4.1.0
oslo.policy<=3.10.1
pylibyaml==0.1.0 pylibyaml==0.1.0
PyYAML==5.4.1 PyYAML<=5.4.1
requests==2.28.2 reno
uWSGI==2.0.21 requests==2.27.0
Deckhand @ git+https://opendev.org/airship/deckhand.git@70aa35a396d5f76753616f5289228f9c2b0e7ec7 setuptools<=45.2.0
# Deckhand @ git+https://review.opendev.org/airship/deckhand@refs/changes/93/869293/222#egg=deckhand six
Sphinx
sphinx-rtd-theme==0.5.0
testrepository
testresources
testscenarios
testtools<=2.5.0
urllib3 >= 1.21.1, <= 1.25.11
virtualenv
wheel

View File

@ -1,136 +1,175 @@
alabaster==0.7.13 alabaster==0.7.13
alembic==1.7.1 alembic==1.4.3
amqp==5.0.8 amqp==2.6.1
attrs==22.2.0 argcomplete==3.0.5
attrs==23.1.0
autopage==0.5.1 autopage==0.5.1
Babel==2.11.0 Babel==2.12.1
bandit==1.6.0
bcrypt==4.0.1 bcrypt==4.0.1
Beaker==1.12.0 Beaker==1.12.0
cachetools==5.3.0 cachetools==5.3.0
certifi==2022.12.7 certifi==2022.12.7
cffi==1.15.1 cffi==1.15.1
charset-normalizer==3.0.1 chardet==3.0.4
charset-normalizer==2.0.12
click==8.1.3 click==8.1.3
cliff==4.2.0 cliff==4.2.0
cmd2==2.4.3 cmd2==2.4.3
configparser==5.3.0
coverage==7.2.3
cryptography==3.4.8 cryptography==3.4.8
debtcollector==2.5.0 debtcollector==2.5.0
Deckhand @ git+https://opendev.org/airship/deckhand.git@70aa35a396d5f76753616f5289228f9c2b0e7ec7 Deckhand @ git+https://opendev.org/airship/deckhand.git@ac4edb0c64c9f9af62e7cb63f049508596d25747#egg=deckhand
decorator==5.1.1 decorator==5.1.1
deepdiff==5.8.1 deepdiff==5.8.1
distlib==0.3.6
dnspython==2.3.0 dnspython==2.3.0
docutils==0.17.1 docutils==0.19
dogpile.cache==1.1.8 dogpile.cache==1.1.8
entrypoints==0.3 dulwich==0.21.3
eventlet==0.33.3 eventlet==0.33.3
exceptiongroup==1.1.1
extras==1.0.0 extras==1.0.0
falcon==3.1.1 falcon==3.1.1
fasteners==0.18 fasteners==0.18
filelock==3.12.0
fixtures==3.0.0 fixtures==3.0.0
flake8==3.7.9 flake8==3.8.4
future==0.18.3
futurist==2.4.1 futurist==2.4.1
google-auth==2.16.1 gitdb==4.0.10
GitPython==3.1.31
google-auth==2.17.3
greenlet==2.0.2 greenlet==2.0.2
hacking==3.0.1 hacking==4.1.0
html5lib==0.9999999
httpexceptor==1.4.0
idna==3.4 idna==3.4
imagesize==1.4.1 imagesize==1.4.1
importlib-metadata==6.0.0 importlib-metadata==6.5.0
importlib-resources==5.12.0 iniconfig==2.0.0
iso8601==1.1.0 iso8601==1.1.0
Jinja2==3.1.2 Jinja2==3.1.2
jsonpath-ng==1.5.3 jsonpath-ng==1.5.3
jsonpickle==3.0.1 jsonpath-rw==1.4.0
jsonpath-rw-ext==1.2.2
jsonpickle==1.4.1
jsonschema==3.2.0 jsonschema==3.2.0
keystoneauth1==5.1.1 keystoneauth1==5.1.1
keystonemiddleware==10.2.0 keystonemiddleware==10.2.0
kombu==5.1.0 kombu==4.6.11
kubernetes==26.1.0 kubernetes==26.1.0
Mako==1.2.4 Mako==1.2.4
MarkupSafe==2.1.2 MarkupSafe==2.0.1
mccabe==0.6.1 mccabe==0.6.1
msgpack==1.0.4 mock==5.0.2
msgpack==1.0.5
netaddr==0.8.0 netaddr==0.8.0
netifaces==0.11.0 netifaces==0.11.0
networkx==2.6.2 networkx==3.1
nose==1.3.7
oauthlib==3.2.2 oauthlib==3.2.2
ordered-set==4.1.0 ordered-set==4.1.0
os-service-types==1.7.0 os-service-types==1.7.0
oslo.cache==2.8.2 oslo.cache==2.10.1
oslo.concurrency==4.4.1 oslo.concurrency==5.1.1
oslo.config==8.7.1 oslo.config==8.7.1
oslo.context==5.0.0 oslo.context==4.1.0
oslo.db==11.0.0 oslo.db==10.0.0
oslo.i18n==6.0.0 oslo.i18n==6.0.0
oslo.log==4.6.0 oslo.log==4.6.0
oslo.messaging==12.9.4 oslo.messaging==12.13.0
oslo.metrics==0.6.0 oslo.metrics==0.6.0
oslo.middleware==4.4.0 oslo.middleware==4.4.0
oslo.policy==4.0.0 oslo.policy==3.10.1
oslo.serialization==4.2.0 oslo.serialization==4.2.0
oslo.service==3.1.1 oslo.service==3.1.1
oslo.utils==4.10.2 oslo.utils==4.12.3
packaging==23.0 packaging==21.3
Paste==3.5.0 Paste==3.5.0
PasteDeploy==3.0.1 PasteDeploy==3.0.1
pbr==5.6.0 PasteScript==3.3.0
pbr==5.5.1
pip==23.0.1
platformdirs==3.2.0
pluggy==1.0.0
ply==3.11 ply==3.11
prettytable==3.6.0 prettytable==3.7.0
prometheus-client==0.16.0 prometheus-client==0.16.0
psycopg2-binary==2.9.5 psycopg2-binary==2.9.6
pyasn1==0.4.8 pyasn1==0.5.0
pyasn1-modules==0.2.8 pyasn1-modules==0.3.0
pycadf==3.1.1 pycadf==3.1.1
pycodestyle==2.5.0 pycodestyle==2.6.0
pycparser==2.21 pycparser==2.21
pyflakes==2.1.1 pyflakes==2.2.0
Pygments==2.14.0 Pygments==2.14.0
pyinotify==0.9.6
pylibyaml==0.1.0 pylibyaml==0.1.0
pyparsing==3.0.9 pymongo==4.3.3
pyparsing==2.4.7
pyperclip==1.8.2 pyperclip==1.8.2
pyproject_api==1.5.0
pyrsistent==0.19.3 pyrsistent==0.19.3
pytest==7.3.1
pytest-cov==4.0.0
python-barbicanclient==5.2.0 python-barbicanclient==5.2.0
python-dateutil==2.8.2 python-dateutil==2.8.2
python-keystoneclient==3.22.0 python-editor==1.0.4
python-keystoneclient==5.1.0
python-memcached==1.59 python-memcached==1.59
python-subunit==1.4.2 python-mimeparse==1.6.0
pytz==2022.7.1 python-subunit==1.4.0
pytz==2023.3
PyYAML==5.4.1 PyYAML==5.4.1
reno==4.0.0
repoze.lru==0.7 repoze.lru==0.7
requests==2.28.2 requests==2.27.0
requests-oauthlib==1.3.1 requests-oauthlib==1.3.1
resolver==0.2.1
rfc3986==2.0.0 rfc3986==2.0.0
Routes==2.5.1 Routes==2.5.1
rsa==4.9 rsa==4.9
selector==0.10.1
setuptools==45.2.0
simplejson==3.19.1
six==1.16.0 six==1.16.0
smmap==5.0.0
snowballstemmer==2.2.0 snowballstemmer==2.2.0
Sphinx==5.3.0 Sphinx==6.1.3
sphinx-rtd-theme==1.1.1 sphinx-rtd-theme==0.5.0
sphinxcontrib-applehelp==1.0.4 sphinxcontrib-applehelp==1.0.4
sphinxcontrib-devhelp==1.0.2 sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.1 sphinxcontrib-htmlhelp==2.0.1
sphinxcontrib-jsmath==1.0.1 sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3 sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5 sphinxcontrib-serializinghtml==1.1.5
SQLAlchemy==1.4.23 SQLAlchemy==1.3.20
sqlalchemy-migrate==0.13.0 sqlalchemy-migrate==0.13.0
sqlparse==0.4.3 sqlparse==0.4.4
statsd==4.0.1 statsd==4.0.1
stestr==3.2.0 stevedore==5.0.0
stevedore==4.1.1
Tempita==0.5.2 Tempita==0.5.2
testrepository==0.0.20
testresources==2.0.1 testresources==2.0.1
testscenarios==0.5.0 testscenarios==0.5.0
testtools==2.5.0 testtools==2.5.0
urllib3==1.26.6 tiddlyweb==2.4.3
tomli==2.0.1
tomlkit==0.11.7
typing_extensions==4.5.0
urllib3==1.25.11
uWSGI==2.0.21 uWSGI==2.0.21
vine==5.0.0 vine==1.3.0
voluptuous==0.13.1 virtualenv==20.22.0
wcwidth==0.2.6 wcwidth==0.2.6
WebOb==1.8.7 WebOb==1.8.7
websocket-client==1.5.1 websocket-client==1.5.1
Werkzeug==2.0.1 Werkzeug==2.1.2
wrapt==1.14.1 wheel==0.40.0
wrapt==1.15.0
wsgi-intercept==1.11.0
xmltodict==0.13.0
yapf==0.33.0
yappi==1.4.0 yappi==1.4.0
zipp==3.14.0 yq==3.2.1
zipp==3.15.0

View File

@ -1,5 +1,6 @@
[metadata] [metadata]
name = promenade name = promenade
version = 1.1
summary = Promenade is a tool for bootstrapping a resilient kubernetes cluster and managing its life-cycle via helm charts. summary = Promenade is a tool for bootstrapping a resilient kubernetes cluster and managing its life-cycle via helm charts.
description_file = README.md description_file = README.md
author = The Airship Authors author = The Airship Authors
@ -13,6 +14,7 @@ classifier =
Operating System :: POSIX :: Linux Operating System :: POSIX :: Linux
Programming Language :: Python :: 3 Programming Language :: Python :: 3
Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.10
[files] [files]
packages = packages =

View File

@ -1,4 +1,5 @@
pytest pytest >= 3.0
flake8==3.7.9 pytest-cov==4.0.0
bandit>=1.5 flake8==3.8.4
yapf==0.24.0 bandit==1.6.0
yapf

View File

@ -62,8 +62,9 @@ def test_node_labels_pass(mock_kubeclient, mock_update_node_labels, client,
""" """
mock_kubeclient.return_value = None mock_kubeclient.return_value = None
mock_update_node_labels.return_value = _mock_update_node_labels() mock_update_node_labels.return_value = _mock_update_node_labels()
response = client.simulate_put( response = client.simulate_put('/api/v1.0/node-labels/ubuntubox',
'/api/v1.0/node-labels/ubuntubox', headers=req_header, body=req_body) headers=req_header,
body=req_body)
assert response.status == falcon.HTTP_200 assert response.status == falcon.HTTP_200
assert response.json["status"] == "Success" assert response.json["status"] == "Success"
@ -77,8 +78,9 @@ def test_node_labels_missing_inputs(client, req_header, req_body):
req_header: API request header req_header: API request header
req_body: API request body req_body: API request body
""" """
response = client.simulate_post( response = client.simulate_post('/api/v1.0/node-labels',
'/api/v1.0/node-labels', headers=req_header, body=req_body) headers=req_header,
body=req_body)
assert response.status == falcon.HTTP_404 assert response.status == falcon.HTTP_404

View File

@ -49,8 +49,9 @@ def std_body():
def test_post_validatedesign_empty_docs(client, std_body, std_headers): def test_post_validatedesign_empty_docs(client, std_body, std_headers):
with mock.patch('promenade.design_ref.get_documents') as gd: with mock.patch('promenade.design_ref.get_documents') as gd:
gd.return_value = ([], False) gd.return_value = ([], False)
response = client.simulate_post( response = client.simulate_post('/api/v1.0/validatedesign',
'/api/v1.0/validatedesign', headers=std_headers, body=std_body) headers=std_headers,
body=std_body)
assert response.status == falcon.HTTP_400 assert response.status == falcon.HTTP_400
assert response.json['details']['errorCount'] == 5 assert response.json['details']['errorCount'] == 5
@ -93,10 +94,8 @@ VALID_DOCS = [
'files': [{ 'files': [{
'content': 'content':
'# placeholder for triggering calico etcd bootstrapping', '# placeholder for triggering calico etcd bootstrapping',
'mode': 'mode': 420,
420, 'path': '/var/lib/anchor/calico-etcd-bootstrap'
'path':
'/var/lib/anchor/calico-etcd-bootstrap'
}], }],
'hostname': 'hostname':
'n0', 'n0',
@ -107,10 +106,8 @@ VALID_DOCS = [
'registry.k8s.io/kube-apiserver-amd64:v1.26.0', 'registry.k8s.io/kube-apiserver-amd64:v1.26.0',
'controller-manager': 'controller-manager':
'registry.k8s.io/kube-controller-manager-amd64:v1.26.0', 'registry.k8s.io/kube-controller-manager-amd64:v1.26.0',
'etcd': 'etcd': 'quay.io/coreos/etcd:v3.5.4',
'quay.io/coreos/etcd:v3.5.4', 'scheduler': 'registry.k8s.io/kube-scheduler-amd64:v1.26.0'
'scheduler':
'registry.k8s.io/kube-scheduler-amd64:v1.26.0'
} }
}, },
'ip': 'ip':
@ -137,8 +134,7 @@ VALID_DOCS = [
}, },
{ {
'data': { 'data': {
'files': 'files': [{
[{
'mode': 'mode':
365, 365,
'path': 'path':
@ -147,15 +143,12 @@ VALID_DOCS = [
'kubernetes/node/bin/kubelet', 'kubernetes/node/bin/kubelet',
'tar_url': 'tar_url':
'https://dl.k8s.io/v1.26.0/kubernetes-node-linux-amd64.tar.gz' 'https://dl.k8s.io/v1.26.0/kubernetes-node-linux-amd64.tar.gz'
}, }, {
{ 'content':
'content': '/var/lib/docker/containers/*/*-json.log\n{\n compress\n copytruncate\n create 0644 root root\n daily\n dateext\n dateformat -%Y%m%d-%s\n maxsize 10M\n missingok\n notifempty\n su root root\n rotate 1\n}',
'/var/lib/docker/containers/*/*-json.log\n{\n compress\n copytruncate\n create 0644 root root\n daily\n dateext\n dateformat -%Y%m%d-%s\n maxsize 10M\n missingok\n notifempty\n su root root\n rotate 1\n}', 'mode': 292,
'mode': 'path': '/etc/logrotate.d/json-logrotate'
292, }],
'path':
'/etc/logrotate.d/json-logrotate'
}],
'images': { 'images': {
'haproxy': 'haproxy:1.8.3', 'haproxy': 'haproxy:1.8.3',
'helm': { 'helm': {
@ -261,7 +254,8 @@ VALID_DOCS = [
def test_post_validatedesign_valid_docs(client, std_body, std_headers): def test_post_validatedesign_valid_docs(client, std_body, std_headers):
with mock.patch('promenade.design_ref.get_documents') as gd: with mock.patch('promenade.design_ref.get_documents') as gd:
gd.return_value = (VALID_DOCS, False) gd.return_value = (VALID_DOCS, False)
response = client.simulate_post( response = client.simulate_post('/api/v1.0/validatedesign',
'/api/v1.0/validatedesign', headers=std_headers, body=std_body) headers=std_headers,
body=std_body)
assert response.status == falcon.HTTP_200 assert response.status == falcon.HTTP_200
assert response.json['details']['errorCount'] == 0 assert response.json['details']['errorCount'] == 0

View File

@ -86,9 +86,9 @@ def _valid_dg(config, dynamic_tag, context_name=None):
if config.get('context', {}).get(context_name): if config.get('context', {}).get(context_name):
return True return True
else: else:
raise TagGenExeception( raise TagGenExeception('Dynamic tag "%s" requested, but "%s"'
'Dynamic tag "%s" requested, but "%s"' ' not found in context' %
' not found in context' % (dynamic_tag, context_name)) (dynamic_tag, context_name))
else: else:
return False return False

View File

@ -3,6 +3,9 @@
set -ex set -ex
export DEBIAN_FRONTEND=noninteractive
echo 'debconf debconf/frontend select Noninteractive' | sudo debconf-set-selections
CFSSL_URL=${CFSSL_URL:-https://pkg.cfssl.org/R1.2/cfssl_linux-amd64} CFSSL_URL=${CFSSL_URL:-https://pkg.cfssl.org/R1.2/cfssl_linux-amd64}
if [[ ! $(command -v cfssl) ]]; then if [[ ! $(command -v cfssl) ]]; then

View File

@ -1,20 +1,13 @@
- hosts: all - hosts: all
roles:
- bindep
- ensure-docker
- ensure-python
- ensure-pip
tasks: tasks:
- include_vars: vars.yaml - include_vars: vars.yaml
- name: Ensure pip
include_role:
name: ensure-pip
- name: Clear firewall
include_role:
name: clear-firewall
- name: Ensure docker
include_role:
name: ensure-docker
- name: Debug tag generation inputs - name: Debug tag generation inputs
block: block:
- debug: - debug:
@ -41,6 +34,14 @@
debug: debug:
var: image_tags var: image_tags
- name: Install Docker python module for ansible docker login
block:
- pip:
name: docker
version: 4.4.4
executable: pip3
become: True
- name: Make images - name: Make images
when: not publish when: not publish
block: block:

View File

@ -11,6 +11,12 @@
# limitations under the License. # limitations under the License.
- hosts: primary - hosts: primary
roles:
- bindep
- ensure-docker
- ensure-python
- ensure-pip
tasks: tasks:
- name: Execute the make target for basic testing - name: Execute the make target for basic testing
make: make:

36
tox.ini
View File

@ -14,16 +14,18 @@ pass_env =
commands = {posargs} commands = {posargs}
[testenv:py38] [testenv:py38]
allowlist_externals =
pytest
setenv = setenv =
PYTHONWARNING=all PYTHONWARNING=all
deps = -r{toxinidir}/requirements-frozen.txt deps =
-r{toxinidir}/test-requirements.txt -r{toxinidir}/requirements-frozen.txt
commands = commands =
pytest {posargs} pytest {posargs}
[testenv:bandit] [testenv:bandit]
deps = deps =
-r{toxinidir}/test-requirements.txt -r{toxinidir}/requirements-frozen.txt
commands = commands =
bandit --skip B324 -r promenade bandit --skip B324 -r promenade
@ -33,34 +35,46 @@ allowlist_externals =
rm rm
deps = deps =
-r{toxinidir}/doc/requirements.txt -r{toxinidir}/doc/requirements.txt
-r{toxinidir}/requirements-frozen.txt
commands = commands =
rm -rf doc/build rm -rf doc/build
sphinx-build -W -b html doc/source doc/build/html sphinx-build -W -b html doc/source doc/build/html
[testenv:fmt] [testenv:fmt]
deps = deps =
-r{toxinidir}/test-requirements.txt -r{toxinidir}/requirements-frozen.txt
allowlist_externals =
yapf
commands = commands =
yapf -ir {toxinidir}/promenade {toxinidir}/tests {toxinidir}/tools/image_tags.py yapf -ir {toxinidir}/promenade {toxinidir}/tests {toxinidir}/tools/image_tags.py
[testenv:freeze] [testenv:freeze]
deps = -r{toxinidir}/requirements-direct.txt
recreate = True recreate = True
allowlist_externals = sh allowlist_externals=
grep rm
sh
deps=
-r{toxinidir}/requirements-direct.txt
-r{toxinidir}/test-requirements.txt
commands= commands=
sh -c "pip freeze | grep -vE '^(promenade)|(pkg-resources)' > {toxinidir}/requirements-frozen.txt" rm -f requirements-frozen.txt
rm -f requirements-tree.txt
sh -c "pip freeze --all | grep -vE 'promenade|pyinotify|pkg-resources' > requirements-frozen.txt"
[testenv:gate-lint] [testenv:gate-lint]
deps = deps =
jsonschema==2.6.0 -r{toxinidir}/requirements-frozen.txt
allowlist_externals = sh allowlist_externals = sh
commands = commands =
{toxinidir}/tools/lint_gate.sh sh -c "{toxinidir}/tools/lint_gate.sh"
[testenv:pep8] [testenv:pep8]
deps = deps =
-r{toxinidir}/test-requirements.txt -r{toxinidir}/requirements-frozen.txt
allowlist_externals =
yapf
bandit
flake8
commands = commands =
yapf -rd {toxinidir}/promenade {toxinidir}/tests {toxinidir}/tools/image_tags.py yapf -rd {toxinidir}/promenade {toxinidir}/tests {toxinidir}/tools/image_tags.py
flake8 {toxinidir}/promenade flake8 {toxinidir}/promenade

View File

@ -46,7 +46,7 @@
name: airship-promenade-chart-build-latest-htk name: airship-promenade-chart-build-latest-htk
description: | description: |
Lints charts using latest HTK Lints charts using latest HTK
voting: false voting: true
run: tools/zuul/playbooks/helm-linter.yaml run: tools/zuul/playbooks/helm-linter.yaml
timeout: 300 timeout: 300
nodeset: airship-promenade-single-node-focal nodeset: airship-promenade-single-node-focal

View File

@ -22,7 +22,7 @@
check: check:
jobs: jobs:
- openstack-tox-pep8 - openstack-tox-pep8
# - openstack-tox-py38 - openstack-tox-docs
- airship-promenade-lint-ws - airship-promenade-lint-ws
- airship-promenade-docker-build-gate - airship-promenade-docker-build-gate
- airship-promenade-chart-build-gate - airship-promenade-chart-build-gate
@ -32,6 +32,7 @@
gate: gate:
jobs: jobs:
- openstack-tox-pep8 - openstack-tox-pep8
- openstack-tox-docs
- airship-promenade-lint-ws - airship-promenade-lint-ws
- airship-promenade-docker-build-gate - airship-promenade-docker-build-gate
- airship-promenade-chart-build-gate - airship-promenade-chart-build-gate