[focal] Python modules sync with Airship project

- uplifted/downgraded some python modules
- fixed falcon.API deprecation -> falcon.App
- uplifted deckhand reference for python deps
- fixed formatting style using yapf linter
- added bindep role and bindep.txt file with required deps
- fixed quay docker image publishing
- re-enabled openstack-tox-py38 gate job

Change-Id: I0e248182efad75630721a1291bc86a5edc79c22a
This commit is contained in:
Sergiy Markin 2023-04-07 15:33:51 +00:00
parent 7e4bf90233
commit 32ad8a96b0
37 changed files with 440 additions and 338 deletions

View File

@ -19,7 +19,7 @@ formats:
# Optionally set the version of Python and requirements required to build your docs
python:
version: 3.7
version: 3.8
install:
- requirements: doc/requirements.txt
- requirements: requirements.txt

View File

@ -46,6 +46,7 @@ tests-unit: external-deps
tox -e py38
external-deps:
export DEBIAN_FRONTEND=noninteractive
./tools/install-external-deps.sh
tests-pep8:
@ -69,10 +70,12 @@ helm-init-%: helm-toolkit
lint: helm-lint gate-lint
gate-lint: gate-lint-deps
export DEBIAN_FRONTEND=noninteractive
tox -e gate-lint
gate-lint-deps:
sudo apt-get install -y --no-install-recommends shellcheck tox
sudo apt install -y --no-install-recommends shellcheck
sudo pip3 install tox
helm-lint: $(addprefix helm-lint-,$(CHARTS))

13
bindep.txt Normal file
View File

@ -0,0 +1,13 @@
# This file contains runtime (non-python) dependencies
# More info at: https://docs.openstack.org/infra/bindep/readme.html
# PlantUML is used for documentation builds, graphviz is its soft dependency
plantuml
graphviz
libffi-dev [test platform:dpkg]
libkrb5-dev [platform:dpkg]
libpq-dev [platform:dpkg]
libsasl2-dev [platform:dpkg]
libssl-dev [platform:dpkg]
libre2-dev [platform:dpkg]
apt-utils [platform:dpkg]

View File

@ -1,5 +1,5 @@
apiVersion: v1
description: The Promenade API
name: promenade
version: 0.1.0
version: 0.1.3
appVersion: 1.1.0

View File

@ -1,5 +1,4 @@
sphinx>=1.6.2
sphinx-rtd-theme==1.1.1
falcon>=1.4.1
oslo.config==8.7.1
markupsafe==2.0.1
sphinx
sphinx-rtd-theme==0.5.0
oslo.config<=8.7.1
MarkupSafe<2.1.0

View File

@ -29,6 +29,7 @@ CACHE = CacheManager(**parse_cache_config_options(CACHE_OPTS))
class Builder:
def __init__(self, config, *, validators=False):
self.config = config
self.validators = validators
@ -64,9 +65,8 @@ class Builder:
@property
def _file_specs(self):
return itertools.chain(
self.config.get_path('HostSystem:files', []),
self.config.get_path('Genesis:files', []))
return itertools.chain(self.config.get_path('HostSystem:files', []),
self.config.get_path('Genesis:files', []))
def build_all(self, *, output_dir):
self.build_genesis(output_dir=output_dir)
@ -99,21 +99,23 @@ class Builder:
(encrypted_tarball, decrypt_setup_command, decrypt_command,
decrypt_teardown_command) = _encrypt_genesis(sub_config, tarball)
return renderer.render_template(
sub_config,
template='scripts/genesis.sh',
context={
'decrypt_command': decrypt_command,
'decrypt_setup_command': decrypt_setup_command,
'decrypt_teardown_command': decrypt_teardown_command,
'encrypted_tarball': encrypted_tarball,
},
roles=genesis_roles)
return renderer.render_template(sub_config,
template='scripts/genesis.sh',
context={
'decrypt_command': decrypt_command,
'decrypt_setup_command':
decrypt_setup_command,
'decrypt_teardown_command':
decrypt_teardown_command,
'encrypted_tarball':
encrypted_tarball,
},
roles=genesis_roles)
def _build_genesis_validate_script(self):
sub_config = self.config.extract_genesis_config()
return renderer.render_template(
sub_config, template='scripts/validate-genesis.sh')
return renderer.render_template(sub_config,
template='scripts/validate-genesis.sh')
def build_node(self, node_document, *, output_dir):
node_name = node_document['metadata']['name']
@ -134,27 +136,30 @@ class Builder:
f['path'] for f in self.config.get_path('HostSystem:files', [])
]
file_specs = [self.file_cache[p] for p in file_spec_paths]
tarball = renderer.build_tarball_from_roles(
config=sub_config, roles=build_roles, file_specs=file_specs)
tarball = renderer.build_tarball_from_roles(config=sub_config,
roles=build_roles,
file_specs=file_specs)
(encrypted_tarball, decrypt_setup_command, decrypt_command,
decrypt_teardown_command) = _encrypt_node(sub_config, tarball)
return renderer.render_template(
sub_config,
template='scripts/join.sh',
context={
'decrypt_command': decrypt_command,
'decrypt_setup_command': decrypt_setup_command,
'decrypt_teardown_command': decrypt_teardown_command,
'encrypted_tarball': encrypted_tarball,
},
roles=build_roles)
return renderer.render_template(sub_config,
template='scripts/join.sh',
context={
'decrypt_command': decrypt_command,
'decrypt_setup_command':
decrypt_setup_command,
'decrypt_teardown_command':
decrypt_teardown_command,
'encrypted_tarball':
encrypted_tarball,
},
roles=build_roles)
def _build_node_validate_script(self, node_name):
sub_config = self.config.extract_node_config(node_name)
return renderer.render_template(
sub_config, template='scripts/validate-join.sh')
return renderer.render_template(sub_config,
template='scripts/validate-join.sh')
def _encrypt_genesis(config, data):

View File

@ -18,19 +18,19 @@ def promenade(*, verbose):
@promenade.command('build-all', help='Construct all scripts')
@click.option(
'-o',
'--output-dir',
default='.',
type=click.Path(
exists=True, file_okay=False, dir_okay=True, resolve_path=True),
required=True,
help='Location to write complete cluster configuration.')
@click.option('-o',
'--output-dir',
default='.',
type=click.Path(exists=True,
file_okay=False,
dir_okay=True,
resolve_path=True),
required=True,
help='Location to write complete cluster configuration.')
@click.option('--validators', is_flag=True, help='Generate validation scripts')
@click.option(
'--leave-kubectl',
is_flag=True,
help='Leave behind kubectl on joined nodes')
@click.option('--leave-kubectl',
is_flag=True,
help='Leave behind kubectl on joined nodes')
@click.argument('config_files', nargs=-1, type=click.File('rb'))
def build_all(*, config_files, leave_kubectl, output_dir, validators):
debug = _debug()
@ -49,23 +49,23 @@ def build_all(*, config_files, leave_kubectl, output_dir, validators):
@promenade.command('generate-certs', help='Generate a certs for a site')
@click.option(
'-o',
'--output-dir',
type=click.Path(
exists=True, file_okay=False, dir_okay=True, resolve_path=True),
required=True,
help='Location to write *-certificates.yaml')
@click.option('-o',
'--output-dir',
type=click.Path(exists=True,
file_okay=False,
dir_okay=True,
resolve_path=True),
required=True,
help='Location to write *-certificates.yaml')
@click.argument('config_files', nargs=-1, type=click.File('rb'))
def generate_certs(*, config_files, output_dir):
debug = _debug()
try:
c = config.Configuration.from_streams(
debug=debug,
streams=config_files,
substitute=True,
allow_missing_substitutions=True,
validate=False)
c = config.Configuration.from_streams(debug=debug,
streams=config_files,
substitute=True,
allow_missing_substitutions=True,
validate=False)
g = generator.Generator(c)
g.generate(output_dir)
except exceptions.PromenadeException as e:

View File

@ -13,6 +13,7 @@ LOG = logging.getLogger(__name__)
class Configuration:
def __init__(self,
*,
documents,
@ -63,11 +64,10 @@ class Configuration:
def from_design_ref(cls, design_ref, ctx=None, **kwargs):
documents, use_dh_engine = dr.get_documents(design_ref, ctx)
return cls(
documents=documents,
substitute=use_dh_engine,
validate=use_dh_engine,
**kwargs)
return cls(documents=documents,
substitute=use_dh_engine,
validate=use_dh_engine,
**kwargs)
def __getitem__(self, path):
return self.get_path(
@ -105,8 +105,10 @@ class Configuration:
schema = 'promenade/%s/v1' % kind
for document in self.documents:
if _matches_filter(
document, schema=schema, labels=labels, name=name):
if _matches_filter(document,
schema=schema,
labels=labels,
name=name):
yield document
def find(self, *args, **kwargs):
@ -122,12 +124,11 @@ class Configuration:
else:
LOG.debug('Excluding schema=%s metadata.name=%s',
document['schema'], _mg(document, 'name'))
return Configuration(
debug=self.debug,
documents=documents,
leave_kubectl=self.leave_kubectl,
substitute=False,
validate=False)
return Configuration(debug=self.debug,
documents=documents,
leave_kubectl=self.leave_kubectl,
substitute=False,
validate=False)
def extract_node_config(self, name):
LOG.debug('Extracting node config for %s.', name)
@ -145,12 +146,11 @@ class Configuration:
continue
else:
documents.append(document)
return Configuration(
debug=self.debug,
documents=documents,
leave_kubectl=self.leave_kubectl,
substitute=False,
validate=False)
return Configuration(debug=self.debug,
documents=documents,
leave_kubectl=self.leave_kubectl,
substitute=False,
validate=False)
@property
def kubelet_name(self):

View File

@ -33,8 +33,8 @@ def start_api():
ContextMiddleware(),
LoggingMiddleware(),
]
control_api = falcon.API(
request_type=PromenadeRequest, middleware=middlewares)
control_api = falcon.App(request_type=PromenadeRequest,
middleware=middlewares)
# v1.0 of Promenade API
v1_0_routes = [
@ -72,10 +72,9 @@ class VersionsResource(BaseResource):
"""
def on_get(self, req, resp):
resp.body = self.to_json({
'v1.0': {
resp.body = self.to_json(
{'v1.0': {
'path': '/api/v1.0',
'status': 'stable'
}
})
}})
resp.status = falcon.HTTP_200

View File

@ -28,6 +28,7 @@ LOG = logging.getLogger(__name__)
class BaseResource(object):
def on_options(self, req, resp, **kwargs):
"""
Handle options requests
@ -56,8 +57,8 @@ class BaseResource(object):
LOG.info('Input message body: %s \nContext: %s' %
(raw_body, req.context))
else:
LOG.info(
'No message body specified. \nContext: %s' % req.context)
LOG.info('No message body specified. \nContext: %s' %
req.context)
if has_input:
# read the json and validate if necessary
try:
@ -72,8 +73,8 @@ class BaseResource(object):
(raw_body, req.context))
raise exc.InvalidFormatError(
title='JSON could not be decoded',
description='%s: Invalid JSON in body: %s' % (req.path,
jex))
description='%s: Invalid JSON in body: %s' %
(req.path, jex))
else:
# No body passed as input. Fail validation if it was asked for
if validate_json_schema is not None:

View File

@ -89,9 +89,10 @@ class ContextMiddleware(object):
"""
def _format_uuid_string(self, string):
return (string.replace('urn:', '').replace('uuid:',
'').strip('{}').replace(
'-', '').lower())
return (string.replace('urn:',
'').replace('uuid:',
'').strip('{}').replace('-',
'').lower())
def _is_uuid_like(self, val):
try:
@ -115,16 +116,16 @@ class ContextMiddleware(object):
class LoggingMiddleware(object):
def process_request(self, req, resp):
# don't log health checks
if not req.url.endswith('/health'):
ctx = req.context
LOG.info(
"Request: %s %s %s",
req.method,
req.uri,
req.query_string,
ctx=ctx)
LOG.info("Request: %s %s %s",
req.method,
req.uri,
req.query_string,
ctx=ctx)
def process_response(self, req, resp, resource, req_succeeded):
ctx = req.context
@ -132,10 +133,9 @@ class LoggingMiddleware(object):
if req.url.endswith('/health'):
resp_code = self._get_resp_code(resp)
if not resp_code == 204:
LOG.error(
'Health check has failed with response status %s',
resp.status,
ctx=ctx)
LOG.error('Health check has failed with response status %s',
resp.status,
ctx=ctx)
else:
context_marker = getattr(ctx, 'context_marker', None)
request_id = getattr(ctx, 'request_id', None)
@ -149,12 +149,11 @@ class LoggingMiddleware(object):
resp.append_header('X-END-USER', end_user)
if user is not None:
resp.append_header('X-USER-NAME', user)
LOG.info(
"Response: %s %s %s",
req.method,
req.uri,
resp.status,
ctx=ctx)
LOG.info("Response: %s %s %s",
req.method,
req.uri,
resp.status,
ctx=ctx)
def _get_resp_code(self, resp):
# Falcon response object doesn't have a raw status code.

View File

@ -24,6 +24,7 @@ LOG = logging.getLogger(__name__)
class ValidateDesignResource(base.BaseResource):
@policy.ApiEnforcer('kubernetes_provisioner:post_validatedesign')
def on_post(self, req, resp):
result = ValidationMessage()

View File

@ -46,7 +46,7 @@ def _get_from_deckhand(design_ref, ctx=None):
else:
addl_headers = {}
auth = keystoneauth1.identity.v3.Password(**keystone_args)
session = keystoneauth1.session.Session(
auth=auth, additional_headers=addl_headers)
session = keystoneauth1.session.Session(auth=auth,
additional_headers=addl_headers)
return session.get(design_ref[len(_DECKHAND_PREFIX):], timeout=DH_TIMEOUT)

View File

@ -12,6 +12,7 @@ LOG = logging.getLogger(__name__)
class EncryptionMethod(metaclass=abc.ABCMeta):
@abc.abstractmethod
def encrypt(self, data):
pass
@ -50,6 +51,7 @@ class EncryptionMethod(metaclass=abc.ABCMeta):
class NullEncryptionMethod(EncryptionMethod):
def encrypt(self, data):
LOG.debug('Performing NOOP encryption')
return data

View File

@ -114,18 +114,17 @@ def default_error_serializer(req, resp, exception):
"""
Writes the default error message body, when we don't handle it otherwise
"""
format_error_resp(
req,
resp,
status_code=exception.status,
message=exception.description,
reason=exception.title,
error_type=exception.__class__.__name__,
error_list=[{
'message': exception.description,
'error': True
}],
info_list=None)
format_error_resp(req,
resp,
status_code=exception.status,
message=exception.description,
reason=exception.title,
error_type=exception.__class__.__name__,
error_list=[{
'message': exception.description,
'error': True
}],
info_list=None)
def default_exception_handler(ex, req, resp, params):
@ -140,13 +139,12 @@ def default_exception_handler(ex, req, resp, params):
# take care of the uncaught stuff
exc_string = traceback.format_exc()
LOG.error('Unhanded Exception being handled: \n%s', exc_string)
format_error_resp(
req,
resp,
falcon.HTTP_500,
error_type=ex.__class__.__name__,
message="Unhandled Exception raised: %s" % str(ex),
retry=True)
format_error_resp(req,
resp,
falcon.HTTP_500,
error_type=ex.__class__.__name__,
message="Unhandled Exception raised: %s" % str(ex),
retry=True)
class PromenadeException(Exception):
@ -190,8 +188,8 @@ class PromenadeException(Exception):
self.info_list = info_list
self.retry = retry
self.trace = trace
super().__init__(
PromenadeException._gen_ex_message(title, description))
super().__init__(PromenadeException._gen_ex_message(
title, description))
@staticmethod
def _gen_ex_message(title, description):
@ -204,16 +202,15 @@ class PromenadeException(Exception):
"""
The handler used for app errors and child classes
"""
format_error_resp(
req,
resp,
ex.status,
message=ex.title,
reason=ex.description,
error_list=ex.error_list,
info_list=ex.info_list,
error_type=ex.__class__.__name__,
retry=ex.retry)
format_error_resp(req,
resp,
ex.status,
message=ex.title,
reason=ex.description,
error_list=ex.error_list,
info_list=ex.info_list,
error_type=ex.__class__.__name__,
retry=ex.retry)
def display(self, debug=False):
if self.trace or debug:
@ -300,8 +297,9 @@ class InvalidFormatError(PromenadeException):
title = self.title
if not description:
description = self.title
super(InvalidFormatError, self).__init__(
title, description, status=self.status)
super(InvalidFormatError, self).__init__(title,
description,
status=self.status)
class ValidationException(PromenadeException):

View File

@ -10,6 +10,7 @@ LOG = logging.getLogger(__name__)
class Generator:
def __init__(self, config, block_strings=True):
self.config = config
self.keys = pki.PKI(block_strings=block_strings)
@ -69,8 +70,10 @@ class Generator:
def gen_cert(self, document_name, *, ca_cert, ca_key, **kwargs):
ca_cert_data = ca_cert['data']
ca_key_data = ca_key['data']
return self.keys.generate_certificate(
document_name, ca_cert=ca_cert_data, ca_key=ca_key_data, **kwargs)
return self.keys.generate_certificate(document_name,
ca_cert=ca_cert_data,
ca_key=ca_key_data,
**kwargs)
def gen_keypair(self, document_name):
return self.keys.generate_keypair(document_name)
@ -95,9 +98,9 @@ class Generator:
document_name, kinds)
return docs
else:
raise exceptions.IncompletePKIPairError(
'Incomplete set %s '
'for name: %s' % (kinds, document_name))
raise exceptions.IncompletePKIPairError('Incomplete set %s '
'for name: %s' %
(kinds, document_name))
else:
docs = self._find_in_outputs(schemas, document_name)
@ -129,17 +132,16 @@ class Generator:
documents = self.get_documents()
with open(os.path.join(output_dir, 'certificates.yaml'), 'w') as f:
# Don't use safe_dump_all so we can block format certificate data.
yaml.dump_all(
documents,
stream=f,
default_flow_style=False,
explicit_start=True,
indent=2)
yaml.dump_all(documents,
stream=f,
default_flow_style=False,
explicit_start=True,
indent=2)
def get_documents(self):
return list(
itertools.chain.from_iterable(
v.values() for v in self.outputs.values()))
itertools.chain.from_iterable(v.values()
for v in self.outputs.values()))
def get_host_list(service_names):

View File

@ -115,9 +115,10 @@ def _get_update_labels(existing_labels, input_labels):
# no existing labels found
if not existing_labels:
# filter delete label request since there is no labels set on a node
update_labels.update(
{k: v
for k, v in input_labels.items() if v is not None})
update_labels.update({
k: v
for k, v in input_labels.items() if v is not None
})
return update_labels
# new labels or overriding labels

View File

@ -58,6 +58,7 @@ DEFAULT_CONFIG = {
class BlankContextFilter(logging.Filter):
def filter(self, record):
for key in BLANK_CONTEXT_VALUES:
if getattr(record, key, None) is None:
@ -66,6 +67,7 @@ class BlankContextFilter(logging.Filter):
class Adapter(logging.LoggerAdapter):
def process(self, msg, kwargs):
extra = kwargs.get('extra', {})

View File

@ -10,11 +10,10 @@ def setup(disable_keystone=False):
log_group = cfg.OptGroup(name='logging', title='Logging options')
cfg.CONF.register_group(log_group)
logging_options = [
cfg.StrOpt(
'log_level',
choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
default='DEBUG',
help='Global log level for PROMENADE')
cfg.StrOpt('log_level',
choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
default='DEBUG',
help='Global log level for PROMENADE')
]
cfg.CONF.register_opts(logging_options, group=log_group)
if disable_keystone is False:

View File

@ -13,6 +13,7 @@ LOG = logging.getLogger(__name__)
class PKI:
def __init__(self, *, block_strings=True):
self.block_strings = block_strings
self._ca_config_string = None
@ -193,8 +194,9 @@ class block_literal(str):
def block_literal_representer(dumper, data):
return dumper.represent_scalar(
'tag:yaml.org,2002:str', str(data), style='|')
return dumper.represent_scalar('tag:yaml.org,2002:str',
str(data),
style='|')
yaml.add_representer(block_literal, block_literal_representer)

View File

@ -25,10 +25,9 @@ LOG = logging.getLogger(__name__)
policy_engine = None
POLICIES = [
op.RuleDefault(
'admin_required',
'role:admin or is_admin:1',
description='Actions requiring admin authority'),
op.RuleDefault('admin_required',
'role:admin or is_admin:1',
description='Actions requiring admin authority'),
op.DocumentedRuleDefault('kubernetes_provisioner:get_join_scripts',
'role:admin', 'Get join script for node',
[{
@ -51,6 +50,7 @@ POLICIES = [
class PromenadePolicy:
def __init__(self):
self.enforcer = op.Enforcer(cfg.CONF)
@ -72,18 +72,18 @@ class ApiEnforcer(object):
self.action = action
def __call__(self, f):
@functools.wraps(f)
def secure_handler(slf, req, resp, *args, **kwargs):
ctx = req.context
policy_eng = ctx.policy_engine
# policy engine must be configured
if policy_eng is not None:
LOG.debug(
'Enforcing policy %s on request %s using engine %s',
self.action,
ctx.request_id,
policy_eng.__class__.__name__,
ctx=ctx)
LOG.debug('Enforcing policy %s on request %s using engine %s',
self.action,
ctx.request_id,
policy_eng.__class__.__name__,
ctx=ctx)
else:
LOG.error('No policy engine configured', ctx=ctx)
raise ex.PromenadeException(
@ -97,34 +97,30 @@ class ApiEnforcer(object):
LOG.debug('Request is authorized', ctx=ctx)
authorized = True
except Exception:
LOG.exception(
'Error authorizing request for action %s',
self.action,
ctx=ctx)
raise ex.ApiError(
title="Expectation Failed",
status=falcon.HTTP_417,
retry=False)
LOG.exception('Error authorizing request for action %s',
self.action,
ctx=ctx)
raise ex.ApiError(title="Expectation Failed",
status=falcon.HTTP_417,
retry=False)
if authorized:
return f(slf, req, resp, *args, **kwargs)
else:
# raise the appropriate response exception
if ctx.authenticated:
LOG.error(
'Unauthorized access attempted for action %s',
self.action,
ctx=ctx)
LOG.error('Unauthorized access attempted for action %s',
self.action,
ctx=ctx)
raise ex.ApiError(
title="Forbidden",
status=falcon.HTTP_403,
description="Credentials do not permit access",
retry=False)
else:
LOG.error(
'Unathenticated access attempted for action %s',
self.action,
ctx=ctx)
LOG.error('Unathenticated access attempted for action %s',
self.action,
ctx=ctx)
raise ex.ApiError(
title="Unauthenticated",
status=falcon.HTTP_401,

View File

@ -30,8 +30,8 @@ def build_tarball_from_roles(config, *, roles, file_specs):
def insert_charts_into_bundler(bundler):
for root, _dirnames, filenames in os.walk(
'/opt/promenade/charts', followlinks=True):
for root, _dirnames, filenames in os.walk('/opt/promenade/charts',
followlinks=True):
for source_filename in filenames:
if _source_file_is_excluded(source_filename):
continue
@ -43,8 +43,9 @@ def insert_charts_into_bundler(bundler):
LOG.debug('Copying asset file %s (mode=%o)', source_path,
stat.st_mode)
with open(source_path) as f:
bundler.add(
path=destination_path, data=f.read(), mode=stat.st_mode)
bundler.add(path=destination_path,
data=f.read(),
mode=stat.st_mode)
def render_role_into_bundler(*, bundler, config, role):
@ -57,12 +58,11 @@ def render_role_into_bundler(*, bundler, config, role):
stat = os.stat(source_path)
LOG.debug('Rendering file %s (mode=%o)', source_path, stat.st_mode)
destination_path = os.path.join(destination_base, source_filename)
render_template_into_bundler(
bundler=bundler,
config=config,
destination_path=destination_path,
source_path=source_path,
mode=stat.st_mode)
render_template_into_bundler(bundler=bundler,
config=config,
destination_path=destination_path,
source_path=source_path,
mode=stat.st_mode)
def render_template_into_bundler(*, bundler, config, destination_path,
@ -126,8 +126,8 @@ def _base64_encode(s):
def _fill_no_proxy(network_config):
proxy = network_config.get('proxy', {}).get('url')
if proxy:
additional = network_config.get('proxy', {}).get(
'additional_no_proxy', [])
additional = network_config.get('proxy',
{}).get('additional_no_proxy', [])
if additional:
return ','.join(additional) + ',' + _default_no_proxy(
network_config)
@ -145,8 +145,8 @@ def _default_no_proxy(network_config):
'kubernetes',
'kubernetes.default',
'kubernetes.default.svc',
'kubernetes.default.svc.%s' % network_config.get('dns', {}).get(
'cluster_domain', 'cluster.local'),
'kubernetes.default.svc.%s' %
network_config.get('dns', {}).get('cluster_domain', 'cluster.local'),
]
return ','.join(include)

View File

@ -11,6 +11,7 @@ LOG = logging.getLogger(__name__)
class TarBundler:
def __init__(self):
self._tar_blob = io.BytesIO()
self._tf = tarfile.open(fileobj=self._tar_blob, mode='w|gz')

View File

@ -140,8 +140,8 @@ def _load_schemas():
for schema in yaml.safe_load_all(f):
name = schema['metadata']['name']
if name in SCHEMAS:
raise RuntimeError(
'Duplicate schema specified for: %s' % name)
raise RuntimeError('Duplicate schema specified for: %s' %
name)
SCHEMAS[name] = schema['data']

View File

@ -1,19 +1,35 @@
Beaker==1.12.0
click==8.1.3
falcon==3.1.1
Jinja2==3.1.2
jsonpath-ng==1.5.3
jsonschema==3.2.0
keystoneauth1==5.1.1
keystonemiddleware==10.2.0
setuptools==67.0.0
#
Beaker<=1.12.0
click
ConfigParser
Deckhand @ git+https://opendev.org/airship/deckhand.git@ac4edb0c64c9f9af62e7cb63f049508596d25747#egg=deckhand
docutils
falcon
fixtures
importlib_metadata
Jinja2
jsonpath_ng
jsonschema<=3.2.0
keystoneauth1<=5.1.1
kubernetes==26.1.0
oslo.context==5.0.0
oslo.policy==4.0.0
PasteDeploy==3.0.1
MarkupSafe<2.1.0, >=0.9.2
mock
nose
oslo.config<=8.7.1
oslo.context<=4.1.0
oslo.policy<=3.10.1
pylibyaml==0.1.0
PyYAML==5.4.1
requests==2.28.2
uWSGI==2.0.21
Deckhand @ git+https://opendev.org/airship/deckhand.git@70aa35a396d5f76753616f5289228f9c2b0e7ec7
# Deckhand @ git+https://review.opendev.org/airship/deckhand@refs/changes/93/869293/222#egg=deckhand
PyYAML<=5.4.1
reno
requests==2.27.0
setuptools<=45.2.0
six
Sphinx
sphinx-rtd-theme==0.5.0
testrepository
testresources
testscenarios
testtools<=2.5.0
urllib3 >= 1.21.1, <= 1.25.11
virtualenv
wheel

View File

@ -1,136 +1,175 @@
alabaster==0.7.13
alembic==1.7.1
amqp==5.0.8
attrs==22.2.0
alembic==1.4.3
amqp==2.6.1
argcomplete==3.0.5
attrs==23.1.0
autopage==0.5.1
Babel==2.11.0
Babel==2.12.1
bandit==1.6.0
bcrypt==4.0.1
Beaker==1.12.0
cachetools==5.3.0
certifi==2022.12.7
cffi==1.15.1
charset-normalizer==3.0.1
chardet==3.0.4
charset-normalizer==2.0.12
click==8.1.3
cliff==4.2.0
cmd2==2.4.3
configparser==5.3.0
coverage==7.2.3
cryptography==3.4.8
debtcollector==2.5.0
Deckhand @ git+https://opendev.org/airship/deckhand.git@70aa35a396d5f76753616f5289228f9c2b0e7ec7
Deckhand @ git+https://opendev.org/airship/deckhand.git@ac4edb0c64c9f9af62e7cb63f049508596d25747#egg=deckhand
decorator==5.1.1
deepdiff==5.8.1
distlib==0.3.6
dnspython==2.3.0
docutils==0.17.1
docutils==0.19
dogpile.cache==1.1.8
entrypoints==0.3
dulwich==0.21.3
eventlet==0.33.3
exceptiongroup==1.1.1
extras==1.0.0
falcon==3.1.1
fasteners==0.18
filelock==3.12.0
fixtures==3.0.0
flake8==3.7.9
future==0.18.3
flake8==3.8.4
futurist==2.4.1
google-auth==2.16.1
gitdb==4.0.10
GitPython==3.1.31
google-auth==2.17.3
greenlet==2.0.2
hacking==3.0.1
hacking==4.1.0
html5lib==0.9999999
httpexceptor==1.4.0
idna==3.4
imagesize==1.4.1
importlib-metadata==6.0.0
importlib-resources==5.12.0
importlib-metadata==6.5.0
iniconfig==2.0.0
iso8601==1.1.0
Jinja2==3.1.2
jsonpath-ng==1.5.3
jsonpickle==3.0.1
jsonpath-rw==1.4.0
jsonpath-rw-ext==1.2.2
jsonpickle==1.4.1
jsonschema==3.2.0
keystoneauth1==5.1.1
keystonemiddleware==10.2.0
kombu==5.1.0
kombu==4.6.11
kubernetes==26.1.0
Mako==1.2.4
MarkupSafe==2.1.2
MarkupSafe==2.0.1
mccabe==0.6.1
msgpack==1.0.4
mock==5.0.2
msgpack==1.0.5
netaddr==0.8.0
netifaces==0.11.0
networkx==2.6.2
networkx==3.1
nose==1.3.7
oauthlib==3.2.2
ordered-set==4.1.0
os-service-types==1.7.0
oslo.cache==2.8.2
oslo.concurrency==4.4.1
oslo.cache==2.10.1
oslo.concurrency==5.1.1
oslo.config==8.7.1
oslo.context==5.0.0
oslo.db==11.0.0
oslo.context==4.1.0
oslo.db==10.0.0
oslo.i18n==6.0.0
oslo.log==4.6.0
oslo.messaging==12.9.4
oslo.messaging==12.13.0
oslo.metrics==0.6.0
oslo.middleware==4.4.0
oslo.policy==4.0.0
oslo.policy==3.10.1
oslo.serialization==4.2.0
oslo.service==3.1.1
oslo.utils==4.10.2
packaging==23.0
oslo.utils==4.12.3
packaging==21.3
Paste==3.5.0
PasteDeploy==3.0.1
pbr==5.6.0
PasteScript==3.3.0
pbr==5.5.1
pip==23.0.1
platformdirs==3.2.0
pluggy==1.0.0
ply==3.11
prettytable==3.6.0
prettytable==3.7.0
prometheus-client==0.16.0
psycopg2-binary==2.9.5
pyasn1==0.4.8
pyasn1-modules==0.2.8
psycopg2-binary==2.9.6
pyasn1==0.5.0
pyasn1-modules==0.3.0
pycadf==3.1.1
pycodestyle==2.5.0
pycodestyle==2.6.0
pycparser==2.21
pyflakes==2.1.1
pyflakes==2.2.0
Pygments==2.14.0
pyinotify==0.9.6
pylibyaml==0.1.0
pyparsing==3.0.9
pymongo==4.3.3
pyparsing==2.4.7
pyperclip==1.8.2
pyproject_api==1.5.0
pyrsistent==0.19.3
pytest==7.3.1
pytest-cov==4.0.0
python-barbicanclient==5.2.0
python-dateutil==2.8.2
python-keystoneclient==3.22.0
python-editor==1.0.4
python-keystoneclient==5.1.0
python-memcached==1.59
python-subunit==1.4.2
pytz==2022.7.1
python-mimeparse==1.6.0
python-subunit==1.4.0
pytz==2023.3
PyYAML==5.4.1
reno==4.0.0
repoze.lru==0.7
requests==2.28.2
requests==2.27.0
requests-oauthlib==1.3.1
resolver==0.2.1
rfc3986==2.0.0
Routes==2.5.1
rsa==4.9
selector==0.10.1
setuptools==45.2.0
simplejson==3.19.1
six==1.16.0
smmap==5.0.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinx-rtd-theme==1.1.1
Sphinx==6.1.3
sphinx-rtd-theme==0.5.0
sphinxcontrib-applehelp==1.0.4
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
SQLAlchemy==1.4.23
SQLAlchemy==1.3.20
sqlalchemy-migrate==0.13.0
sqlparse==0.4.3
sqlparse==0.4.4
statsd==4.0.1
stestr==3.2.0
stevedore==4.1.1
stevedore==5.0.0
Tempita==0.5.2
testrepository==0.0.20
testresources==2.0.1
testscenarios==0.5.0
testtools==2.5.0
urllib3==1.26.6
tiddlyweb==2.4.3
tomli==2.0.1
tomlkit==0.11.7
typing_extensions==4.5.0
urllib3==1.25.11
uWSGI==2.0.21
vine==5.0.0
voluptuous==0.13.1
vine==1.3.0
virtualenv==20.22.0
wcwidth==0.2.6
WebOb==1.8.7
websocket-client==1.5.1
Werkzeug==2.0.1
wrapt==1.14.1
Werkzeug==2.1.2
wheel==0.40.0
wrapt==1.15.0
wsgi-intercept==1.11.0
xmltodict==0.13.0
yapf==0.33.0
yappi==1.4.0
zipp==3.14.0
yq==3.2.1
zipp==3.15.0

View File

@ -1,5 +1,6 @@
[metadata]
name = promenade
version = 1.1
summary = Promenade is a tool for bootstrapping a resilient kubernetes cluster and managing its life-cycle via helm charts.
description_file = README.md
author = The Airship Authors
@ -13,6 +14,7 @@ classifier =
Operating System :: POSIX :: Linux
Programming Language :: Python :: 3
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.10
[files]
packages =

View File

@ -1,4 +1,5 @@
pytest
flake8==3.7.9
bandit>=1.5
yapf==0.24.0
pytest >= 3.0
pytest-cov==4.0.0
flake8==3.8.4
bandit==1.6.0
yapf

View File

@ -62,8 +62,9 @@ def test_node_labels_pass(mock_kubeclient, mock_update_node_labels, client,
"""
mock_kubeclient.return_value = None
mock_update_node_labels.return_value = _mock_update_node_labels()
response = client.simulate_put(
'/api/v1.0/node-labels/ubuntubox', headers=req_header, body=req_body)
response = client.simulate_put('/api/v1.0/node-labels/ubuntubox',
headers=req_header,
body=req_body)
assert response.status == falcon.HTTP_200
assert response.json["status"] == "Success"
@ -77,8 +78,9 @@ def test_node_labels_missing_inputs(client, req_header, req_body):
req_header: API request header
req_body: API request body
"""
response = client.simulate_post(
'/api/v1.0/node-labels', headers=req_header, body=req_body)
response = client.simulate_post('/api/v1.0/node-labels',
headers=req_header,
body=req_body)
assert response.status == falcon.HTTP_404

View File

@ -49,8 +49,9 @@ def std_body():
def test_post_validatedesign_empty_docs(client, std_body, std_headers):
with mock.patch('promenade.design_ref.get_documents') as gd:
gd.return_value = ([], False)
response = client.simulate_post(
'/api/v1.0/validatedesign', headers=std_headers, body=std_body)
response = client.simulate_post('/api/v1.0/validatedesign',
headers=std_headers,
body=std_body)
assert response.status == falcon.HTTP_400
assert response.json['details']['errorCount'] == 5
@ -93,10 +94,8 @@ VALID_DOCS = [
'files': [{
'content':
'# placeholder for triggering calico etcd bootstrapping',
'mode':
420,
'path':
'/var/lib/anchor/calico-etcd-bootstrap'
'mode': 420,
'path': '/var/lib/anchor/calico-etcd-bootstrap'
}],
'hostname':
'n0',
@ -107,10 +106,8 @@ VALID_DOCS = [
'registry.k8s.io/kube-apiserver-amd64:v1.26.0',
'controller-manager':
'registry.k8s.io/kube-controller-manager-amd64:v1.26.0',
'etcd':
'quay.io/coreos/etcd:v3.5.4',
'scheduler':
'registry.k8s.io/kube-scheduler-amd64:v1.26.0'
'etcd': 'quay.io/coreos/etcd:v3.5.4',
'scheduler': 'registry.k8s.io/kube-scheduler-amd64:v1.26.0'
}
},
'ip':
@ -137,8 +134,7 @@ VALID_DOCS = [
},
{
'data': {
'files':
[{
'files': [{
'mode':
365,
'path':
@ -147,15 +143,12 @@ VALID_DOCS = [
'kubernetes/node/bin/kubelet',
'tar_url':
'https://dl.k8s.io/v1.26.0/kubernetes-node-linux-amd64.tar.gz'
},
{
'content':
'/var/lib/docker/containers/*/*-json.log\n{\n compress\n copytruncate\n create 0644 root root\n daily\n dateext\n dateformat -%Y%m%d-%s\n maxsize 10M\n missingok\n notifempty\n su root root\n rotate 1\n}',
'mode':
292,
'path':
'/etc/logrotate.d/json-logrotate'
}],
}, {
'content':
'/var/lib/docker/containers/*/*-json.log\n{\n compress\n copytruncate\n create 0644 root root\n daily\n dateext\n dateformat -%Y%m%d-%s\n maxsize 10M\n missingok\n notifempty\n su root root\n rotate 1\n}',
'mode': 292,
'path': '/etc/logrotate.d/json-logrotate'
}],
'images': {
'haproxy': 'haproxy:1.8.3',
'helm': {
@ -261,7 +254,8 @@ VALID_DOCS = [
def test_post_validatedesign_valid_docs(client, std_body, std_headers):
with mock.patch('promenade.design_ref.get_documents') as gd:
gd.return_value = (VALID_DOCS, False)
response = client.simulate_post(
'/api/v1.0/validatedesign', headers=std_headers, body=std_body)
response = client.simulate_post('/api/v1.0/validatedesign',
headers=std_headers,
body=std_body)
assert response.status == falcon.HTTP_200
assert response.json['details']['errorCount'] == 0

View File

@ -86,9 +86,9 @@ def _valid_dg(config, dynamic_tag, context_name=None):
if config.get('context', {}).get(context_name):
return True
else:
raise TagGenExeception(
'Dynamic tag "%s" requested, but "%s"'
' not found in context' % (dynamic_tag, context_name))
raise TagGenExeception('Dynamic tag "%s" requested, but "%s"'
' not found in context' %
(dynamic_tag, context_name))
else:
return False

View File

@ -3,6 +3,9 @@
set -ex
export DEBIAN_FRONTEND=noninteractive
echo 'debconf debconf/frontend select Noninteractive' | sudo debconf-set-selections
CFSSL_URL=${CFSSL_URL:-https://pkg.cfssl.org/R1.2/cfssl_linux-amd64}
if [[ ! $(command -v cfssl) ]]; then

View File

@ -1,20 +1,13 @@
- hosts: all
roles:
- bindep
- ensure-docker
- ensure-python
- ensure-pip
tasks:
- include_vars: vars.yaml
- name: Ensure pip
include_role:
name: ensure-pip
- name: Clear firewall
include_role:
name: clear-firewall
- name: Ensure docker
include_role:
name: ensure-docker
- name: Debug tag generation inputs
block:
- debug:
@ -41,6 +34,14 @@
debug:
var: image_tags
- name: Install Docker python module for ansible docker login
block:
- pip:
name: docker
version: 4.4.4
executable: pip3
become: True
- name: Make images
when: not publish
block:

View File

@ -11,6 +11,12 @@
# limitations under the License.
- hosts: primary
roles:
- bindep
- ensure-docker
- ensure-python
- ensure-pip
tasks:
- name: Execute the make target for basic testing
make:

36
tox.ini
View File

@ -14,16 +14,18 @@ pass_env =
commands = {posargs}
[testenv:py38]
allowlist_externals =
pytest
setenv =
PYTHONWARNING=all
deps = -r{toxinidir}/requirements-frozen.txt
-r{toxinidir}/test-requirements.txt
deps =
-r{toxinidir}/requirements-frozen.txt
commands =
pytest {posargs}
[testenv:bandit]
deps =
-r{toxinidir}/test-requirements.txt
-r{toxinidir}/requirements-frozen.txt
commands =
bandit --skip B324 -r promenade
@ -33,34 +35,46 @@ allowlist_externals =
rm
deps =
-r{toxinidir}/doc/requirements.txt
-r{toxinidir}/requirements-frozen.txt
commands =
rm -rf doc/build
sphinx-build -W -b html doc/source doc/build/html
[testenv:fmt]
deps =
-r{toxinidir}/test-requirements.txt
-r{toxinidir}/requirements-frozen.txt
allowlist_externals =
yapf
commands =
yapf -ir {toxinidir}/promenade {toxinidir}/tests {toxinidir}/tools/image_tags.py
[testenv:freeze]
deps = -r{toxinidir}/requirements-direct.txt
recreate = True
allowlist_externals = sh
grep
allowlist_externals=
rm
sh
deps=
-r{toxinidir}/requirements-direct.txt
-r{toxinidir}/test-requirements.txt
commands=
sh -c "pip freeze | grep -vE '^(promenade)|(pkg-resources)' > {toxinidir}/requirements-frozen.txt"
rm -f requirements-frozen.txt
rm -f requirements-tree.txt
sh -c "pip freeze --all | grep -vE 'promenade|pyinotify|pkg-resources' > requirements-frozen.txt"
[testenv:gate-lint]
deps =
jsonschema==2.6.0
-r{toxinidir}/requirements-frozen.txt
allowlist_externals = sh
commands =
{toxinidir}/tools/lint_gate.sh
sh -c "{toxinidir}/tools/lint_gate.sh"
[testenv:pep8]
deps =
-r{toxinidir}/test-requirements.txt
-r{toxinidir}/requirements-frozen.txt
allowlist_externals =
yapf
bandit
flake8
commands =
yapf -rd {toxinidir}/promenade {toxinidir}/tests {toxinidir}/tools/image_tags.py
flake8 {toxinidir}/promenade

View File

@ -46,7 +46,7 @@
name: airship-promenade-chart-build-latest-htk
description: |
Lints charts using latest HTK
voting: false
voting: true
run: tools/zuul/playbooks/helm-linter.yaml
timeout: 300
nodeset: airship-promenade-single-node-focal

View File

@ -22,7 +22,7 @@
check:
jobs:
- openstack-tox-pep8
# - openstack-tox-py38
- openstack-tox-docs
- airship-promenade-lint-ws
- airship-promenade-docker-build-gate
- airship-promenade-chart-build-gate
@ -32,6 +32,7 @@
gate:
jobs:
- openstack-tox-pep8
- openstack-tox-docs
- airship-promenade-lint-ws
- airship-promenade-docker-build-gate
- airship-promenade-chart-build-gate