Adding ability to specify custom config files
Example: `files: {keystone-conf: /tmp/keystone}`. In that case, the source file will be taken not from the `content` path but from the path defined in the `files` config. Change-Id: If2e71887adca9148f98b555ef8d6033211fe6375
This commit is contained in:
parent
2ffb9d57d0
commit
facf0f6282
|
@ -6,6 +6,7 @@ import os
|
|||
from fuel_ccp.config import _yaml
|
||||
from fuel_ccp.config import builder
|
||||
from fuel_ccp.config import cli
|
||||
from fuel_ccp.config import files
|
||||
from fuel_ccp.config import images
|
||||
from fuel_ccp.config import kubernetes
|
||||
from fuel_ccp.config import registry
|
||||
|
@ -55,7 +56,7 @@ CONF = _Wrapper()
|
|||
|
||||
CONFIG_MODULES = [
|
||||
builder, cli, images, kubernetes, registry, replicas, repositories,
|
||||
sources, url,
|
||||
sources, url, files,
|
||||
]
|
||||
|
||||
|
||||
|
@ -116,7 +117,7 @@ def load_component_defaults():
|
|||
from fuel_ccp.common import utils
|
||||
|
||||
sections = ['versions', 'sources', 'configs', 'nodes', 'roles', 'replicas',
|
||||
'url']
|
||||
'url', 'files']
|
||||
new_config = _yaml.AttrDict((k, _yaml.AttrDict()) for k in sections)
|
||||
for path in utils.get_config_paths():
|
||||
if not os.path.exists(path):
|
||||
|
|
|
@ -0,0 +1,13 @@
|
|||
# Configuration section for user-supplied file overrides.  The ``files``
# mapping associates a service file name with a custom source path on
# disk, e.g.::
#
#     files:
#       keystone-conf: /tmp/keystone

DEFAULTS = {
    'files': {}
}

SCHEMA = {
    'files': {
        'type': 'object',
        # Each value is a non-empty filesystem path.  NOTE: the original
        # used "minimum", which per JSON Schema applies only to numeric
        # instances and is silently ignored for strings; "minLength" is
        # the correct keyword to reject empty path strings.
        "additionalProperties": {
            "type": "string",
            "minLength": 1,
        },
    },
}
|
|
@ -45,22 +45,32 @@ def _get_configmaps_version(configmaps, service_dir, files, configs):
|
|||
"""
|
||||
versions = ''.join(cm.obj['metadata']['resourceVersion']
|
||||
for cm in configmaps)
|
||||
files_hash = _get_service_files_hash(service_dir, files, configs)
|
||||
files_hash = _get_service_files_hash(files, configs)
|
||||
|
||||
return versions + files_hash
|
||||
|
||||
|
||||
def _get_service_files_hash(files, configs):
    """Return a sha1 fingerprint of the rendered service files.

    Each file's ``content`` path is rendered through Jinja with the given
    configs; the resulting name->text mapping is serialized with sorted
    keys so the digest is deterministic for equal inputs.

    :param files: mapping of file name -> file spec dict (may be falsy)
    :param configs: template context passed to the Jinja renderer
    :returns: hex sha1 digest string of the rendered contents
    """
    rendered = {
        name: jinja_utils.jinja_render(
            spec["content"], configs, ignore_undefined=True)
        for name, spec in (files or {}).items()
    }
    # sort_keys keeps the JSON byte stream stable across dict orderings.
    dump = json.dumps(rendered, sort_keys=True).encode("utf-8")
    return hashlib.sha1(dump).hexdigest()
|
||||
|
||||
|
||||
def process_files(files, service_dir):
    """Resolve each service file's source path in place.

    For every entry in ``files`` the ``content`` key is rewritten to the
    path the file should actually be read from: an override taken from the
    global ``CONF.files`` mapping (custom path keyed by file name) wins;
    otherwise the path is resolved under the service's local ``files/``
    directory.

    :param files: mapping of file name -> file spec dict, mutated in place
        (may be None/empty, in which case this is a no-op)
    :param service_dir: root directory of the service definition
    """
    if not files:
        return
    for filename, f in files.items():
        # Single lookup instead of the original get()-then-get() pair.
        override = CONF.files.get(filename)
        if override:
            f["content"] = override
        else:
            f["content"] = os.path.join(service_dir, "files", f["content"])
|
||||
|
||||
|
||||
def parse_role(component, topology, configmaps):
|
||||
service_dir = component["service_dir"]
|
||||
role = component["service_content"]
|
||||
|
@ -71,6 +81,7 @@ def parse_role(component, topology, configmaps):
|
|||
LOG.info("Scheduling service %s deployment", service_name)
|
||||
_expand_files(service, role.get("files"))
|
||||
|
||||
process_files(role.get("files"), service_dir)
|
||||
files_cm = _create_files_configmap(
|
||||
service_dir, service_name, role.get("files"))
|
||||
meta_cm = _create_meta_configmap(service)
|
||||
|
@ -306,8 +317,7 @@ def _create_files_configmap(service_dir, service_name, files):
|
|||
data = {}
|
||||
if files:
|
||||
for filename, f in files.items():
|
||||
with open(os.path.join(
|
||||
service_dir, "files", f["content"]), "r") as f:
|
||||
with open(f["content"], "r") as f:
|
||||
data[filename] = f.read()
|
||||
data["placeholder"] = ""
|
||||
template = templates.serialize_configmap(configmap_name, data)
|
||||
|
|
|
@ -153,7 +153,7 @@ class TestDeploy(base.TestCase):
|
|||
return_value='rendered'))
|
||||
expected_hash = '86e85bd63aef5a740d4b7b887ade37ec9017c961'
|
||||
self.assertEqual(
|
||||
expected_hash, deploy._get_service_files_hash('/tmp', files, {}))
|
||||
expected_hash, deploy._get_service_files_hash(files, {}))
|
||||
|
||||
|
||||
class TestDeployProcessPorts(base.TestCase):
|
||||
|
|
Loading…
Reference in New Issue