diff --git a/fuel_ccp/common/utils.py b/fuel_ccp/common/utils.py
index 525ebb36..beacf660 100644
--- a/fuel_ccp/common/utils.py
+++ b/fuel_ccp/common/utils.py
@@ -169,6 +169,19 @@ def get_deploy_components_info(rendering_context=None):
     return components_map
 
 
+def get_dependencies_map(components_map):
+    deps_map = {}
+    for service_name, service in components_map.items():
+        containers = service['service_content']['service']['containers']
+        for cont in containers:
+            deps_map[cont['name']] = service_name
+            for job in itertools.chain(cont.get('pre', []),
+                                       cont.get('post', [])):
+                if job.get('type') == 'single':
+                    deps_map[job['name']] = service_name
+    return deps_map
+
+
 def get_deployed_components():
     """Returns set of deployed components."""
     deployed_deployments = kubernetes.list_cluster_deployments()
diff --git a/fuel_ccp/deploy.py b/fuel_ccp/deploy.py
index 0de44e03..6800f380 100644
--- a/fuel_ccp/deploy.py
+++ b/fuel_ccp/deploy.py
@@ -183,7 +183,7 @@ def _parse_workflows(service):
         wf = {}
 
         _create_pre_commands(wf, cont)
-        _create_daemon(wf, cont)
+        _create_daemon(wf, cont, service['name'])
         _create_post_commands(wf, cont)
         workflows.update({cont["name"]: {"workflow": wf}})
     return workflows
@@ -193,10 +193,10 @@ def _create_job_wfs(container, service_name):
     wfs = {}
     for job in container.get("pre", ()):
         if _is_single_job(job):
-            wfs.update(_create_job_wf(job))
+            wfs.update(_create_job_wf(job, service_name))
     for job in container.get("post", ()):
         if _is_single_job(job):
-            wfs.update(_create_job_wf(job, True, service_name))
+            wfs.update(_create_job_wf(job, service_name, True))
     return wfs
 
 
@@ -251,14 +251,15 @@ def _create_pre_commands(workflow, container):
         _create_command(workflow["pre"], cmd)
 
 
-def _create_daemon(workflow, container):
-    workflow["name"] = container["name"]
+def _create_daemon(workflow, container, service_name):
+    workflow["name"] = "%s/%s" % (service_name, container["name"])
     daemon = container["daemon"]
     workflow["dependencies"] = []
     # TODO(sreshetniak): add files from job
     for cmd in container.get("pre", ()):
         if cmd.get("type", "local") == "single":
-            workflow["dependencies"].append(cmd["name"])
+            workflow["dependencies"].append("%s/%s" % (
+                service_name, cmd["name"]))
     workflow["dependencies"].extend(daemon.get("dependencies", ()))
     workflow["daemon"] = {}
     _fill_cmd(workflow["daemon"], daemon)
@@ -316,9 +317,9 @@ def _create_command(workflow, cmd):
     workflow.append(cmd_flow)
 
 
-def _create_job_wf(job, post=False, service_name=None):
+def _create_job_wf(job, service_name, post=False):
     wrk = {}
-    wrk["name"] = job["name"]
+    wrk["name"] = "%s/%s" % (service_name, job["name"])
     wrk["dependencies"] = job.get("dependencies", [])
     if post:
         wrk["dependencies"].append(service_name)
@@ -615,6 +616,17 @@ def _create_registry_secret():
         kubernetes.process_object(secret)
 
 
+def process_dependencies(service, deps_map):
+    containers = service['service_content']['service']['containers']
+    for cont in containers:
+        for cmd in itertools.chain(
+                cont.get('pre', []), [cont['daemon']],
+                cont.get('post', [])):
+            cmd['dependencies'] = ["%s/%s" % (
+                deps_map[dep.split(':')[0]], dep) for dep in cmd.get(
+                    'dependencies', [])]
+
+
 def deploy_components(components_map, components):
     topology = _make_topology(CONF.nodes, CONF.roles, CONF.replicas)
 
@@ -644,11 +656,14 @@ def deploy_components(components_map, components):
     exports_cm = _create_exports_configmap(exports_map)
     exports_ctx = {'files_header': j2_imports_files_header,
                    'map': exports_map}
+    deps_map = utils.get_dependencies_map(components_map)
+
     configmaps = (start_script_cm, exports_cm)
 
     upgrading_components = {}
     for service_name in components:
         service = components_map[service_name]
+        process_dependencies(service, deps_map)
         service["service_content"]['service']['exports_ctx'] = exports_ctx
         objects_gen = parse_role(service, topology, configmaps)
         objects = list(itertools.chain.from_iterable(objects_gen))
diff --git a/fuel_ccp/tests/test_deploy.py b/fuel_ccp/tests/test_deploy.py
index 7199e598..676c1e42 100644
--- a/fuel_ccp/tests/test_deploy.py
+++ b/fuel_ccp/tests/test_deploy.py
@@ -344,8 +344,8 @@ class TestDeployParseWorkflow(base.TestCase):
         expected_workflows = {
             "kenny": {
                 "workflow": {
-                    "name": "kenny",
-                    "dependencies": ["cartman-mom", "stan", "kyle"],
+                    "name": "south-park/kenny",
+                    "dependencies": ["south-park/cartman-mom", "stan", "kyle"],
                     "pre": [],
                     "post": [],
                     "files": [
@@ -363,7 +363,7 @@ class TestDeployParseWorkflow(base.TestCase):
             },
             "cartman-mom": {
                 "workflow": {
-                    "name": "cartman-mom",
+                    "name": "south-park/cartman-mom",
                     "dependencies": ["cartman-dad"],
                     "job": {
                         "command": "oops"
@@ -372,7 +372,7 @@ class TestDeployParseWorkflow(base.TestCase):
             },
             "eric-mom": {
                 "workflow": {
-                    "name": "eric-mom",
+                    "name": "south-park/eric-mom",
                     "dependencies": ["eric-dad", "south-park"],
                     "files": [
                         {
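
Illustrative note (not part of the patch): the change namespaces workflow and dependency names as "<service>/<name>". The sketch below re-implements the two new helpers standalone to show the effect on a toy components_map; the service and container names here are hypothetical, and the real code operates on the parsed service definitions inside fuel_ccp.

import itertools

# Hypothetical minimal components_map shaped like the structures the diff walks.
components_map = {
    "south-park": {
        "service_content": {"service": {"containers": [
            {"name": "kenny",
             "daemon": {"command": "run", "dependencies": ["cartman-mom"]},
             "pre": [{"name": "cartman-mom", "type": "single"}]},
        ]}},
    },
}

def get_dependencies_map(components_map):
    # Mirrors the new fuel_ccp.common.utils.get_dependencies_map:
    # map every container and single-type job name to its owning service.
    deps_map = {}
    for service_name, service in components_map.items():
        for cont in service["service_content"]["service"]["containers"]:
            deps_map[cont["name"]] = service_name
            for job in itertools.chain(cont.get("pre", []),
                                       cont.get("post", [])):
                if job.get("type") == "single":
                    deps_map[job["name"]] = service_name
    return deps_map

def process_dependencies(service, deps_map):
    # Mirrors the new fuel_ccp.deploy.process_dependencies:
    # rewrite every dependency as "<owning service>/<dependency>".
    for cont in service["service_content"]["service"]["containers"]:
        for cmd in itertools.chain(cont.get("pre", []), [cont["daemon"]],
                                   cont.get("post", [])):
            cmd["dependencies"] = [
                "%s/%s" % (deps_map[dep.split(":")[0]], dep)
                for dep in cmd.get("dependencies", [])]

deps_map = get_dependencies_map(components_map)
process_dependencies(components_map["south-park"], deps_map)
# The daemon's dependency is now namespaced: ['south-park/cartman-mom']
print(components_map["south-park"]["service_content"]["service"]
      ["containers"][0]["daemon"]["dependencies"])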