Merge "Remove docs, deprecated hooks, tests"
This commit is contained in:
commit
5270cc11c9
|
@ -7,6 +7,7 @@
|
|||
required-projects:
|
||||
- openstack-infra/devstack-gate
|
||||
- openstack/heat
|
||||
- openstack/heat-agents
|
||||
- openstack/heat-templates
|
||||
|
||||
- project:
|
||||
|
|
|
@ -19,3 +19,12 @@ This repository provides:
|
|||
* Example templates which demonstrate core Heat functionality
|
||||
* Related image-building templates
|
||||
* Template-related scripts and conversion tools
|
||||
|
||||
============================
|
||||
Software configuration hooks
|
||||
============================
|
||||
|
||||
|
||||
All hooks (heat agents) in heat-templates repository are removed,
|
||||
please use hooks in `heat-agents https://git.openstack.org/cgit/openstack/heat-agents` instead.
|
||||
Here is document entry for heat-agents: `https://docs.openstack.org/heat-agents/latest/`
|
||||
|
|
|
@ -1,18 +0,0 @@
|
|||
.. heat-templates documentation master file, created by
|
||||
sphinx-quickstart on Thu Jul 20 09:19:39 2017.
|
||||
You can adapt this file completely to your liking, but it should at least
|
||||
contain the root `toctree` directive.
|
||||
|
||||
Welcome to Heat Templates!
|
||||
==========================
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
|
||||
|
||||
Indices and tables
|
||||
==================
|
||||
|
||||
* :ref:`genindex`
|
||||
* :ref:`search`
|
|
@ -7,7 +7,10 @@ Heat::InstallConfigAgent.
|
|||
|
||||
This can be used by server user_data when booting a pristine image
|
||||
to install the agent required to use software deployment resources in
|
||||
templates.
|
||||
templates. The templates assume that you have the heat-agents
|
||||
repository checked out alongside the heat-templates repository; if the
|
||||
agent code is in a different location you will need to adjust the paths
|
||||
in the templates.
|
||||
|
||||
The environments only install the heat-config-script hook. If other hooks are
|
||||
required then define your own environment file which defines a resource
|
||||
|
|
|
@ -15,9 +15,9 @@ resources:
|
|||
config:
|
||||
str_replace:
|
||||
params:
|
||||
$heat_config_script: {get_file: ../../elements/heat-config/os-refresh-config/configure.d/55-heat-config}
|
||||
$hook_script: {get_file: ../../elements/heat-config-script/install.d/hook-script.py}
|
||||
$heat_config_notify: {get_file: ../../elements/heat-config/bin/heat-config-notify}
|
||||
$heat_config_script: {get_file: ../../../../../heat-agents/heat-config/os-refresh-config/configure.d/55-heat-config}
|
||||
$hook_script: {get_file: ../../../../../heat-agents/heat-config-script/install.d/hook-script.py}
|
||||
$heat_config_notify: {get_file: ../../../../../heat-agents/heat-config/bin/heat-config-notify}
|
||||
$occ_conf: {get_file: fragments/os-collect-config.conf}
|
||||
$orc_oac: {get_file: fragments/20-os-apply-config}
|
||||
template: {get_file: fragments/configure_config_agent.sh}
|
||||
|
|
|
@ -21,9 +21,9 @@ resources:
|
|||
config:
|
||||
str_replace:
|
||||
params:
|
||||
$heat_config_script: {get_file: ../../elements/heat-config/os-refresh-config/configure.d/55-heat-config}
|
||||
$hook_script: {get_file: ../../elements/heat-config-script/install.d/hook-script.py}
|
||||
$heat_config_notify: {get_file: ../../elements/heat-config/bin/heat-config-notify}
|
||||
$heat_config_script: {get_file: ../../../../../heat-agents/heat-config/os-refresh-config/configure.d/55-heat-config}
|
||||
$hook_script: {get_file: ../../../../../heat-agents/heat-config-script/install.d/hook-script.py}
|
||||
$heat_config_notify: {get_file: ../../../../../heat-agents/heat-config/bin/heat-config-notify}
|
||||
$occ_conf: {get_file: fragments/os-collect-config.conf}
|
||||
$orc_oac: {get_file: fragments/20-os-apply-config}
|
||||
template: {get_file: fragments/configure_config_agent.sh}
|
||||
|
|
|
@ -15,9 +15,9 @@ resources:
|
|||
config:
|
||||
str_replace:
|
||||
params:
|
||||
$heat_config_script: {get_file: ../../elements/heat-config/os-refresh-config/configure.d/55-heat-config}
|
||||
$hook_script: {get_file: ../../elements/heat-config-script/install.d/hook-script.py}
|
||||
$heat_config_notify: {get_file: ../../elements/heat-config/bin/heat-config-notify}
|
||||
$heat_config_script: {get_file: ../../../../../heat-agents/heat-config/os-refresh-config/configure.d/55-heat-config}
|
||||
$hook_script: {get_file: ../../../../../heat-agents/heat-config-script/install.d/hook-script.py}
|
||||
$heat_config_notify: {get_file: ../../../../../heat-agents/heat-config/bin/heat-config-notify}
|
||||
$occ_conf: {get_file: fragments/os-collect-config.conf}
|
||||
$orc_oac: {get_file: fragments/20-os-apply-config}
|
||||
template: {get_file: fragments/configure_config_agent.sh}
|
||||
|
|
|
@ -32,9 +32,9 @@ resources:
|
|||
config:
|
||||
str_replace:
|
||||
params:
|
||||
$heat_config_script: {get_file: ../../elements/heat-config/os-refresh-config/configure.d/55-heat-config}
|
||||
$hook_script: {get_file: ../../elements/heat-config-script/install.d/hook-script.py}
|
||||
$heat_config_notify: {get_file: ../../elements/heat-config/bin/heat-config-notify}
|
||||
$heat_config_script: {get_file: ../../../../../heat-agents/heat-config/os-refresh-config/configure.d/55-heat-config}
|
||||
$hook_script: {get_file: ../../../../../heat-agents/heat-config-script/install.d/hook-script.py}
|
||||
$heat_config_notify: {get_file: ../../../../../heat-agents/heat-config/bin/heat-config-notify}
|
||||
$occ_conf: {get_file: fragments/os-collect-config.conf}
|
||||
$orc_oac: {get_file: fragments/20-os-apply-config}
|
||||
template: {get_file: fragments/configure_config_agent.sh}
|
||||
|
@ -48,7 +48,7 @@ resources:
|
|||
- path: /var/lib/heat-config/hooks/puppet
|
||||
owner: "root:root"
|
||||
permissions: "0755"
|
||||
content: {get_file: ../../elements/heat-config-puppet/install.d/hook-puppet.py}
|
||||
content: {get_file: ../../../../../heat-agents/heat-config-puppet/install.d/hook-puppet.py}
|
||||
|
||||
install_cfn_init_hook:
|
||||
type: "OS::Heat::CloudConfig"
|
||||
|
@ -59,7 +59,7 @@ resources:
|
|||
- path: /var/lib/heat-config/hooks/cfn-init
|
||||
owner: "root:root"
|
||||
permissions: "0755"
|
||||
content: {get_file: ../../elements/heat-config-cfn-init/install.d/hook-cfn-init.py}
|
||||
content: {get_file: ../../../../../heat-agents/heat-config-cfn-init/install.d/hook-cfn-init.py}
|
||||
|
||||
start_config_agent:
|
||||
type: "OS::Heat::SoftwareConfig"
|
||||
|
|
|
@ -21,9 +21,9 @@ resources:
|
|||
config:
|
||||
str_replace:
|
||||
params:
|
||||
$heat_config_script: {get_file: ../../elements/heat-config/os-refresh-config/configure.d/55-heat-config}
|
||||
$hook_script: {get_file: ../../elements/heat-config-script/install.d/hook-script.py}
|
||||
$heat_config_notify: {get_file: ../../elements/heat-config/bin/heat-config-notify}
|
||||
$heat_config_script: {get_file: ../../../../../heat-agents/heat-config/os-refresh-config/configure.d/55-heat-config}
|
||||
$hook_script: {get_file: ../../../../../heat-agents/heat-config-script/install.d/hook-script.py}
|
||||
$heat_config_notify: {get_file: ../../../../../heat-agents/heat-config/bin/heat-config-notify}
|
||||
$occ_conf: {get_file: fragments/os-collect-config.conf}
|
||||
$orc_oac: {get_file: fragments/20-os-apply-config}
|
||||
template: {get_file: fragments/configure_config_agent.sh}
|
||||
|
|
|
@ -1,51 +0,0 @@
|
|||
============================
|
||||
Software configuration hooks
|
||||
============================
|
||||
|
||||
.. warning::
|
||||
All hooks (heat agents) in heat-templates repository are deprecated,
|
||||
please use hooks in `heat-agents https://git.openstack.org/cgit/openstack/heat-agents` instead.
|
||||
Here is document entry for heat-agents: `https://docs.openstack.org/heat-agents/latest/`
|
||||
|
||||
This directory contains `diskimage-builder <https://github.com/openstack/diskimage-builder>`_
|
||||
elements to build an image which contains the software configuration hook
|
||||
required to use your preferred configuration method.
|
||||
|
||||
These elements depend on some elements found in the
|
||||
`tripleo-image-elements <https://github.com/openstack/tripleo-image-elements>`_
|
||||
repository. These elements will build an image which uses
|
||||
`os-collect-config <https://github.com/openstack/os-collect-config>`_,
|
||||
`os-refresh-config <https://github.com/openstack/os-refresh-config>`_, and
|
||||
`os-apply-config <https://github.com/openstack/os-apply-config>`_ together to
|
||||
invoke a hook with the supplied configuration data, and return any outputs back
|
||||
to heat.
|
||||
|
||||
When building an image only the elements for the preferred configuration methods are required. The heat-config element is automatically included as a dependency.
|
||||
|
||||
An example fedora based image containing all hooks can be built and uploaded to glance
|
||||
with the following:
|
||||
|
||||
::
|
||||
|
||||
git clone https://git.openstack.org/openstack/diskimage-builder.git
|
||||
git clone https://git.openstack.org/openstack/tripleo-image-elements.git
|
||||
git clone https://git.openstack.org/openstack/heat-templates.git
|
||||
git clone https://git.openstack.org/openstack/dib-utils.git
|
||||
export PATH="${PWD}/dib-utils/bin:$PATH"
|
||||
export ELEMENTS_PATH=tripleo-image-elements/elements:heat-templates/hot/software-config/elements
|
||||
diskimage-builder/bin/disk-image-create vm \
|
||||
fedora selinux-permissive \
|
||||
os-collect-config \
|
||||
os-refresh-config \
|
||||
os-apply-config \
|
||||
heat-config \
|
||||
heat-config-ansible \
|
||||
heat-config-cfn-init \
|
||||
heat-config-docker-compose \
|
||||
heat-config-kubelet \
|
||||
heat-config-puppet \
|
||||
heat-config-salt \
|
||||
heat-config-script \
|
||||
-o fedora-software-config.qcow2
|
||||
openstack image create --disk-format qcow2 --container-format bare fedora-software-config < \
|
||||
fedora-software-config.qcow2
|
|
@ -1,4 +0,0 @@
|
|||
A hook which invokes ``ansible-playbook -i "localhost,"`` on the provided
|
||||
configuration. Config inputs are written to a 'variables.json' file and
|
||||
then passed to ansible via the '--extra-vars @json_file' parameter.
|
||||
Config output values are read from written-out files.
|
|
@ -1 +0,0 @@
|
|||
heat-config
|
|
@ -1,7 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -x
|
||||
|
||||
SCRIPTDIR=$(dirname $0)
|
||||
|
||||
install-packages ansible
|
||||
install -D -g root -o root -m 0755 ${SCRIPTDIR}/hook-ansible.py /var/lib/heat-config/hooks/ansible
|
|
@ -1,133 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
WORKING_DIR = os.environ.get('HEAT_ANSIBLE_WORKING',
|
||||
'/var/lib/heat-config/heat-config-ansible')
|
||||
OUTPUTS_DIR = os.environ.get('HEAT_ANSIBLE_OUTPUTS',
|
||||
'/var/run/heat-config/heat-config-ansible')
|
||||
ANSIBLE_CMD = os.environ.get('HEAT_ANSIBLE_CMD', 'ansible-playbook')
|
||||
ANSIBLE_INVENTORY = os.environ.get('HEAT_ANSIBLE_INVENTORY', 'localhost,')
|
||||
|
||||
|
||||
def prepare_dir(path):
|
||||
if not os.path.isdir(path):
|
||||
os.makedirs(path, 0o700)
|
||||
|
||||
|
||||
def main(argv=sys.argv):
|
||||
warnings.warn('This hook is deprecated, please use hooks from heat-agents '
|
||||
'repository instead.', DeprecationWarning)
|
||||
|
||||
log = logging.getLogger('heat-config')
|
||||
handler = logging.StreamHandler(sys.stderr)
|
||||
handler.setFormatter(
|
||||
logging.Formatter(
|
||||
'[%(asctime)s] (%(name)s) [%(levelname)s] %(message)s'))
|
||||
log.addHandler(handler)
|
||||
log.setLevel('DEBUG')
|
||||
|
||||
prepare_dir(OUTPUTS_DIR)
|
||||
prepare_dir(WORKING_DIR)
|
||||
os.chdir(WORKING_DIR)
|
||||
|
||||
c = json.load(sys.stdin)
|
||||
|
||||
variables = {}
|
||||
for input in c['inputs']:
|
||||
variables[input['name']] = input.get('value', '')
|
||||
|
||||
tags = c['options'].get('tags')
|
||||
modulepath = c['options'].get('modulepath')
|
||||
|
||||
fn = os.path.join(WORKING_DIR, '%s_playbook.yaml' % c['id'])
|
||||
vars_filename = os.path.join(WORKING_DIR, '%s_variables.json' % c['id'])
|
||||
heat_outputs_path = os.path.join(OUTPUTS_DIR, c['id'])
|
||||
variables['heat_outputs_path'] = heat_outputs_path
|
||||
|
||||
config_text = c.get('config', '')
|
||||
if not config_text:
|
||||
log.warn("No 'config' input found, nothing to do.")
|
||||
return
|
||||
# Write 'variables' to file
|
||||
with os.fdopen(os.open(
|
||||
vars_filename, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as var_file:
|
||||
json.dump(variables, var_file)
|
||||
# Write the executable, 'config', to file
|
||||
with os.fdopen(os.open(fn, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as f:
|
||||
f.write(c.get('config', '').encode('utf-8'))
|
||||
|
||||
cmd = [
|
||||
ANSIBLE_CMD,
|
||||
'-i',
|
||||
ANSIBLE_INVENTORY,
|
||||
fn,
|
||||
'--extra-vars',
|
||||
'@%s' % vars_filename
|
||||
]
|
||||
if tags:
|
||||
cmd.insert(3, '--tags')
|
||||
cmd.insert(4, tags)
|
||||
if modulepath:
|
||||
cmd.insert(3, '--module-path')
|
||||
cmd.insert(4, modulepath)
|
||||
|
||||
log.debug('Running %s' % (' '.join(cmd),))
|
||||
try:
|
||||
subproc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
except OSError:
|
||||
log.warn("ansible not installed yet")
|
||||
return
|
||||
stdout, stderr = subproc.communicate()
|
||||
|
||||
log.info('Return code %s' % subproc.returncode)
|
||||
if stdout:
|
||||
log.info(stdout)
|
||||
if stderr:
|
||||
log.info(stderr)
|
||||
|
||||
# TODO(stevebaker): Test if ansible returns any non-zero
|
||||
# return codes in success.
|
||||
if subproc.returncode:
|
||||
log.error("Error running %s. [%s]\n" % (fn, subproc.returncode))
|
||||
else:
|
||||
log.info('Completed %s' % fn)
|
||||
|
||||
response = {}
|
||||
|
||||
for output in c.get('outputs') or []:
|
||||
output_name = output['name']
|
||||
try:
|
||||
with open('%s.%s' % (heat_outputs_path, output_name)) as out:
|
||||
response[output_name] = out.read()
|
||||
except IOError:
|
||||
pass
|
||||
|
||||
response.update({
|
||||
'deploy_stdout': stdout,
|
||||
'deploy_stderr': stderr,
|
||||
'deploy_status_code': subproc.returncode,
|
||||
})
|
||||
|
||||
json.dump(response, sys.stdout)
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv))
|
|
@ -1,14 +0,0 @@
|
|||
A hook which invokes os-apply-config.
|
||||
|
||||
The intent is for this element (hook script) to be used in place of the one in
|
||||
tripleo-image-elements which relies on an external signal handling
|
||||
shell script at the end of the os-refresh-config run (99-refresh-completed).
|
||||
This version will run os-apply-config and return a signal immediately. Because
|
||||
it uses the heat-hook mechanisms it also supports a broader set of signal
|
||||
handling capabilities... which 99-refresh-completed doesn't fully support.
|
||||
|
||||
It is worth noting that this hook runs os-apply-config against all the
|
||||
accumulated metadata, not just data supplied to an individual hook.
|
||||
|
||||
To use this hook set group: to 'apply-config' instead of 'os-apply-config'
|
||||
in your Heat software configuration resources.
|
|
@ -1 +0,0 @@
|
|||
heat-config
|
|
@ -1,6 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -x
|
||||
|
||||
SCRIPTDIR=$(dirname $0)
|
||||
|
||||
install -D -g root -o root -m 0755 ${SCRIPTDIR}/hook-apply-config.py /var/lib/heat-config/hooks/apply-config
|
|
@ -1,60 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
APPLY_CONFIG_CMD = os.environ.get('HEAT_APPLY_CONFIG_CMD', 'os-apply-config')
|
||||
|
||||
|
||||
def main(argv=sys.argv):
|
||||
warnings.warn('This hook is deprecated, please use hooks from heat-agents '
|
||||
'repository instead.', DeprecationWarning)
|
||||
log = logging.getLogger('heat-config')
|
||||
handler = logging.StreamHandler(sys.stderr)
|
||||
handler.setFormatter(
|
||||
logging.Formatter(
|
||||
'[%(asctime)s] (%(name)s) [%(levelname)s] %(message)s'))
|
||||
log.addHandler(handler)
|
||||
log.setLevel('DEBUG')
|
||||
|
||||
env = os.environ.copy()
|
||||
|
||||
log.debug('Running %s' % APPLY_CONFIG_CMD)
|
||||
subproc = subprocess.Popen([APPLY_CONFIG_CMD], stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE, env=env)
|
||||
stdout, stderr = subproc.communicate()
|
||||
|
||||
log.info(stdout)
|
||||
log.debug(stderr)
|
||||
|
||||
if subproc.returncode:
|
||||
log.error("Error running apply-config: [%s]\n" % subproc.returncode)
|
||||
else:
|
||||
log.info('Completed apply-config.')
|
||||
|
||||
response = {
|
||||
'deploy_stdout': stdout,
|
||||
'deploy_stderr': stderr,
|
||||
'deploy_status_code': subproc.returncode,
|
||||
}
|
||||
|
||||
json.dump(response, sys.stdout)
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv))
|
|
@ -1,3 +0,0 @@
|
|||
A hook which consumes configuration in the format of AWS::CloudFormation::Init
|
||||
metadata. It is provided to enable migrating from CloudFormation metadata
|
||||
configuration to configuration using config and deployment resources.
|
|
@ -1 +0,0 @@
|
|||
heat-config
|
|
@ -1,6 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -x
|
||||
|
||||
SCRIPTDIR=$(dirname $0)
|
||||
|
||||
install -D -g root -o root -m 0755 ${SCRIPTDIR}/hook-cfn-init.py /var/lib/heat-config/hooks/cfn-init
|
|
@ -1,86 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
|
||||
# Ideally this path would be /var/lib/heat-cfntools/cfn-init-data
|
||||
# but this is where all boot metadata is stored
|
||||
LAST_METADATA_DIR = os.environ.get('HEAT_CFN_INIT_LAST_METADATA_DIR',
|
||||
'/var/cache/heat-cfntools')
|
||||
|
||||
|
||||
CFN_INIT_CMD = os.environ.get('HEAT_CFN_INIT_CMD',
|
||||
'cfn-init')
|
||||
|
||||
|
||||
def main(argv=sys.argv, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):
|
||||
warnings.warn('This hook is deprecated, please use hooks from heat-agents '
|
||||
'repository instead.', DeprecationWarning)
|
||||
|
||||
log = logging.getLogger('heat-config')
|
||||
handler = logging.StreamHandler(stderr)
|
||||
handler.setFormatter(
|
||||
logging.Formatter(
|
||||
'[%(asctime)s] (%(name)s) [%(levelname)s] %(message)s'))
|
||||
log.addHandler(handler)
|
||||
log.setLevel('DEBUG')
|
||||
|
||||
c = json.load(stdin)
|
||||
|
||||
config = c.get('config', {})
|
||||
if not isinstance(config, dict):
|
||||
config = json.loads(config)
|
||||
meta = {'AWS::CloudFormation::Init': config}
|
||||
|
||||
if not os.path.isdir(LAST_METADATA_DIR):
|
||||
os.makedirs(LAST_METADATA_DIR, 0o700)
|
||||
|
||||
fn = os.path.join(LAST_METADATA_DIR, 'last_metadata')
|
||||
with os.fdopen(os.open(fn, os.O_CREAT | os.O_WRONLY | os.O_TRUNC, 0o700),
|
||||
'w') as f:
|
||||
json.dump(meta, f)
|
||||
|
||||
log.debug('Running %s' % CFN_INIT_CMD)
|
||||
subproc = subprocess.Popen([CFN_INIT_CMD], stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
cstdout, cstderr = subproc.communicate()
|
||||
|
||||
if cstdout:
|
||||
log.info(cstdout)
|
||||
if cstderr:
|
||||
log.info(cstderr)
|
||||
|
||||
if subproc.returncode:
|
||||
log.error("Error running %s. [%s]\n" % (
|
||||
CFN_INIT_CMD, subproc.returncode))
|
||||
else:
|
||||
log.info('Completed %s' % CFN_INIT_CMD)
|
||||
|
||||
response = {
|
||||
'deploy_stdout': cstdout,
|
||||
'deploy_stderr': cstderr,
|
||||
'deploy_status_code': subproc.returncode,
|
||||
}
|
||||
|
||||
json.dump(response, stdout)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
|
@ -1,36 +0,0 @@
|
|||
A hook which invokes ``chef-client`` in local mode (chef zero) on the
|
||||
provided configuration.
|
||||
|
||||
Inputs:
|
||||
-------
|
||||
Inputs are attribute overrides. In order to format them correctly for
|
||||
consumption, you need to explicitly declare each top-level section as an
|
||||
input of type ``Json`` in your config resource.
|
||||
|
||||
Additionally, there is a special input named ``environment`` of type
|
||||
``String`` that you can use to specify which environment to use when
|
||||
applying the config. You do not have to explicitly declare this input in
|
||||
the config resource.
|
||||
|
||||
Outputs:
|
||||
--------
|
||||
If you need to capture specific outputs from your chef run, you should
|
||||
specify the output name(s) as normal in your config. Then, your recipes
|
||||
should write files to the directory specified by the ``heat_outputs_path``
|
||||
environment variable. The file name should match the name of the output
|
||||
you are trying to capture.
|
||||
|
||||
Options:
|
||||
-------------
|
||||
|
||||
kitchen : optional
|
||||
A URL for a Git repository containing the desired recipes, roles,
|
||||
environments and other configuration.
|
||||
|
||||
This will be cloned into ``kitchen_path`` for use by chef.
|
||||
|
||||
kitchen_path : default ``/var/lib/heat-config/heat-config-chef/kitchen``
|
||||
Instance-local path for the recipes, roles, environments, etc.
|
||||
|
||||
If ``kitchen`` is not specified, this directory must be populated via
|
||||
user-data, another software config, or other "manual" method.
|
|
@ -1 +0,0 @@
|
|||
heat-config
|
|
@ -1,7 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -x
|
||||
|
||||
SCRIPTDIR=$(dirname $0)
|
||||
|
||||
install-packages chef git
|
||||
install -D -g root -o root -m 0755 ${SCRIPTDIR}/hook-chef.py /var/lib/heat-config/hooks/chef
|
|
@ -1,165 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import six
|
||||
import subprocess
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
DEPLOY_KEYS = ("deploy_server_id",
|
||||
"deploy_action",
|
||||
"deploy_stack_id",
|
||||
"deploy_resource_name",
|
||||
"deploy_signal_transport",
|
||||
"deploy_signal_id",
|
||||
"deploy_signal_verb")
|
||||
WORKING_DIR = os.environ.get('HEAT_CHEF_WORKING',
|
||||
'/var/lib/heat-config/heat-config-chef')
|
||||
OUTPUTS_DIR = os.environ.get('HEAT_CHEF_OUTPUTS',
|
||||
'/var/run/heat-config/heat-config-chef')
|
||||
|
||||
|
||||
def prepare_dir(path):
|
||||
if not os.path.isdir(path):
|
||||
os.makedirs(path, 0o700)
|
||||
|
||||
|
||||
def run_subproc(fn, **kwargs):
|
||||
env = os.environ.copy()
|
||||
for k, v in kwargs.items():
|
||||
env[six.text_type(k)] = v
|
||||
try:
|
||||
subproc = subprocess.Popen(fn, stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
env=env)
|
||||
stdout, stderr = subproc.communicate()
|
||||
except OSError as exc:
|
||||
ret = -1
|
||||
stderr = six.text_type(exc)
|
||||
stdout = ""
|
||||
else:
|
||||
ret = subproc.returncode
|
||||
if not ret:
|
||||
ret = 0
|
||||
return ret, stdout, stderr
|
||||
|
||||
|
||||
def main(argv=sys.argv):
|
||||
warnings.warn('This hook is deprecated, please use hooks from heat-agents '
|
||||
'repository instead.', DeprecationWarning)
|
||||
|
||||
log = logging.getLogger('heat-config')
|
||||
handler = logging.StreamHandler(sys.stderr)
|
||||
handler.setFormatter(
|
||||
logging.Formatter(
|
||||
'[%(asctime)s] (%(name)s) [%(levelname)s] %(message)s'))
|
||||
log.addHandler(handler)
|
||||
log.setLevel('DEBUG')
|
||||
|
||||
prepare_dir(OUTPUTS_DIR)
|
||||
prepare_dir(WORKING_DIR)
|
||||
os.chdir(WORKING_DIR)
|
||||
|
||||
c = json.load(sys.stdin)
|
||||
|
||||
client_config = ("log_level :debug\n"
|
||||
"log_location STDOUT\n"
|
||||
"local_mode true\n"
|
||||
"chef_zero.enabled true")
|
||||
|
||||
# configure/set up the kitchen
|
||||
kitchen = c['options'].get('kitchen')
|
||||
kitchen_path = c['options'].get('kitchen_path', os.path.join(WORKING_DIR,
|
||||
"kitchen"))
|
||||
cookbook_path = os.path.join(kitchen_path, "cookbooks")
|
||||
role_path = os.path.join(kitchen_path, "roles")
|
||||
environment_path = os.path.join(kitchen_path, "environments")
|
||||
client_config += "\ncookbook_path '%s'" % cookbook_path
|
||||
client_config += "\nrole_path '%s'" % role_path
|
||||
client_config += "\nenvironment_path '%s'" % environment_path
|
||||
if kitchen:
|
||||
log.debug("Cloning kitchen from %s", kitchen)
|
||||
# remove the existing kitchen on update so we get a fresh clone
|
||||
dep_action = next((input['value'] for input in c['inputs']
|
||||
if input['name'] == "deploy_action"), None)
|
||||
if dep_action == "UPDATE":
|
||||
shutil.rmtree(kitchen_path, ignore_errors=True)
|
||||
cmd = ["git", "clone", kitchen, kitchen_path]
|
||||
ret, out, err = run_subproc(cmd)
|
||||
if ret != 0:
|
||||
log.error("Error cloning kitchen from %s into %s: %s", kitchen,
|
||||
kitchen_path, err)
|
||||
json.dump({'deploy_status_code': ret,
|
||||
'deploy_stdout': out,
|
||||
'deploy_stderr': err},
|
||||
sys.stdout)
|
||||
return 0
|
||||
|
||||
# write the json attributes
|
||||
ret, out, err = run_subproc(['hostname', '-f'])
|
||||
if ret == 0:
|
||||
fqdn = out.strip()
|
||||
else:
|
||||
err = "Could not determine hostname with hostname -f"
|
||||
json.dump({'deploy_status_code': ret,
|
||||
'deploy_stdout': "",
|
||||
'deploy_stderr': err}, sys.stdout)
|
||||
return 0
|
||||
node_config = {}
|
||||
for input in c['inputs']:
|
||||
if input['name'] == 'environment':
|
||||
client_config += "\nenvironment '%s'" % input['value']
|
||||
elif input['name'] not in DEPLOY_KEYS:
|
||||
node_config.update({input['name']: input['value']})
|
||||
node_config.update({"run_list": json.loads(c['config'])})
|
||||
node_path = os.path.join(WORKING_DIR, "node")
|
||||
prepare_dir(node_path)
|
||||
node_file = os.path.join(node_path, "%s.json" % fqdn)
|
||||
with os.fdopen(os.open(node_file, os.O_CREAT | os.O_WRONLY, 0o600),
|
||||
'w') as f:
|
||||
f.write(json.dumps(node_config, indent=4))
|
||||
client_config += "\nnode_path '%s'" % node_path
|
||||
|
||||
# write out the completed client config
|
||||
config_path = os.path.join(WORKING_DIR, "client.rb")
|
||||
with os.fdopen(os.open(config_path, os.O_CREAT | os.O_WRONLY, 0o600),
|
||||
'w') as f:
|
||||
f.write(client_config)
|
||||
|
||||
# run chef
|
||||
heat_outputs_path = os.path.join(OUTPUTS_DIR, c['id'])
|
||||
cmd = ['chef-client', '-z', '--config', config_path, "-j", node_file]
|
||||
ret, out, err = run_subproc(cmd, heat_outputs_path=heat_outputs_path)
|
||||
resp = {'deploy_status_code': ret,
|
||||
'deploy_stdout': out,
|
||||
'deploy_stderr': err}
|
||||
log.debug("Chef output: %s", out)
|
||||
if err:
|
||||
log.error("Chef return code %s:\n%s", ret, err)
|
||||
for output in c.get('outputs', []):
|
||||
output_name = output['name']
|
||||
try:
|
||||
with open('%s.%s' % (heat_outputs_path, output_name)) as out:
|
||||
resp[output_name] = out.read()
|
||||
except IOError:
|
||||
pass
|
||||
json.dump(resp, sys.stdout)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv))
|
|
@ -1,9 +0,0 @@
|
|||
A hook which uses the `docker` command to deploy containers.
|
||||
|
||||
The hook currently supports specifying containers in the `docker-compose v1
|
||||
format <https://docs.docker.com/compose/compose-file/#/version-1>`_. The
|
||||
intention is for this hook to also support the kubernetes pod format.
|
||||
|
||||
A dedicated os-refresh-config script will remove running containers if a
|
||||
deployment is removed or changed, then the docker-cmd hook will run any
|
||||
containers in new or updated deployments.
|
|
@ -1,2 +0,0 @@
|
|||
os-apply-config
|
||||
os-refresh-config
|
|
@ -1,6 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -x
|
||||
|
||||
SCRIPTDIR=$(dirname $0)
|
||||
|
||||
install -D -g root -o root -m 0755 ${SCRIPTDIR}/hook-docker-cmd.py /var/lib/heat-config/hooks/docker-cmd
|
|
@ -1,143 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import six
|
||||
import subprocess
|
||||
import sys
|
||||
import warnings
|
||||
import yaml
|
||||
|
||||
|
||||
DOCKER_CMD = os.environ.get('HEAT_DOCKER_CMD', 'docker')
|
||||
|
||||
|
||||
log = None
|
||||
|
||||
|
||||
def build_response(deploy_stdout, deploy_stderr, deploy_status_code):
|
||||
return {
|
||||
'deploy_stdout': deploy_stdout,
|
||||
'deploy_stderr': deploy_stderr,
|
||||
'deploy_status_code': deploy_status_code,
|
||||
}
|
||||
|
||||
|
||||
def docker_arg_map(key, value):
|
||||
value = str(value).encode('ascii', 'ignore')
|
||||
return {
|
||||
'container_step_config': None,
|
||||
'environment': "--env=%s" % value,
|
||||
'image': value,
|
||||
'net': "--net=%s" % value,
|
||||
'pid': "--pid=%s" % value,
|
||||
'privileged': "--privileged=%s" % 'true' if value else 'false',
|
||||
'restart': "--restart=%s" % value,
|
||||
'user': "--user=%s" % value,
|
||||
'volumes': "--volume=%s" % value,
|
||||
'volumes_from': "--volumes-from=%s" % value,
|
||||
}.get(key, None)
|
||||
|
||||
|
||||
def main(argv=sys.argv):
|
||||
warnings.warn('This hook is deprecated, please use hooks from heat-agents '
|
||||
'repository instead.', DeprecationWarning)
|
||||
|
||||
global log
|
||||
log = logging.getLogger('heat-config')
|
||||
handler = logging.StreamHandler(sys.stderr)
|
||||
handler.setFormatter(
|
||||
logging.Formatter(
|
||||
'[%(asctime)s] (%(name)s) [%(levelname)s] %(message)s'))
|
||||
log.addHandler(handler)
|
||||
log.setLevel('DEBUG')
|
||||
|
||||
c = json.load(sys.stdin)
|
||||
|
||||
input_values = dict((i['name'], i['value']) for i in c.get('inputs', {}))
|
||||
|
||||
if input_values.get('deploy_action') == 'DELETE':
|
||||
json.dump(build_response(
|
||||
'', '', 0), sys.stdout)
|
||||
return
|
||||
|
||||
config = c.get('config', '')
|
||||
if not config:
|
||||
log.debug("No 'config' input found, nothing to do.")
|
||||
json.dump(build_response(
|
||||
'', '', 0), sys.stdout)
|
||||
return
|
||||
|
||||
stdout = []
|
||||
stderr = []
|
||||
deploy_status_code = 0
|
||||
|
||||
# convert config to dict
|
||||
if not isinstance(config, dict):
|
||||
config = yaml.safe_load(config)
|
||||
|
||||
for container in sorted(config):
|
||||
container_name = '%s__%s' % (c['name'], container)
|
||||
cmd = [
|
||||
DOCKER_CMD,
|
||||
'run',
|
||||
'--detach=true',
|
||||
'--name',
|
||||
container_name.encode('ascii', 'ignore'),
|
||||
]
|
||||
image_name = ''
|
||||
for key in sorted(config[container]):
|
||||
# These ones contain a list of values
|
||||
if key in ['environment', 'volumes', 'volumes_from']:
|
||||
for value in config[container][key]:
|
||||
# Somehow the lists get empty values sometimes
|
||||
if type(value) is six.text_type and not value.strip():
|
||||
continue
|
||||
cmd.append(docker_arg_map(key, value))
|
||||
elif key == 'image':
|
||||
image_name = config[container][key].encode('ascii', 'ignore')
|
||||
else:
|
||||
arg = docker_arg_map(key, config[container][key])
|
||||
if arg:
|
||||
cmd.append(arg)
|
||||
|
||||
# Image name must come last.
|
||||
cmd.append(image_name)
|
||||
|
||||
log.debug(' '.join(cmd))
|
||||
subproc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
cmd_stdout, cmd_stderr = subproc.communicate()
|
||||
log.debug(cmd_stdout)
|
||||
log.debug(cmd_stderr)
|
||||
if cmd_stdout:
|
||||
stdout.append(cmd_stdout)
|
||||
if cmd_stderr:
|
||||
stderr.append(cmd_stderr)
|
||||
|
||||
if subproc.returncode:
|
||||
log.error("Error running %s. [%s]\n" % (cmd, subproc.returncode))
|
||||
else:
|
||||
log.debug('Completed %s' % cmd)
|
||||
|
||||
if subproc.returncode != 0:
|
||||
deploy_status_code = subproc.returncode
|
||||
|
||||
json.dump(build_response(
|
||||
'\n'.join(stdout), '\n'.join(stderr), deploy_status_code), sys.stdout)
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv))
|
|
@ -1,145 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
import yaml
|
||||
|
||||
|
||||
CONF_FILE = os.environ.get('HEAT_SHELL_CONFIG',
|
||||
'/var/run/heat-config/heat-config')
|
||||
|
||||
WORKING_DIR = os.environ.get(
|
||||
'HEAT_DOCKER_CMD_WORKING',
|
||||
'/var/lib/heat-config/heat-config-docker-cmd')
|
||||
|
||||
DOCKER_CMD = os.environ.get('HEAT_DOCKER_CMD', 'docker')
|
||||
|
||||
|
||||
log = None
|
||||
|
||||
|
||||
def main(argv=sys.argv):
|
||||
global log
|
||||
log = logging.getLogger('heat-config')
|
||||
handler = logging.StreamHandler(sys.stderr)
|
||||
handler.setFormatter(
|
||||
logging.Formatter(
|
||||
'[%(asctime)s] (%(name)s) [%(levelname)s] %(message)s'))
|
||||
log.addHandler(handler)
|
||||
log.setLevel('DEBUG')
|
||||
|
||||
if not os.path.exists(CONF_FILE):
|
||||
log.warning('No config file %s' % CONF_FILE)
|
||||
return 1
|
||||
|
||||
if not os.path.isdir(WORKING_DIR):
|
||||
os.makedirs(WORKING_DIR, 0o700)
|
||||
|
||||
try:
|
||||
configs = json.load(open(CONF_FILE))
|
||||
except ValueError as e:
|
||||
log.warning('Could not load config json: %s' % e)
|
||||
return 1
|
||||
|
||||
cmd_configs = list(build_configs(configs))
|
||||
try:
|
||||
delete_missing_projects(cmd_configs)
|
||||
for c in cmd_configs:
|
||||
delete_changed_project(c)
|
||||
write_project(c)
|
||||
except Exception as e:
|
||||
log.exception(e)
|
||||
|
||||
|
||||
def build_configs(configs):
|
||||
for c in configs:
|
||||
if c['group'] != 'docker-cmd':
|
||||
continue
|
||||
if not isinstance(c['config'], dict):
|
||||
# convert config to dict
|
||||
c['config'] = yaml.safe_load(c['config'])
|
||||
yield c
|
||||
|
||||
|
||||
def current_projects():
|
||||
for proj_file in os.listdir(WORKING_DIR):
|
||||
if proj_file.endswith('.json'):
|
||||
proj = proj_file[:-5]
|
||||
yield proj
|
||||
|
||||
|
||||
def remove_project(proj):
|
||||
proj_file = os.path.join(WORKING_DIR, '%s.json' % proj)
|
||||
with open(proj_file, 'r') as f:
|
||||
proj_data = json.load(f)
|
||||
for name in extract_container_names(proj, proj_data):
|
||||
remove_container(name)
|
||||
os.remove(proj_file)
|
||||
|
||||
|
||||
def remove_container(name):
|
||||
cmd = [DOCKER_CMD, 'rm', '-f', name]
|
||||
log.debug(' '.join(cmd))
|
||||
subproc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
stdout, stderr = subproc.communicate()
|
||||
log.info(stdout)
|
||||
log.debug(stderr)
|
||||
|
||||
|
||||
def delete_missing_projects(configs):
|
||||
config_names = [c['name'] for c in configs]
|
||||
for proj in current_projects():
|
||||
if proj not in config_names:
|
||||
log.debug('%s no longer exists, deleting containers' % proj)
|
||||
remove_project(proj)
|
||||
|
||||
|
||||
def extract_container_names(proj, proj_data):
|
||||
# For now, assume a docker-compose v1 format where the
|
||||
# root keys are service names
|
||||
for name in sorted(proj_data):
|
||||
yield '%s__%s' % (proj, name)
|
||||
|
||||
|
||||
def delete_changed_project(c):
|
||||
proj = c['name']
|
||||
proj_file = os.path.join(WORKING_DIR, '%s.json' % proj)
|
||||
proj_data = c.get('config', {})
|
||||
if os.path.isfile(proj_file):
|
||||
with open(proj_file, 'r') as f:
|
||||
prev_proj_data = json.load(f)
|
||||
if proj_data != prev_proj_data:
|
||||
log.debug('%s has changed, deleting containers' % proj)
|
||||
remove_project(proj)
|
||||
|
||||
|
||||
def write_project(c):
|
||||
proj = c['name']
|
||||
proj_file = os.path.join(WORKING_DIR, '%s.json' % proj)
|
||||
proj_data = c.get('config', {})
|
||||
|
||||
with os.fdopen(os.open(
|
||||
proj_file, os.O_CREAT | os.O_WRONLY | os.O_TRUNC, 0o600),
|
||||
'w') as f:
|
||||
json.dump(proj_data, f, indent=2)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv))
|
|
@ -1,18 +0,0 @@
|
|||
A hook which uses `docker-compose` to deploy containers.
|
||||
|
||||
A special input 'env_files' can be used with SoftwareConfig and
|
||||
StructuredConfig for docker-compose `env_file` key(s).
|
||||
|
||||
if env_file keys specified in the `docker-compose.yml`, do not
|
||||
exist in input_values supplied, docker-compose will throw an
|
||||
error, as it can't find these files.
|
||||
|
||||
Also, `--parameter-file` option can be used to pass env files from client.
|
||||
|
||||
Example:
|
||||
|
||||
$ openstack stack create test_stack -t example-docker-compose-template.yaml \
|
||||
--parameter-file env_file_0=./common.env \
|
||||
--parameter-file env_file_1=./apps/web.env \
|
||||
--parameter-file env_file_2=./test.env \
|
||||
--parameter-file env_file_3=./busybox.env
|
|
@ -1,2 +0,0 @@
|
|||
os-apply-config
|
||||
os-refresh-config
|
|
@ -1,17 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -x
|
||||
|
||||
SCRIPTDIR=$(dirname $0)
|
||||
|
||||
if [ -f /etc/debian_version ]; then
|
||||
install-packages docker.io
|
||||
update-rc.d docker.io defaults
|
||||
|
||||
elif [ -f /etc/redhat-release ]; then
|
||||
yum -y install docker-io
|
||||
systemctl enable docker.service
|
||||
fi
|
||||
|
||||
pip install -U dpath docker-compose==1.4.0
|
||||
|
||||
install -D -g root -o root -m 0755 ${SCRIPTDIR}/hook-docker-compose.py /var/lib/heat-config/hooks/docker-compose
|
|
@ -1,131 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import ast
|
||||
import dpath
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import six
|
||||
import subprocess
|
||||
import sys
|
||||
import warnings
|
||||
import yaml
|
||||
|
||||
|
||||
WORKING_DIR = os.environ.get('HEAT_DOCKER_COMPOSE_WORKING',
|
||||
'/var/lib/heat-config/heat-config-docker-compose')
|
||||
|
||||
DOCKER_COMPOSE_CMD = os.environ.get('HEAT_DOCKER_COMPOSE_CMD',
|
||||
'docker-compose')
|
||||
|
||||
|
||||
def prepare_dir(path):
|
||||
if not os.path.isdir(path):
|
||||
os.makedirs(path, 0o700)
|
||||
|
||||
|
||||
def write_input_file(file_path, content):
|
||||
prepare_dir(os.path.dirname(file_path))
|
||||
with os.fdopen(os.open(
|
||||
file_path, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as f:
|
||||
f.write(content.encode('utf-8'))
|
||||
|
||||
|
||||
def build_response(deploy_stdout, deploy_stderr, deploy_status_code):
|
||||
return {
|
||||
'deploy_stdout': deploy_stdout,
|
||||
'deploy_stderr': deploy_stderr,
|
||||
'deploy_status_code': deploy_status_code,
|
||||
}
|
||||
|
||||
|
||||
def main(argv=sys.argv):
|
||||
warnings.warn('This hook is deprecated, please use hooks from heat-agents '
|
||||
'repository instead.', DeprecationWarning)
|
||||
log = logging.getLogger('heat-config')
|
||||
handler = logging.StreamHandler(sys.stderr)
|
||||
handler.setFormatter(
|
||||
logging.Formatter(
|
||||
'[%(asctime)s] (%(name)s) [%(levelname)s] %(message)s'))
|
||||
log.addHandler(handler)
|
||||
log.setLevel('DEBUG')
|
||||
|
||||
c = json.load(sys.stdin)
|
||||
|
||||
input_values = dict((i['name'], i['value']) for i in c['inputs'])
|
||||
|
||||
proj = os.path.join(WORKING_DIR, c.get('name'))
|
||||
prepare_dir(proj)
|
||||
|
||||
stdout, stderr = {}, {}
|
||||
|
||||
if input_values.get('deploy_action') == 'DELETE':
|
||||
json.dump(build_response(stdout, stderr, 0), sys.stdout)
|
||||
return
|
||||
|
||||
config = c.get('config', '')
|
||||
if not config:
|
||||
log.debug("No 'config' input found, nothing to do.")
|
||||
json.dump(build_response(stdout, stderr, 0), sys.stdout)
|
||||
return
|
||||
|
||||
# convert config to dict
|
||||
if not isinstance(config, dict):
|
||||
config = ast.literal_eval(json.dumps(yaml.safe_load(config)))
|
||||
|
||||
os.chdir(proj)
|
||||
|
||||
compose_env_files = []
|
||||
for value in dpath.util.values(config, '*/env_file'):
|
||||
if isinstance(value, list):
|
||||
compose_env_files.extend(value)
|
||||
elif isinstance(value, six.string_types):
|
||||
compose_env_files.extend([value])
|
||||
|
||||
input_env_files = {}
|
||||
if input_values.get('env_files'):
|
||||
input_env_files = dict(
|
||||
(i['file_name'], i['content'])
|
||||
for i in ast.literal_eval(input_values.get('env_files')))
|
||||
|
||||
for file in compose_env_files:
|
||||
if file in input_env_files.keys():
|
||||
write_input_file(file, input_env_files.get(file))
|
||||
|
||||
cmd = [
|
||||
DOCKER_COMPOSE_CMD,
|
||||
'up',
|
||||
'-d',
|
||||
'--no-build',
|
||||
]
|
||||
|
||||
log.debug('Running %s' % cmd)
|
||||
|
||||
subproc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
stdout, stderr = subproc.communicate()
|
||||
|
||||
log.debug(stdout)
|
||||
log.debug(stderr)
|
||||
|
||||
if subproc.returncode:
|
||||
log.error("Error running %s. [%s]\n" % (cmd, subproc.returncode))
|
||||
else:
|
||||
log.debug('Completed %s' % cmd)
|
||||
|
||||
json.dump(build_response(stdout, stderr, subproc.returncode), sys.stdout)
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv))
|
|
@ -1,116 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
import yaml
|
||||
|
||||
|
||||
CONF_FILE = os.environ.get('HEAT_SHELL_CONFIG',
|
||||
'/var/run/heat-config/heat-config')
|
||||
|
||||
DOCKER_COMPOSE_DIR = os.environ.get(
|
||||
'HEAT_DOCKER_COMPOSE_WORKING',
|
||||
'/var/lib/heat-config/heat-config-docker-compose')
|
||||
|
||||
DOCKER_COMPOSE_CMD = os.environ.get('HEAT_DOCKER_COMPOSE_CMD',
|
||||
'docker-compose')
|
||||
|
||||
|
||||
def main(argv=sys.argv):
|
||||
log = logging.getLogger('heat-config')
|
||||
handler = logging.StreamHandler(sys.stderr)
|
||||
handler.setFormatter(
|
||||
logging.Formatter(
|
||||
'[%(asctime)s] (%(name)s) [%(levelname)s] %(message)s'))
|
||||
log.addHandler(handler)
|
||||
log.setLevel('DEBUG')
|
||||
|
||||
if not os.path.exists(CONF_FILE):
|
||||
log.error('No config file %s' % CONF_FILE)
|
||||
return 1
|
||||
|
||||
if not os.path.isdir(DOCKER_COMPOSE_DIR):
|
||||
os.makedirs(DOCKER_COMPOSE_DIR, 0o700)
|
||||
|
||||
try:
|
||||
configs = json.load(open(CONF_FILE))
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
try:
|
||||
cleanup_stale_projects(configs)
|
||||
for c in configs:
|
||||
write_compose_config(c)
|
||||
except Exception as e:
|
||||
log.exception(e)
|
||||
|
||||
|
||||
def cleanup_stale_projects(configs):
|
||||
def deployments(configs):
|
||||
for c in configs:
|
||||
yield c['name']
|
||||
|
||||
def compose_projects(compose_dir):
|
||||
for proj in os.listdir(compose_dir):
|
||||
if os.path.isfile(
|
||||
os.path.join(DOCKER_COMPOSE_DIR,
|
||||
'%s/docker-compose.yml' % proj)):
|
||||
yield proj
|
||||
|
||||
def cleanup_containers(project):
|
||||
cmd = [
|
||||
DOCKER_COMPOSE_CMD,
|
||||
'kill'
|
||||
]
|
||||
subproc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
stdout, stderr = subproc.communicate()
|
||||
|
||||
for proj in compose_projects(DOCKER_COMPOSE_DIR):
|
||||
if proj not in deployments(configs):
|
||||
proj_dir = os.path.join(DOCKER_COMPOSE_DIR, proj)
|
||||
os.chdir(proj_dir)
|
||||
cleanup_containers(proj)
|
||||
os.remove('%s/docker-compose.yml' % proj_dir)
|
||||
|
||||
|
||||
def write_compose_config(c):
|
||||
group = c.get('group')
|
||||
if group != 'docker-compose':
|
||||
return
|
||||
|
||||
def prepare_dir(path):
|
||||
if not os.path.isdir(path):
|
||||
os.makedirs(path, 0o700)
|
||||
|
||||
compose_conf = c.get('config', '')
|
||||
if isinstance(compose_conf, dict):
|
||||
yaml_config = yaml.safe_dump(compose_conf, default_flow_style=False)
|
||||
else:
|
||||
yaml_config = compose_conf
|
||||
proj_dir = os.path.join(DOCKER_COMPOSE_DIR, c['name'])
|
||||
prepare_dir(proj_dir)
|
||||
fn = os.path.join(proj_dir, 'docker-compose.yml')
|
||||
with os.fdopen(os.open(fn, os.O_CREAT | os.O_WRONLY | os.O_TRUNC, 0o600),
|
||||
'w') as f:
|
||||
f.write(yaml_config.encode('utf-8'))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv))
|
|
@ -1,25 +0,0 @@
|
|||
A hook which helps write hiera files to disk and creates
|
||||
the hiera.yaml to order them. This is typically used alongside
|
||||
of the puppet hook to generate Hiera in a more composable manner.
|
||||
|
||||
Example:
|
||||
|
||||
ComputeConfig:
|
||||
type: OS::Heat::StructuredConfig
|
||||
properties:
|
||||
group: hiera
|
||||
config:
|
||||
hierarchy:
|
||||
- compute
|
||||
datafiles:
|
||||
compute:
|
||||
debug: true
|
||||
db_connection: foo:/bar
|
||||
# customized hiera goes here...
|
||||
|
||||
This would write out:
|
||||
|
||||
1) An /etc/hiera.yaml config file with compute in the hierarchy.
|
||||
|
||||
2) An /etc/puppet/hieradata/compute.json file loaded with the
|
||||
custom hiera data.
|
|
@ -1 +0,0 @@
|
|||
heat-config
|
|
@ -1,9 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -x
|
||||
|
||||
SCRIPTDIR=$(dirname $0)
|
||||
|
||||
install-packages hiera
|
||||
install -D -g root -o root -m 0755 ${SCRIPTDIR}/hook-hiera.py /var/lib/heat-config/hooks/hiera
|
||||
|
||||
ln -f -s /etc/puppet/hiera.yaml /etc/hiera.yaml
|
|
@ -1,88 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
|
||||
HIERA_DATADIR = os.environ.get('HEAT_PUPPET_HIERA_DATADIR',
|
||||
'/etc/puppet/hieradata')
|
||||
HIERA_CONFIG = os.environ.get('HEAT_HIERA_CONFIG', '/etc/puppet/hiera.yaml')
|
||||
|
||||
HIERA_CONFIG_BASE = """
|
||||
---
|
||||
:backends:
|
||||
- json
|
||||
:json:
|
||||
:datadir: %(datadir)s
|
||||
:hierarchy:
|
||||
""" % {'datadir': HIERA_DATADIR}
|
||||
|
||||
|
||||
def prepare_dir(path):
|
||||
if not os.path.isdir(path):
|
||||
os.makedirs(path, 0o700)
|
||||
|
||||
|
||||
def main(argv=sys.argv):
|
||||
warnings.warn('This hook is deprecated, please use hooks from heat-agents '
|
||||
'repository instead.', DeprecationWarning)
|
||||
|
||||
log = logging.getLogger('heat-config')
|
||||
handler = logging.StreamHandler(sys.stderr)
|
||||
handler.setFormatter(
|
||||
logging.Formatter(
|
||||
'[%(asctime)s] (%(name)s) [%(levelname)s] %(message)s'))
|
||||
log.addHandler(handler)
|
||||
log.setLevel('DEBUG')
|
||||
|
||||
c = json.load(sys.stdin)['config']
|
||||
|
||||
prepare_dir(HIERA_DATADIR)
|
||||
|
||||
hiera_config_file = os.path.join(HIERA_CONFIG)
|
||||
|
||||
# allow the end user to order the hiera config as they wish
|
||||
if 'hierarchy' in c:
|
||||
with os.fdopen(os.open(hiera_config_file,
|
||||
os.O_CREAT | os.O_TRUNC | os.O_WRONLY, 0o600),
|
||||
'w') as config_file:
|
||||
config_file.write(HIERA_CONFIG_BASE)
|
||||
for item in c['hierarchy']:
|
||||
config_file.write(' - %s\n' % item)
|
||||
|
||||
# write out the datafiles as YAML
|
||||
if 'datafiles' in c:
|
||||
for name, data in c['datafiles'].iteritems():
|
||||
hiera_data = os.path.join(HIERA_DATADIR, '%s.json' % name)
|
||||
with os.fdopen(os.open(hiera_data,
|
||||
os.O_CREAT | os.O_TRUNC | os.O_WRONLY,
|
||||
0o600),
|
||||
'w') as hiera_data_file:
|
||||
json.dump(data, hiera_data_file, indent=4, sort_keys=True)
|
||||
|
||||
response = {
|
||||
'deploy_stdout': '',
|
||||
'deploy_stderr': '',
|
||||
'deploy_status_code': 0,
|
||||
}
|
||||
|
||||
json.dump(response, sys.stdout)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv))
|
|
@ -1,19 +0,0 @@
|
|||
A hook which helps write JSON files to disk for configuration or use
|
||||
with ad-hoc scripts. The data files are written to the named file
|
||||
location for each section listed under 'config'.
|
||||
|
||||
Multiple JSON files can be written out in this manner.
|
||||
|
||||
Example:
|
||||
|
||||
JsonConfig:
|
||||
type: OS::Heat::StructuredConfig
|
||||
properties:
|
||||
group: json-file
|
||||
config:
|
||||
/tmp/foo:
|
||||
- bar
|
||||
- bar2
|
||||
|
||||
This would write out a JSON files at
|
||||
/tmp/foo containing a JSON representation of ['bar', 'bar2'].
|
|
@ -1 +0,0 @@
|
|||
heat-config
|
|
@ -1,6 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -x
|
||||
|
||||
SCRIPTDIR=$(dirname $0)
|
||||
|
||||
install -D -g root -o root -m 0755 ${SCRIPTDIR}/hook-json-file.py /var/lib/heat-config/hooks/json-file
|
|
@ -1,55 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
|
||||
def prepare_dir(path):
|
||||
if not os.path.isdir(path):
|
||||
os.makedirs(path, 0o700)
|
||||
|
||||
|
||||
def main(argv=sys.argv):
|
||||
warnings.warn('This hook is deprecated, please use hooks from heat-agents '
|
||||
'repository instead.', DeprecationWarning)
|
||||
log = logging.getLogger('heat-config')
|
||||
handler = logging.StreamHandler(sys.stderr)
|
||||
handler.setFormatter(
|
||||
logging.Formatter(
|
||||
'[%(asctime)s] (%(name)s) [%(levelname)s] %(message)s'))
|
||||
log.addHandler(handler)
|
||||
log.setLevel('DEBUG')
|
||||
|
||||
c = json.load(sys.stdin)['config']
|
||||
|
||||
for fname in c.keys():
|
||||
prepare_dir(os.path.dirname(fname))
|
||||
data = c.get(fname)
|
||||
with open(fname, 'w') as json_data_file:
|
||||
json.dump(data, json_data_file, indent=4, sort_keys=True)
|
||||
|
||||
response = {
|
||||
'deploy_stdout': '',
|
||||
'deploy_stderr': '',
|
||||
'deploy_status_code': 0,
|
||||
}
|
||||
|
||||
json.dump(response, sys.stdout)
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv))
|
|
@ -1,22 +0,0 @@
|
|||
This hook uses the kubelet agent from the kubernetes project to provision
|
||||
containers. The StructuredConfig resource data represents a pod of containers
|
||||
to be provisioned.
|
||||
|
||||
The files have the following purpose:
|
||||
|
||||
- extra-data.d/50-docker-images allows an archive file of docker images to
|
||||
be included in the dib image
|
||||
|
||||
- install.d/50-heat-config-kubelet installs kubernetes for redhat based
|
||||
distros during dib image build, along with the required systemd and config
|
||||
files required to enable a working kubelet service on the host
|
||||
|
||||
- install.d/hook-kubelet.py polls docker images and containers until the
|
||||
expected kubelet-provisioned containers are running (or a timeout occurs)
|
||||
|
||||
- os-refresh-config/configure.d/50-heat-config-kubelet runs before
|
||||
55-heat-config (and the kubelet hook it triggers). This orc script writes
|
||||
out all pod definition files for the pods that should currently be running.
|
||||
Kubelet is configured to monitor the directory containing these files, so
|
||||
the current running containers will change when kubelet acts on these
|
||||
config changes
|
|
@ -1,2 +0,0 @@
|
|||
os-apply-config
|
||||
os-refresh-config
|
|
@ -1,12 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -eu
|
||||
set -o pipefail
|
||||
|
||||
if [ -z "${HEAT_DOCKER_IMAGE_ARCHIVE:-}" ]; then
|
||||
echo "HEAT_DOCKER_IMAGE_ARCHIVE not set for heat-config-kubelet element" >&2
|
||||
exit 0
|
||||
fi
|
||||
|
||||
sudo mkdir -p $TMP_MOUNT_PATH/opt/heat-docker
|
||||
sudo cp $HEAT_DOCKER_IMAGE_ARCHIVE $TMP_MOUNT_PATH/opt/heat-docker/images.tar
|
|
@ -1,82 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -eux
|
||||
|
||||
if [[ "rhel rhel7 centos7 fedora" =~ "$DISTRO_NAME" ]]; then
|
||||
yum -y install --enablerepo=updates-testing kubernetes bridge-utils
|
||||
|
||||
cat > /etc/sysconfig/network-scripts/ifcfg-cbr0 <<EOF
|
||||
DEVICE=cbr0
|
||||
TYPE=Bridge
|
||||
IPADDR=10.240.1.1
|
||||
NETMASK=255.255.255.0
|
||||
ONBOOT=yes
|
||||
STP=yes
|
||||
MTU=1450
|
||||
|
||||
# With the default forwarding delay of 15 seconds,
|
||||
# many operations in a 'docker build' will simply timeout
|
||||
# before the bridge starts forwarding.
|
||||
DELAY=2
|
||||
EOF
|
||||
|
||||
cat > /etc/sysconfig/network-scripts/route-cbr0 <<EOF
|
||||
10.240.0.0/16 dev cbr0 scope link src 10.240.1.1
|
||||
EOF
|
||||
|
||||
# defer docker starting until cbr0 is up
|
||||
cat > /etc/systemd/system/docker.service <<EOF
|
||||
.include /usr/lib/systemd/system/docker.service
|
||||
[Unit]
|
||||
After=network-online.target docker.socket
|
||||
EOF
|
||||
|
||||
cat > /etc/systemd/system/heat-config-kubelet-nat-rule.service <<EOF
|
||||
[Unit]
|
||||
Description=iptables rule to allow nat masquerading out of 10.240.1.0/24
|
||||
|
||||
[Service]
|
||||
ExecStart=/usr/sbin/iptables -t nat -A POSTROUTING -o eth0 -s 10.240.1.0/24 -j MASQUERADE
|
||||
Type=oneshot
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
|
||||
if [ -f "/opt/heat-docker/images.tar" ]; then
|
||||
cat > /etc/systemd/system/heat-config-kubelet-load-images.service <<EOF
|
||||
[Unit]
|
||||
Description=Call docker load on /opt/heat-config/images.tar
|
||||
After=docker.service
|
||||
Before=os-collect-config.service kubelet.service
|
||||
|
||||
[Service]
|
||||
ExecStart=/bin/docker load -i /opt/heat-docker/images.tar
|
||||
ExecStart=/bin/rm -f /opt/heat-docker/images.tar
|
||||
Type=oneshot
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
systemctl enable heat-config-kubelet-load-images.service
|
||||
fi
|
||||
|
||||
|
||||
cat > /etc/sysconfig/docker <<EOF
|
||||
OPTIONS=--selinux-enabled --bridge cbr0 --mtu 1450 --iptables=false --insecure-registry 192.168.20.112:5001
|
||||
EOF
|
||||
|
||||
sed -e 's|KUBELET_ARGS=""|KUBELET_ARGS="--config=/var/lib/heat-config/heat-config-kubelet/kubelet-manifests"|g' -i /etc/kubernetes/kubelet
|
||||
sed -e '/KUBE_ETCD_SERVERS/ s/^#*/#/' -i /etc/kubernetes/config
|
||||
systemctl disable docker.service
|
||||
systemctl enable docker.service
|
||||
systemctl enable kubelet.service
|
||||
systemctl enable heat-config-kubelet-nat-rule.service
|
||||
systemctl disable firewalld
|
||||
|
||||
SCRIPTDIR=$(dirname $0)
|
||||
install -D -g root -o root -m 0755 ${SCRIPTDIR}/hook-kubelet.py /var/lib/heat-config/hooks/kubelet
|
||||
|
||||
else
|
||||
echo "Distribution '$DISTRO_NAME' is not supported"
|
||||
exit 1
|
||||
fi
|
|
@ -1,215 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import cStringIO
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import six
|
||||
import sys
|
||||
import time
|
||||
import warnings
|
||||
|
||||
try:
|
||||
import docker
|
||||
except ImportError:
|
||||
docker = None
|
||||
|
||||
|
||||
DOCKER_BASE_URL = os.environ.get('DOCKER_HOST',
|
||||
'unix:///var/run/docker.sock')
|
||||
|
||||
|
||||
DEFAULT_IMAGES_TIMEOUT = 600
|
||||
|
||||
|
||||
DEFAULT_CONTAINERS_TIMEOUT = 120
|
||||
|
||||
|
||||
DEFAULT_POLL_PERIOD = 5
|
||||
|
||||
|
||||
def get_client(log):
|
||||
kwargs = {}
|
||||
kwargs['base_url'] = DOCKER_BASE_URL
|
||||
log.debug('Connecting to %s' % DOCKER_BASE_URL)
|
||||
client = docker.Client(**kwargs)
|
||||
client._version = client.version()['ApiVersion']
|
||||
log.debug('Connected to version %s' % client._version)
|
||||
return client
|
||||
|
||||
|
||||
def id_to_pod_name_part(config_id):
|
||||
return config_id.replace('-', '')[:15]
|
||||
|
||||
|
||||
def container_pattern(config_id, container_name):
|
||||
return '^/k8s_%s\.[0-9a-z]{8}_%s' % (
|
||||
container_name, id_to_pod_name_part(config_id))
|
||||
|
||||
|
||||
def required_images(c):
|
||||
containers = c['config'].get('containers', [])
|
||||
return set(container['image'] for container in containers)
|
||||
|
||||
|
||||
def required_container_patterns(c):
|
||||
config_id = c['id']
|
||||
containers = c['config'].get('containers', [])
|
||||
return dict((container['name'], container_pattern(
|
||||
config_id, container['name'])) for container in containers)
|
||||
|
||||
|
||||
def configure_logging():
|
||||
log = logging.getLogger('heat-config')
|
||||
log.setLevel('DEBUG')
|
||||
formatter = logging.Formatter(
|
||||
'[%(asctime)s] (%(name)s) [%(levelname)s] %(message)s')
|
||||
|
||||
# debug log to stderr
|
||||
handler = logging.StreamHandler(sys.stderr)
|
||||
handler.setFormatter(formatter)
|
||||
log.addHandler(handler)
|
||||
|
||||
deploy_stdout = cStringIO.StringIO()
|
||||
handler = logging.StreamHandler(deploy_stdout)
|
||||
handler.setFormatter(formatter)
|
||||
handler.setLevel('DEBUG')
|
||||
log.addHandler(handler)
|
||||
|
||||
deploy_stderr = cStringIO.StringIO()
|
||||
handler = logging.StreamHandler(deploy_stderr)
|
||||
handler.setFormatter(formatter)
|
||||
handler.setLevel('WARN')
|
||||
log.addHandler(handler)
|
||||
|
||||
return log, deploy_stdout, deploy_stderr
|
||||
|
||||
|
||||
def wait_required_images(client, log, images_timeout, poll_period, images):
|
||||
log.info(
|
||||
'Waiting for images: %s' % ', '.join(images))
|
||||
timeout = time.time() + images_timeout
|
||||
|
||||
def image_prefixes(images):
|
||||
for image in images:
|
||||
if ':' in image:
|
||||
yield image
|
||||
else:
|
||||
yield '%s:' % image
|
||||
|
||||
matching_prefixes = list(image_prefixes(images))
|
||||
|
||||
def image_names(all_images):
|
||||
for image in all_images:
|
||||
for name in image['RepoTags']:
|
||||
yield name
|
||||
|
||||
while matching_prefixes:
|
||||
all_images = list(image_names(client.images()))
|
||||
for image_prefix in matching_prefixes:
|
||||
for image in all_images:
|
||||
if image.startswith(image_prefix):
|
||||
log.info('Found image: %s' % image)
|
||||
matching_prefixes.remove(image_prefix)
|
||||
|
||||
if time.time() > timeout:
|
||||
raise Exception('Timed out after %s seconds waiting for '
|
||||
'matching images: %s' % (
|
||||
images_timeout,
|
||||
', '.join(matching_prefixes)))
|
||||
if poll_period:
|
||||
time.sleep(poll_period)
|
||||
|
||||
|
||||
def wait_required_containers(client, log,
                             containers_timeout, poll_period,
                             container_patterns):
    """Block until some container name matches every required pattern.

    :param client: docker-style client exposing ``containers()`` returning
        dicts with a ``Names`` list.
    :param log: logger for progress messages.
    :param containers_timeout: seconds to wait before giving up.
    :param poll_period: seconds to sleep between polls (falsy disables sleep).
    :param container_patterns: mapping whose values are regex strings; each
        must ``re.match`` at least one container name.
    :raises Exception: if the timeout elapses with patterns still unmatched.
    """
    patterns = container_patterns.values()
    log.info(
        'Waiting for containers matching: %s' % ', '.join(patterns))

    timeout = time.time() + containers_timeout

    def containers_names(containers):
        # Flatten: each container record may carry several names.
        for container in containers:
            for name in container['Names']:
                yield name

    # Keyed by the pattern string itself, as in the log messages below.
    waiting_for = dict((v, re.compile(v)) for v in patterns)
    while waiting_for:
        for name in containers_names(client.containers()):
            # Iterate a snapshot: deleting from the dict while iterating it
            # (the old six.iteritems form) raises RuntimeError on Python 3.
            for k, v in list(waiting_for.items()):
                if v.match(name):
                    log.info('Pattern %s matches: %s' % (k, name))
                    del waiting_for[k]
                    break
        if not waiting_for:
            # All patterns satisfied; skip the timeout check so a slow final
            # pass cannot raise after success.
            break
        if time.time() > timeout:
            # Fixed: 'waiting_for.keys' was passed unbound (missing call),
            # making join() fail instead of reporting the patterns.
            raise Exception('Timed out after %s seconds waiting for '
                            'matching containers: %s' % (
                                containers_timeout,
                                ', '.join(waiting_for.keys())))
        if poll_period:
            time.sleep(poll_period)
|
||||
|
||||
|
||||
def main(argv=sys.argv, sys_stdin=sys.stdin, sys_stdout=sys.stdout):
    """Hook entry point: wait for a pod's images and containers.

    Reads the software config JSON from *sys_stdin* and writes the
    deployment response JSON (captured stdout/stderr plus status code)
    to *sys_stdout*.
    """
    warnings.warn('This hook is deprecated, please use hooks from heat-agents '
                  'repository instead.', DeprecationWarning)

    (log, deploy_stdout, deploy_stderr) = configure_logging()
    client = get_client(log)

    # Fixed: read from the injected sys_stdin parameter rather than the
    # global sys.stdin -- the parameter exists so callers/tests can supply
    # their own stream, and it was silently being ignored.
    c = json.load(sys_stdin)

    images_timeout = c['options'].get(
        'images_timeout', DEFAULT_IMAGES_TIMEOUT)
    containers_timeout = c['options'].get(
        'containers_timeout', DEFAULT_CONTAINERS_TIMEOUT)
    poll_period = c['options'].get(
        'poll_period', DEFAULT_POLL_PERIOD)

    # 0 == success; flipped to 1 on any failure below.
    pod_state = 0

    try:
        wait_required_images(
            client,
            log,
            images_timeout,
            poll_period,
            required_images(c))

        wait_required_containers(
            client,
            log,
            containers_timeout,
            poll_period,
            required_container_patterns(c))

    except Exception as ex:
        pod_state = 1
        log.error('An error occurred deploying pod %s' % c['id'])
        log.exception(ex)

    response = {
        'deploy_stdout': deploy_stdout.getvalue(),
        'deploy_stderr': deploy_stderr.getvalue(),
        'deploy_status_code': pod_state,
    }
    json.dump(response, sys_stdout)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    sys.exit(main(sys.argv))
|
|
@ -1,72 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import glob
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
import requests
|
||||
|
||||
# Directory where kubelet pod manifests are written; presumably watched by
# the kubelet for pod definitions -- confirm against the kubelet config.
MANIFESTS_DIR = os.environ.get('HEAT_KUBELET_MANIFESTS',
                               '/var/lib/heat-config/heat-config-kubelet'
                               '/kubelet-manifests')
# Aggregated heat-config JSON delivered to the instance.
CONF_FILE = os.environ.get('HEAT_SHELL_CONFIG',
                           '/var/run/heat-config/heat-config')
|
||||
|
||||
|
||||
def main(argv=sys.argv):
    """Regenerate kubelet manifests from the heat-config file.

    Clears previously written manifests, then writes one manifest per
    kubelet-group config found in CONF_FILE. Returns 1 if the config file
    is missing.
    """
    log = logging.getLogger('heat-config')
    handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(
        logging.Formatter(
            '[%(asctime)s] (%(name)s) [%(levelname)s] %(message)s'))
    log.addHandler(handler)
    log.setLevel('DEBUG')

    if not os.path.exists(CONF_FILE):
        log.error('No config file %s' % CONF_FILE)
        return 1

    if not os.path.isdir(MANIFESTS_DIR):
        os.makedirs(MANIFESTS_DIR, 0o700)

    # Fixed: the glob pattern was missing '% MANIFESTS_DIR', so it globbed
    # the literal string '%s/*.json' and never removed stale manifests.
    for f in glob.glob('%s/*.json' % MANIFESTS_DIR):
        os.remove(f)

    try:
        configs = json.load(open(CONF_FILE))
    except ValueError:
        # Best-effort: an unparsable config file simply yields no manifests.
        pass
    else:
        for c in configs:
            try:
                write_manifest(c)
            except Exception as e:
                # One bad config must not prevent writing the others.
                log.exception(e)
|
||||
|
||||
|
||||
def write_manifest(c):
    """Persist one kubelet-group config as a JSON manifest file.

    Configs belonging to any other group are ignored. The file is created
    under MANIFESTS_DIR as '<id>.json' with mode 0600.
    """
    if c.get('group') != 'kubelet':
        return

    path = os.path.join(MANIFESTS_DIR, '%s.json' % c['id'])
    fd = os.open(path, os.O_CREAT | os.O_WRONLY, 0o600)
    with os.fdopen(fd, 'w') as out:
        json.dump(c['config'], out, indent=2)
|
||||
|
||||
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    sys.exit(main(sys.argv))
|
|
@ -1,16 +0,0 @@
|
|||
A hook which invokes ``puppet apply`` on the provided configuration.
|
||||
|
||||
Config inputs are passed in as facts and/or using hiera, and output values
|
||||
are read from written-out files.
|
||||
|
||||
Hook Options:
|
||||
-------------
|
||||
enable_facter: default True. Set to True to pass puppet inputs via Facter
enable_hiera: default False. Set to True to pass puppet inputs via Hiera
|
||||
modulepath: If set, puppet will use this filesystem path to load modules
|
||||
tags: If set, puppet will use the specified value(s) to apply only a
|
||||
subset of the catalog for a given manifest.
|
||||
enable_debug: default False. Set to True to run puppet apply in debug mode
|
||||
and have it captured on the node to /var/log/puppet/heat-debug.log
|
||||
enable_verbose: default False. Set to True to run puppet apply in verbose mode
|
||||
and have it captured on the node to /var/log/puppet/heat-verbose.log
|
|
@ -1 +0,0 @@
|
|||
heat-config
|
|
@ -1,7 +0,0 @@
|
|||
#!/bin/bash
# diskimage-builder element install script for the puppet hook: pull in the
# puppet package and copy the hook into the heat-config hooks directory.
set -x

SCRIPTDIR=$(dirname $0)

install-packages puppet
# 0755 root:root so the agent can execute the hook for group "puppet".
install -D -g root -o root -m 0755 ${SCRIPTDIR}/hook-puppet.py /var/lib/heat-config/hooks/puppet
|
|
@ -1,186 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
|
||||
# Directory where the rendered .pp manifests are written before being applied.
WORKING_DIR = os.environ.get('HEAT_PUPPET_WORKING',
                             '/var/lib/heat-config/heat-config-puppet')
# Directory where manifests write their heat output values.
OUTPUTS_DIR = os.environ.get('HEAT_PUPPET_OUTPUTS',
                             '/var/run/heat-config/heat-config-puppet')
# Puppet executable to invoke (overridable, e.g. for testing).
PUPPET_CMD = os.environ.get('HEAT_PUPPET_CMD', 'puppet')
# Where per-deployment stdout/stderr logs of 'puppet apply' are stored.
PUPPET_LOGDIR = os.environ.get(
    'HEAT_PUPPET_LOGDIR', '/var/run/heat-config/deployed'
)
# Hiera data directory used when the 'enable_hiera' option is set.
HIERA_DATADIR = os.environ.get('HEAT_PUPPET_HIERA_DATADIR',
                               '/etc/puppet/hieradata')
|
||||
|
||||
|
||||
def prepare_dir(path):
    """Ensure *path* exists as a directory (created with mode 0700)."""
    if os.path.isdir(path):
        return
    os.makedirs(path, 0o700)
|
||||
|
||||
|
||||
def get_hostname_f(log):
    """Return the FQDN reported by `hostname -f`, or None on failure."""
    proc = subprocess.Popen(['hostname', '-f'], stdout=subprocess.PIPE)
    output = proc.communicate()[0]
    if proc.returncode != 0:
        log.warn("Failed to retrieve 'hostname -f' output")
        return None
    return output.strip()
|
||||
|
||||
|
||||
def main(argv=sys.argv):
    """Apply a heat software config with `puppet apply`.

    Reads the config JSON from stdin, exposes inputs as Facter facts and/or
    Hiera data, writes the config body as a .pp manifest, runs puppet, and
    emits the deployment response JSON (outputs + status code) on stdout.
    """
    warnings.warn('This hook is deprecated, please use hooks from heat-agents '
                  'repository instead.', DeprecationWarning)
    log = logging.getLogger('heat-config')
    handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(
        logging.Formatter(
            '[%(asctime)s] (%(name)s) [%(levelname)s] %(message)s'))
    log.addHandler(handler)
    log.setLevel('DEBUG')

    prepare_dir(OUTPUTS_DIR)
    prepare_dir(WORKING_DIR)
    os.chdir(WORKING_DIR)

    # Full software config document, delivered by the agent on stdin.
    c = json.load(sys.stdin)

    use_hiera = c['options'].get('enable_hiera', False)
    use_facter = c['options'].get('enable_facter', True)
    modulepath = c['options'].get('modulepath')
    tags = c['options'].get('tags')
    debug = c['options'].get('enable_debug', False)
    verbose = c['options'].get('enable_verbose', False)

    facts = {}
    hiera = {}

    # fqdn fact is always provided when resolvable, independent of inputs.
    fqdn = get_hostname_f(log)
    if fqdn:
        facts['FACTER_fqdn'] = fqdn

    # Map each config input to a FACTER_* env var and/or a hiera key.
    for input in c['inputs']:
        input_name = input['name']
        input_value = input.get('value', '')
        if use_facter:
            fact_name = 'FACTER_%s' % input_name
            facts[fact_name] = input_value
        if use_hiera:
            hiera[input_name] = input_value

    if use_hiera:
        prepare_dir(HIERA_DATADIR)
        hiera_data = os.path.join(HIERA_DATADIR,
                                  'heat_config_%s.json' % c['name'])
        # 0600: hiera data may contain secrets.
        with os.fdopen(os.open(hiera_data,
                               os.O_CREAT | os.O_TRUNC | os.O_WRONLY, 0o600),
                       'w') as hiera_file:
            # NOTE(review): .encode() implies this script targets Python 2
            # (bytes into a text-mode file) -- confirm before porting.
            hiera_file.write(json.dumps(hiera).encode('utf8'))
        facts['FACTER_deploy_config_name'] = c['name']

    fn = os.path.join(WORKING_DIR, '%s.pp' % c['id'])
    heat_outputs_path = os.path.join(OUTPUTS_DIR, c['id'])
    # The manifest reads this fact to know where to write its outputs.
    facts['FACTER_heat_outputs_path'] = heat_outputs_path

    # Human-readable env prefix used only in the debug log line below.
    env_debug = ' '.join('%s="%s" ' % (k, v) for k, v in facts.items())

    env = os.environ.copy()
    env.update(facts)

    # Write the manifest executable (0700) and truncate any previous copy.
    with os.fdopen(os.open(fn, os.O_CREAT | os.O_TRUNC | os.O_WRONLY, 0o700),
                   'w') as f:
        f.write(c.get('config', '').encode('utf-8'))

    cmd = [PUPPET_CMD, 'apply', '--detailed-exitcodes', fn]
    # This is the default log destination to print out to the console and
    # captured by heat via the subprocess method below.
    cmd.insert(-1, '--logdest')
    cmd.insert(-1, 'console')
    # insert(-1, ...) keeps the manifest path as the final argument.
    if modulepath:
        cmd.insert(-1, '--modulepath')
        cmd.insert(-1, modulepath)
    if tags:
        cmd.insert(-1, '--tags')
        cmd.insert(-1, tags)
    if debug:
        cmd.insert(-1, '--debug')
        cmd.insert(-1, '--logdest')
        cmd.insert(-1, '/var/log/puppet/heat-debug.log')
    if verbose:
        cmd.insert(-1, '--verbose')
        cmd.insert(-1, '--logdest')
        cmd.insert(-1, '/var/log/puppet/heat-verbose.log')

    prepare_dir(PUPPET_LOGDIR)
    # creation_time like '2017-07-20T09:19:39' becomes a filesystem-safe
    # '2017-07-20-09-19-39' prefix for the per-run log files.
    timestamp = re.sub('[:T]', '-', c['creation_time'])
    base_path = os.path.join(
        PUPPET_LOGDIR, '{timestamp}-{c[id]}'.format(**locals())
    )
    stdout_log = open('{0}-stdout.log'.format(base_path), 'w')
    stderr_log = open('{0}-stderr.log'.format(base_path), 'w')
    log.debug('Running %s %s' % (env_debug, ' '.join(cmd)))
    try:
        subproc = subprocess.Popen(
            cmd, stdout=stdout_log, stderr=stderr_log, env=env
        )
        subproc.wait()
    except OSError:
        # puppet binary missing: treat as "not yet installed", not failure.
        log.warn('puppet not installed yet')
        return
    finally:
        stdout_log.close()
        stderr_log.close()

    log.info('Return code %s' % subproc.returncode)
    response = {}
    # Read back the captured logs so they can be returned to heat.
    for i in 'stdout', 'stderr':
        with open('{0}-{1}.log'.format(base_path, i)) as logfile:
            content = logfile.read()
        if content.strip():
            log.info(content)
        response['deploy_{0}'.format(i)] = content

    # returncode of 2 means there were successful changes
    if subproc.returncode in (0, 2):
        returncode = 0
        log.info('Completed %s' % fn)
    else:
        returncode = subproc.returncode
        log.error("Error running %s. [%s]\n" % (fn, subproc.returncode))

    # Collect declared outputs written by the manifest; absent files are
    # simply omitted from the response.
    for output in c.get('outputs') or []:
        output_name = output['name']
        try:
            with open('%s.%s' % (heat_outputs_path, output_name)) as out:
                response[output_name] = out.read()
        except IOError:
            pass

    response.update({
        'deploy_status_code': returncode,
    })
    json.dump(response, sys.stdout)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    sys.exit(main(sys.argv))
|
|
@ -1,3 +0,0 @@
|
|||
A hook which uses salt library to apply the provided configuration
|
||||
as a state. Config inputs are passed as opts and output values are
|
||||
read from the yaml returned.
|
|
@ -1 +0,0 @@
|
|||
heat-config
|
|
@ -1,8 +0,0 @@
|
|||
#!/bin/bash
# diskimage-builder element install script for the salt hook: salt-minion
# provides the salt python libraries the hook imports.
set -x

SCRIPTDIR=$(dirname $0)

install-packages salt-minion

# 0755 root:root so the agent can execute the hook for group "salt".
install -D -g root -o root -m 0755 ${SCRIPTDIR}/hook-salt.py /var/lib/heat-config/hooks/salt
|
|
@ -1,132 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
import salt.cli.caller
|
||||
import salt.config
|
||||
from salt import exceptions
|
||||
import warnings
|
||||
import yaml
|
||||
|
||||
|
||||
# Working directory where rendered .sls state files are written and applied.
WORKING_DIR = os.environ.get('HEAT_SALT_WORKING',
                             '/var/lib/heat-config/heat-config-salt')
# Minion configuration used to seed the masterless salt opts.
SALT_MINION_CONFIG = os.environ.get('SALT_MINION_CONFIG',
                                    '/etc/salt/minion')
|
||||
|
||||
|
||||
def prepare_dir(path):
    """Ensure *path* exists as a directory (created with mode 0700)."""
    if os.path.isdir(path):
        return
    os.makedirs(path, 0o700)
|
||||
|
||||
|
||||
def main(argv=sys.argv):
    """Apply a heat software config as a masterless Salt state.

    Reads the config JSON from stdin, writes it as an .sls state file,
    applies it with a local salt caller, and emits the deployment response
    JSON (outputs + status code) on stdout.
    """
    warnings.warn('This hook is deprecated, please use hooks from heat-agents '
                  'repository instead.', DeprecationWarning)

    log = logging.getLogger('heat-config')
    handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(
        logging.Formatter(
            '[%(asctime)s] (%(name)s) [%(levelname)s] %(message)s'))
    log.addHandler(handler)
    log.setLevel('DEBUG')

    prepare_dir(WORKING_DIR)
    os.chdir(WORKING_DIR)

    c = json.load(sys.stdin)

    # Run masterless against the working dir: the state id is the config id.
    opts = salt.config.minion_config(SALT_MINION_CONFIG)
    opts['file_roots'] = {'base': [WORKING_DIR]}
    opts['file_client'] = 'local'
    opts['local'] = 'local'
    opts['fun'] = 'state.sls'
    opts['arg'] = [c['id']]

    # Expose config inputs to the state via opts.
    for item in c['inputs']:
        opts[item['name']] = item.get('value', '')

    state_file = '%s.sls' % c['id']
    config = c.get('config', '')

    # The config may arrive pre-parsed (dict) or as raw YAML text.
    if isinstance(config, dict):
        yaml_config = yaml.safe_dump(config, default_flow_style=False)
    else:
        yaml_config = config

    fn = os.path.join(WORKING_DIR, state_file)
    with os.fdopen(os.open(fn, os.O_CREAT | os.O_WRONLY, 0o700), 'w') as f:
        f.write(yaml_config.encode('utf-8'))

    caller = salt.cli.caller.Caller.factory(opts)

    log.debug('Applying Salt state %s' % state_file)

    stdout, stderr = None, None
    ret = {}

    try:
        ret = caller.call()
    except exceptions.SaltInvocationError as err:
        # Fixed typo in the message ('Salt sate' -> 'Salt state').
        log.error(
            'Salt invocation error while applying Salt state %s' % state_file)
        stderr = err

    if ret:
        log.info('Results: %s' % ret)
        output = yaml.safe_dump(ret['return'])

        # returncode of 0 means there were successful changes
        if ret['retcode'] == 0:
            log.info('Completed applying salt state %s' % state_file)
            stdout = output
        else:
            # Salt doesn't always return sane return codes so we have to
            # check individual results
            runfailed = False
            for data in ret['return'].values():
                if not data['result']:
                    runfailed = True
                    break
            if runfailed:
                log.error('Error applying Salt state %s. [%s]\n'
                          % (state_file, ret['retcode']))
                stderr = output
            else:
                ret['retcode'] = 0
                stdout = output

    response = {}

    for output_spec in c.get('outputs', []):
        output_name = output_spec['name']
        response[output_name] = ret.get(output_name)

    response.update({
        'deploy_stdout': stdout,
        'deploy_stderr': stderr,
        # Fixed: when caller.call() raised, ret stayed {} and the old
        # ret['retcode'] lookup crashed with KeyError before the failure
        # response could be emitted; default to 1 (failure) instead.
        'deploy_status_code': ret.get('retcode', 1),
    })
    json.dump(response, sys.stdout)
|
||||
|
||||
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    sys.exit(main(sys.argv))
|
|
@ -1,3 +0,0 @@
|
|||
A hook which invokes the provided configuration as an executable script.
|
||||
Config inputs are passed in as environment variables, and output values are
|
||||
read from written-out files.
|
|
@ -1 +0,0 @@
|
|||
heat-config
|
|
@ -1,6 +0,0 @@
|
|||
#!/bin/bash
# diskimage-builder element install script for the script hook; no extra
# packages are needed since configs are executed directly as scripts.
set -x

SCRIPTDIR=$(dirname $0)

# 0755 root:root so the agent can execute the hook for group "script".
install -D -g root -o root -m 0755 ${SCRIPTDIR}/hook-script.py /var/lib/heat-config/hooks/script
|
|
@ -1,99 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
# Working directory where config scripts are written before execution.
WORKING_DIR = os.environ.get('HEAT_SCRIPT_WORKING',
                             '/var/lib/heat-config/heat-config-script')
# Directory where executed scripts write their heat output values.
OUTPUTS_DIR = os.environ.get('HEAT_SCRIPT_OUTPUTS',
                             '/var/run/heat-config/heat-config-script')
|
||||
|
||||
|
||||
def prepare_dir(path):
    """Ensure *path* exists as a directory (created with mode 0700)."""
    if os.path.isdir(path):
        return
    os.makedirs(path, 0o700)
|
||||
|
||||
|
||||
def main(argv=sys.argv):
    """Execute a heat software config as a standalone script.

    Reads the config JSON from stdin, exposes inputs as environment
    variables, writes the config body as an executable file, runs it, and
    emits the deployment response JSON (outputs + status code) on stdout.
    """
    warnings.warn('This hook is deprecated, please use hooks from heat-agents '
                  'repository instead.', DeprecationWarning)
    log = logging.getLogger('heat-config')
    handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(
        logging.Formatter(
            '[%(asctime)s] (%(name)s) [%(levelname)s] %(message)s'))
    log.addHandler(handler)
    log.setLevel('DEBUG')

    prepare_dir(OUTPUTS_DIR)
    prepare_dir(WORKING_DIR)
    os.chdir(WORKING_DIR)

    # Full software config document, delivered by the agent on stdin.
    c = json.load(sys.stdin)

    env = os.environ.copy()
    # Inputs become environment variables; compound values are serialized
    # as JSON so the script can parse them.
    for input in c['inputs']:
        input_name = input['name']
        value = input.get('value', '')
        if isinstance(value, dict) or isinstance(value, list):
            env[input_name] = json.dumps(value)
        else:
            env[input_name] = value
        log.info('%s=%s' % (input_name, env[input_name]))

    fn = os.path.join(WORKING_DIR, c['id'])
    heat_outputs_path = os.path.join(OUTPUTS_DIR, c['id'])
    # The script reads this env var to know where to write its outputs.
    env['heat_outputs_path'] = heat_outputs_path

    # Write the config body as an executable file (0700).
    # NOTE(review): .encode() implies this script targets Python 2
    # (bytes into a text-mode file) -- confirm before porting.
    with os.fdopen(os.open(fn, os.O_CREAT | os.O_WRONLY, 0o700), 'w') as f:
        f.write(c.get('config', '').encode('utf-8'))

    log.debug('Running %s' % fn)
    subproc = subprocess.Popen([fn], stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE, env=env)
    stdout, stderr = subproc.communicate()

    log.info(stdout)
    log.debug(stderr)

    if subproc.returncode:
        log.error("Error running %s. [%s]\n" % (fn, subproc.returncode))
    else:
        log.info('Completed %s' % fn)

    response = {}

    # Collect declared outputs written by the script; absent files are
    # simply omitted from the response.
    for output in c.get('outputs') or []:
        output_name = output['name']
        try:
            with open('%s.%s' % (heat_outputs_path, output_name)) as out:
                response[output_name] = out.read()
        except IOError:
            pass

    response.update({
        'deploy_stdout': stdout,
        'deploy_stderr': stderr,
        'deploy_status_code': subproc.returncode,
    })

    json.dump(response, sys.stdout)
|
||||
|
||||
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    sys.exit(main(sys.argv))
|
|
@ -1,3 +0,0 @@
|
|||
This is an os-refresh-config script which iterates over deployments configuration
|
||||
data and invokes the appropriate hook for each deployment item. Any outputs returned
|
||||
by the hook will be signalled back to heat using the configured signalling method.
|
|
@ -1,163 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
import requests
|
||||
|
||||
try:
|
||||
from heatclient import client as heatclient
|
||||
except ImportError:
|
||||
heatclient = None
|
||||
|
||||
try:
|
||||
from keystoneclient.v3 import client as ksclient
|
||||
except ImportError:
|
||||
ksclient = None
|
||||
|
||||
try:
|
||||
from zaqarclient.queues.v1 import client as zaqarclient
|
||||
except ImportError:
|
||||
zaqarclient = None
|
||||
|
||||
|
||||
# Keep serialized signal payloads safely below heat's request body limit
# (max_json_body_size = 1048576 bytes, per the comment in main()).
MAX_RESPONSE_SIZE = 950000
|
||||
|
||||
|
||||
def init_logging():
    """Build and return the 'heat-config-notify' logger (DEBUG to stderr)."""
    logger = logging.getLogger('heat-config-notify')
    stderr_handler = logging.StreamHandler(sys.stderr)
    fmt = '[%(asctime)s] (%(name)s) [%(levelname)s] %(message)s'
    stderr_handler.setFormatter(logging.Formatter(fmt))
    logger.addHandler(stderr_handler)
    logger.setLevel('DEBUG')
    return logger
|
||||
|
||||
|
||||
def trim_response(response, trimmed_values=None):
    """Trim selected values from response.

    Makes given response smaller or the same size as MAX_RESPONSE_SIZE by
    trimming given trimmed_values from response dict from the left side
    (beginning). Returns trimmed and serialized JSON response itself.

    :param response: dict to serialize (mutated in place when trimming).
    :param trimmed_values: keys whose string values may be shortened;
        defaults to ('deploy_stdout', 'deploy_stderr').
    """
    trimmed_values = trimmed_values or ('deploy_stdout', 'deploy_stderr')
    # Fixed: the py2-only 'encoding' keyword was dropped. On Python 2
    # 'utf-8' is already json.dumps' default, so behaviour is unchanged,
    # while on Python 3 the keyword raised TypeError.
    str_response = json.dumps(response, ensure_ascii=True)
    len_total = len(str_response)
    offset = MAX_RESPONSE_SIZE - len_total
    if offset >= 0:
        # Already within the limit; nothing to trim.
        return str_response
    offset = abs(offset)
    # Trim each value proportionally to its share of the total size,
    # dropping characters from the beginning (oldest output first).
    for key in trimmed_values:
        len_value = len(response[key])
        cut = int(round(float(len_value) / len_total * offset))
        response[key] = response[key][cut:]
    str_response = json.dumps(response, ensure_ascii=True)
    return str_response
|
||||
|
||||
|
||||
def main(argv=sys.argv, stdin=sys.stdin):
    """Signal deployment results back to heat.

    Reads signal data JSON from *stdin* and the deployment config from
    ``argv[1]``, then delivers the signal via whichever mechanism the
    config's inputs select: a pre-signed signal URL, a zaqar queue, or
    the heat API directly. Returns 0 on completion, 1 on usage errors.
    """

    log = init_logging()
    usage = ('Usage:\n heat-config-notify /path/to/config.json '
             '< /path/to/signal_data.json')

    if len(argv) < 2:
        log.error(usage)
        return 1

    try:
        signal_data = json.load(stdin)
    except ValueError:
        # Best-effort: an empty/invalid body still results in a signal.
        log.warn('No valid json found on stdin')
        signal_data = {}

    conf_file = argv[1]
    if not os.path.exists(conf_file):
        log.error('No config file %s' % conf_file)
        log.error(usage)
        return 1

    c = json.load(open(conf_file))

    # Index the config inputs by name for the lookups below.
    iv = dict((i['name'], i['value']) for i in c['inputs'])

    # Mechanism 1: pre-signed (e.g. swift/ec2 style) signal URL.
    if 'deploy_signal_id' in iv:
        sigurl = iv.get('deploy_signal_id')
        sigverb = iv.get('deploy_signal_verb', 'POST')
        log.debug('Signaling to %s via %s' % (sigurl, sigverb))
        # we need to trim log content because Heat response size is limited
        # by max_json_body_size = 1048576
        str_signal_data = trim_response(signal_data)
        if sigverb == 'PUT':
            r = requests.put(sigurl, data=str_signal_data,
                             headers={'content-type': 'application/json'})
        else:
            r = requests.post(sigurl, data=str_signal_data,
                              headers={'content-type': 'application/json'})
        log.debug('Response %s ' % r)

    # Mechanism 2: post the signal onto a zaqar queue.
    if 'deploy_queue_id' in iv:
        queue_id = iv.get('deploy_queue_id')
        log.debug('Signaling to queue %s' % (queue_id,))

        ks = ksclient.Client(
            auth_url=iv['deploy_auth_url'],
            user_id=iv['deploy_user_id'],
            password=iv['deploy_password'],
            project_id=iv['deploy_project_id'])
        endpoint = ks.service_catalog.url_for(
            service_type='messaging', endpoint_type='publicURL')

        conf = {
            'auth_opts': {
                'backend': 'keystone',
                'options': {
                    'os_auth_token': ks.auth_token,
                    'os_project_id': iv['deploy_project_id'],
                }
            }
        }
        cli = zaqarclient.Client(endpoint, conf=conf, version=1.1)
        queue = cli.queue(queue_id)
        r = queue.post({'body': signal_data, 'ttl': 600})
        log.debug('Response %s ' % r)

    # Mechanism 3 (fallback when no queue was given): signal the heat
    # resource directly through the orchestration API.
    elif 'deploy_auth_url' in iv:
        ks = ksclient.Client(
            auth_url=iv['deploy_auth_url'],
            user_id=iv['deploy_user_id'],
            password=iv['deploy_password'],
            project_id=iv['deploy_project_id'])
        endpoint = ks.service_catalog.url_for(
            service_type='orchestration', endpoint_type='publicURL')
        log.debug('Signalling to %s' % endpoint)
        heat = heatclient.Client(
            '1', endpoint, token=ks.auth_token)
        r = heat.resources.signal(
            iv.get('deploy_stack_id'),
            iv.get('deploy_resource_name'),
            data=signal_data)
        log.debug('Response %s ' % r)

    return 0
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    sys.exit(main(sys.argv, sys.stdin))
|
|
@ -1,41 +0,0 @@
|
|||
#!/bin/bash

# This script will create the needed files under /var/run/heat-config so that
# any deployments that have already been queried from the Heat api via
# os-collect-config are not executed.
#
# This is a workaround for:
# https://bugs.launchpad.net/heat-templates/+bug/1513220
# where /var/run/heat-config has already been lost due to system reboot.

set -eu

deployments=$(mktemp)

echo "Reading deployments via os-apply-config to $deployments"
os-apply-config --key deployments --type raw | jq . > $deployments

num_deployments=$(jq length $deployments)
echo "Found $num_deployments deployments."
# Fixed: 'let "num_deployments -= 1"' returns a non-zero status whenever
# the result is 0, which under 'set -e' silently aborted the script when
# exactly one deployment existed. Plain arithmetic-expansion assignment
# always returns status 0.
num_deployments=$((num_deployments - 1))

# Prefer the persistent state directory when it already exists.
if [ -e /var/lib/heat-config/deployed ]; then
    deployed_dir=/var/lib/heat-config/deployed
else
    deployed_dir=/var/run/heat-config/deployed
fi
mkdir -p $deployed_dir

for idx in $(seq 0 $num_deployments); do
    deployment=$(jq .[$idx] $deployments)
    deployment_id=$(jq -r .id <<<$deployment)
    deployment_group=$(jq -r .group <<<$deployment)
    # os-apply-config / ungrouped deployments are applied by other means,
    # so no "already deployed" marker is written for them.
    if [ "$deployment_group" = "os-apply-config" -o \
         "$deployment_group" = "Heat::Ungrouped" ]; then
        echo "Skipping creating deployed file for deployment $deployment_id as it is group:$deployment_group"
        continue
    else
        echo "Creating $deployed_dir/${deployment_id}.json so that deployment will not be re-run"
        touch $deployed_dir/${deployment_id}.json
    fi
done
|
|
@ -1,2 +0,0 @@
|
|||
os-apply-config
|
||||
os-refresh-config
|
|
@ -1,4 +0,0 @@
|
|||
#!/bin/bash
# Install the clients heat-config-notify needs to signal heat (package path).
set -eux

install-packages python-heatclient python-zaqarclient
|
|
@ -1,4 +0,0 @@
|
|||
#!/bin/bash
# Install the clients heat-config-notify needs to signal heat (pip path).
set -eux

pip install python-heatclient python-zaqarclient
|
|
@ -1 +0,0 @@
|
|||
{{deployments}}
|
|
@ -1,195 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import stat
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
import requests
|
||||
import six
|
||||
|
||||
# Candidate hook directories, searched in order; the env var override wins
# and may be unset (None entries are skipped by find_hook_path()).
HOOKS_DIR_PATHS = (
    os.environ.get('HEAT_CONFIG_HOOKS'),
    '/usr/libexec/heat-config/hooks',
    '/var/lib/heat-config/hooks',
)
# Aggregated config JSON produced by os-collect-config.
CONF_FILE = os.environ.get('HEAT_SHELL_CONFIG',
                           '/var/run/heat-config/heat-config')
# Marker directory recording which deployments have already been run.
DEPLOYED_DIR = os.environ.get('HEAT_CONFIG_DEPLOYED',
                              '/var/lib/heat-config/deployed')
# Legacy marker location; main() migrates it to DEPLOYED_DIR when present.
OLD_DEPLOYED_DIR = os.environ.get('HEAT_CONFIG_DEPLOYED_OLD',
                                  '/var/run/heat-config/deployed')
# Executable used to signal deployment results back to heat.
HEAT_CONFIG_NOTIFY = os.environ.get('HEAT_CONFIG_NOTIFY',
                                    'heat-config-notify')
|
||||
|
||||
|
||||
def main(argv=sys.argv):
    """os-refresh-config entry point: run every pending deployment.

    Loads the aggregated config file, migrates legacy deployed-state if
    needed, and invokes the matching hook for each config item. Returns 1
    if the config file is missing.
    """
    log = logging.getLogger('heat-config')
    handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(
        logging.Formatter(
            '[%(asctime)s] (%(name)s) [%(levelname)s] %(message)s'))
    log.addHandler(handler)
    log.setLevel('DEBUG')

    if not os.path.exists(CONF_FILE):
        log.error('No config file %s' % CONF_FILE)
        return 1

    # Tighten permissions: the config may carry secrets in input values.
    conf_mode = stat.S_IMODE(os.lstat(CONF_FILE).st_mode)
    if conf_mode != 0o600:
        os.chmod(CONF_FILE, 0o600)

    if not os.path.isdir(DEPLOYED_DIR):
        # One-time migration of the deployed markers from the legacy
        # (reboot-volatile) location to the persistent one.
        if DEPLOYED_DIR != OLD_DEPLOYED_DIR and os.path.isdir(OLD_DEPLOYED_DIR):
            log.debug('Migrating deployed state from %s to %s' %
                      (OLD_DEPLOYED_DIR, DEPLOYED_DIR))
            shutil.move(OLD_DEPLOYED_DIR, DEPLOYED_DIR)
        else:
            os.makedirs(DEPLOYED_DIR, 0o700)

    try:
        configs = json.load(open(CONF_FILE))
    except ValueError:
        # Best-effort: an unparsable config file means nothing to deploy.
        pass
    else:
        for c in configs:
            try:
                invoke_hook(c, log)
            except Exception as e:
                # One failing deployment must not block the others.
                log.exception(e)
|
||||
|
||||
|
||||
def find_hook_path(group):
    """Map a config group name to the first existing hook executable.

    The group is reduced to alphanumerics plus '-' and '_' before being
    joined to each candidate hooks directory. Returns the hook path, or
    None when no directory provides one.
    """
    # sanitise the group to get an alphanumeric hook file name
    hook_name = "".join(
        ch for ch in group if ch == '-' or ch == '_' or ch.isalnum())

    for base in HOOKS_DIR_PATHS:
        if not base or not os.path.exists(base):
            continue
        candidate = os.path.join(base, hook_name)
        if os.path.exists(candidate):
            return candidate
|
||||
|
||||
|
||||
def invoke_hook(c, log):
    """Run the hook for one deployment config and signal the result.

    :param c: dict describing a single software deployment (keys include
        'id', 'group', 'inputs', 'config').
    :param log: logger used for progress and error reporting.

    Skips configs that are already deployed, records each config under
    DEPLOYED_DIR before running its hook, then forwards the hook's signal
    data to HEAT_CONFIG_NOTIFY.
    """
    # Sanitize input values (bug 1333992). Convert all String
    # inputs to strings if they're not already.
    hot_inputs = c.get('inputs', [])
    for hot_input in hot_inputs:
        if hot_input.get('type', None) == 'String' and \
                not isinstance(hot_input['value'], six.text_type):
            hot_input['value'] = str(hot_input['value'])
    # Build from the defaulted list so a config with no 'inputs' key
    # does not raise KeyError.
    iv = dict((i['name'], i['value']) for i in hot_inputs)

    # The group property indicates whether it is softwarecomponent or
    # plain softwareconfig.
    # If it is softwarecomponent, pick up a property config to invoke
    # according to deploy_action.
    group = c.get('group')
    if group == 'component':
        found = False
        action = iv.get('deploy_action')
        config = c.get('config')
        configs = config.get('configs')
        if configs:
            for cfg in configs:
                if action in cfg['actions']:
                    c['config'] = cfg['config']
                    c['group'] = cfg['tool']
                    found = True
                    break
        if not found:
            # log.warning: 'warn' is a deprecated alias in the stdlib.
            log.warning('Skipping group %s, no valid script is defined'
                        ' for deploy action %s' % (group, action))
            return

    # check to see if this config is already deployed
    deployed_path = os.path.join(DEPLOYED_DIR, '%s.json' % c['id'])

    if os.path.exists(deployed_path):
        log.warning('Skipping config %s, already deployed' % c['id'])
        log.warning('To force-deploy, rm %s' % deployed_path)
        return

    signal_data = {}
    hook_path = find_hook_path(c['group'])

    if not hook_path:
        log.warning('Skipping group %s with no hook script %s' % (
            c['group'], hook_path))
        return

    # write out config, which indicates it is deployed regardless of
    # subsequent hook success
    with os.fdopen(os.open(
            deployed_path, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as f:
        json.dump(c, f, indent=2)

    log.debug('Running %s < %s' % (hook_path, deployed_path))
    subproc = subprocess.Popen([hook_path],
                               stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    # The full config is passed to the hook on stdin as JSON.
    stdout, stderr = subproc.communicate(input=json.dumps(c))

    log.info(stdout)
    log.debug(stderr)

    if subproc.returncode:
        log.error("Error running %s. [%s]\n" % (
            hook_path, subproc.returncode))
    else:
        log.info('Completed %s' % hook_path)

    try:
        if stdout:
            signal_data = json.loads(stdout)
    except ValueError:
        # Hook did not emit JSON; synthesize signal data from raw output.
        signal_data = {
            'deploy_stdout': stdout,
            'deploy_stderr': stderr,
            'deploy_status_code': subproc.returncode,
        }

    signal_data_path = os.path.join(DEPLOYED_DIR, '%s.notify.json' % c['id'])
    # write out notify data for debugging
    with os.fdopen(os.open(
            signal_data_path, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as f:
        json.dump(signal_data, f, indent=2)

    log.debug('Running %s %s < %s' % (
        HEAT_CONFIG_NOTIFY, deployed_path, signal_data_path))
    subproc = subprocess.Popen([HEAT_CONFIG_NOTIFY, deployed_path],
                               stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    stdout, stderr = subproc.communicate(input=json.dumps(signal_data))

    log.info(stdout)

    if subproc.returncode:
        log.error(
            "Error running heat-config-notify. [%s]\n" % subproc.returncode)
        log.error(stderr)
    else:
        log.debug(stderr)
|
||||
|
||||
|
||||
# Script entry point: the process exit status comes from main().
if __name__ == '__main__':
    sys.exit(main(sys.argv))
|
|
@ -2,6 +2,10 @@
|
|||
Steps to build container image with all container hooks
|
||||
=======================================================
|
||||
|
||||
Install the heat-agents repo alongside heat-templates. Some files in this
directory are symlinks that assume the presence of the heat-agents directory in
the same directory as the heat-templates directory.
|
||||
|
||||
Docker build does not work with soft links. Therefore, convert all
|
||||
soft links to hardlinks.
|
||||
|
||||
|
@ -13,4 +17,4 @@ Build docker image with container hooks.
|
|||
|
||||
Push the image to docker hub.
|
||||
|
||||
$docker push xxxx/heat-container-agent
|
||||
$docker push xxxx/heat-container-agent
|
||||
|
|
|
@ -1 +1 @@
|
|||
../../elements/heat-config-docker-compose/os-refresh-config/configure.d/50-heat-config-docker-compose
|
||||
../../../../../heat-agents/heat-config-docker-compose/os-refresh-config/configure.d/50-heat-config-docker-compose
|
|
@ -1 +1 @@
|
|||
../../elements/heat-config/os-refresh-config/configure.d/55-heat-config
|
||||
../../../../../heat-agents/heat-config/os-refresh-config/configure.d/55-heat-config
|
|
@ -1 +1 @@
|
|||
../../elements/heat-config/bin/heat-config-notify
|
||||
../../../../../heat-agents/heat-config/bin/heat-config-notify
|
|
@ -1 +1 @@
|
|||
../../../elements/heat-config-docker-compose/install.d/hook-docker-compose.py
|
||||
../../../../../../heat-agents/heat-config-docker-compose/install.d/hook-docker-compose.py
|
|
@ -1 +1 @@
|
|||
../../../elements/heat-config-script/install.d/hook-script.py
|
||||
../../../../../../heat-agents/heat-config-script/install.d/hook-script.py
|
|
@ -41,6 +41,7 @@
|
|||
set -x
|
||||
export PYTHONUNBUFFERED=true
|
||||
export DEVSTACK_GATE_TEMPEST=0
|
||||
export PROJECTS="openstack/heat-agents $PROJECTS"
|
||||
function post_test_hook {
|
||||
cd $BASE/new/heat-templates/tools
|
||||
./post_test_hook.sh
|
||||
|
|
16
setup.cfg
16
setup.cfg
|
@ -4,24 +4,10 @@ summary = heat-templates
|
|||
description-file =
|
||||
README.rst
|
||||
author = OpenStack
|
||||
author-email = openstack-dev@lists.openstack.org
|
||||
home-page = https://docs.openstack.org/heat/latest/
|
||||
author-email = openstack-discuss@lists.openstack.org
|
||||
classifier =
|
||||
Environment :: OpenStack
|
||||
Intended Audience :: Information Technology
|
||||
Intended Audience :: System Administrators
|
||||
License :: OSI Approved :: Apache Software License
|
||||
Operating System :: POSIX :: Linux
|
||||
Programming Language :: Python
|
||||
Programming Language :: Python :: 2
|
||||
Programming Language :: Python :: 2.7
|
||||
Programming Language :: Python :: 3
|
||||
Programming Language :: Python :: 3.4
|
||||
|
||||
[build_sphinx]
|
||||
source-dir = doc/source
|
||||
build-dir = doc/build
|
||||
all_files = 1
|
||||
|
||||
[upload_sphinx]
|
||||
upload-dir = doc/build/html
|
||||
|
|
18
setup.py
18
setup.py
|
@ -1,18 +0,0 @@
|
|||
#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools

# Minimal pbr-based setup stub: with pbr=True the packaging metadata is
# taken from setup.cfg rather than passed here.
setuptools.setup(
    setup_requires=['pbr'],
    pbr=True)
|
|
@ -1,21 +0,0 @@
|
|||
coverage>=3.6
|
||||
discover
|
||||
dpath>=1.3.2
|
||||
fixtures>=0.3.14
|
||||
# Hacking already pins down pep8, pyflakes and flake8
|
||||
hacking>=0.10.0,<0.11
|
||||
mock>=1.0
|
||||
openstackdocstheme>=1.11.0 # Apache-2.0
|
||||
requests>=1.2.1,!=2.4.0
|
||||
requests-mock>=0.4.0 # Apache-2.0
|
||||
salt
|
||||
sphinx>=1.6.2 # BSD
|
||||
testrepository>=0.0.18
|
||||
testscenarios>=0.4
|
||||
testtools>=0.9.34
|
||||
yamllint>=1.2.0
|
||||
os-apply-config
|
||||
|
||||
python-heatclient>=1.2.0
|
||||
python-keystoneclient>=0.10.0
|
||||
python-openstackclient>=2.1.0
|
|
@ -1,38 +0,0 @@
|
|||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
import testtools
|
||||
|
||||
|
||||
class RunScriptTest(testtools.TestCase):
    """Base test case with helpers for running hook scripts as subprocesses."""

    def relative_path(self, from_path, *to_paths):
        """Join *to_paths* onto the directory containing *from_path*."""
        base_dir = os.path.dirname(os.path.realpath(from_path))
        return os.path.join(base_dir, *to_paths)

    def run_cmd(self, args, env, input_str=None):
        """Run *args* with *env*, feeding *input_str* on stdin.

        Returns a (returncode, stdout, stderr) tuple.
        """
        pipe = subprocess.PIPE
        proc = subprocess.Popen(
            args, stdin=pipe, stdout=pipe, stderr=pipe, env=env)
        out, err = proc.communicate(input=input_str)
        return proc.returncode, out, err

    def json_from_file(self, path):
        """Parse the file at *path* as JSON and return the result."""
        with open(path) as f:
            return json.load(f)
|
|
@ -1,58 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
'''
|
||||
A fake config tool for unit testing the software-config hooks.
|
||||
|
||||
JSON containing the current environment variables and command line arguments
|
||||
are written to the file specified by the path in environment variable
|
||||
TEST_STATE_PATH.
|
||||
|
||||
Environment variable TEST_RESPONSE defines JSON specifying what files to write
|
||||
out, and what to print to stdout and stderr.
|
||||
'''
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
def main(argv=sys.argv):
    """Record this invocation's environment/args and replay TEST_RESPONSE.

    Writes {'env': ..., 'args': argv} as JSON to TEST_STATE_PATH (numbered
    suffixes handle repeat invocations).  If TEST_RESPONSE is set, writes
    the files it describes, emits its stdout/stderr text, and returns its
    'returncode' (default 0); otherwise returns None.
    """
    state_path = os.environ.get('TEST_STATE_PATH')

    # handle multiple invocations by writing to numbered state path files
    suffix = 0
    while os.path.isfile(state_path):
        suffix += 1
        state_path = '%s_%s' % (os.environ.get('TEST_STATE_PATH'), suffix)

    with open(state_path, 'w') as f:
        json.dump({'env': dict(os.environ), 'args': argv}, f)

    if 'TEST_RESPONSE' not in os.environ:
        return

    response = json.loads(os.environ.get('TEST_RESPONSE'))
    # .items() works on both Python 2 and 3 (the original .iteritems()
    # was Python-2 only); the previous stray open() leaked a handle.
    for k, v in response.get('files', {}).items():
        with open(k, 'w') as f:
            f.write(v)

    sys.stdout.write(response.get('stdout', ''))
    sys.stderr.write(response.get('stderr', ''))
    return response.get('returncode', 0)


if __name__ == '__main__':
    sys.exit(main(sys.argv))
|
|
@ -1 +0,0 @@
|
|||
../../hot/software-config/elements/heat-config/bin/heat-config-notify
|
|
@ -1,60 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
'''
|
||||
A fake heat-config hook for unit testing the 55-heat-config
|
||||
os-refresh-config script.
|
||||
'''
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
def main(argv=sys.argv):
    """Fake hook: echo back a deployment response for test assertions.

    Reads a config dict as JSON from stdin, builds a response whose
    outputs mirror the inputs of the same name (plus the standard
    deploy_stdout/deploy_stderr/deploy_status_code outputs), records the
    stdin and stdout JSON beside this script as '<group>.stdin' and
    '<group>.stdout', and writes the response to stdout.
    """
    c = json.load(sys.stdin)

    # Map input names to values ('item' avoids shadowing the builtin
    # 'input' used by the original code).
    inputs = {}
    for item in c['inputs']:
        inputs[item['name']] = item.get('value', '')

    response = {}

    # populate outputs from inputs of the same name
    for output in c.get('outputs') or []:
        output_name = output['name']
        response[output_name] = inputs.get(output_name, '')

    # populate deploy outputs from the inputs of the same name
    response.update({
        'deploy_stdout': inputs.get('deploy_stdout', 'stdout'),
        'deploy_stderr': inputs.get('deploy_stderr', 'stderr'),
        'deploy_status_code': inputs.get('deploy_status_code', '0'),
    })

    # write out stdin and stdout json for test asserts
    script_dir = os.path.dirname(os.path.realpath(__file__))
    stdin_path = os.path.join(script_dir, '%s.stdin' % c['group'])
    stdout_path = os.path.join(script_dir, '%s.stdout' % c['group'])

    with open(stdin_path, 'w') as f:
        json.dump(c, f)
        f.flush()
    with open(stdout_path, 'w') as f:
        json.dump(response, f)
        f.flush()
    json.dump(response, sys.stdout)


if __name__ == '__main__':
    sys.exit(main(sys.argv))
|
|
@ -1 +0,0 @@
|
|||
../../hot/software-config/elements/heat-config-kubelet/install.d/hook-kubelet.py
|
|
@ -1,272 +0,0 @@
|
|||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
|
||||
import fixtures
|
||||
from testtools import matchers
|
||||
|
||||
from tests.software_config import common
|
||||
|
||||
|
||||
class HeatConfigTest(common.RunScriptTest):
    """End-to-end tests for the 55-heat-config os-refresh-config script.

    Installs a fake hook (hook-fake.py) under every supported group name,
    runs the real 55-heat-config script as a subprocess against fixture
    configs, and asserts on the state files the fake hooks write.
    """

    # Group names that get a copy of the fake hook installed.
    fake_hooks = ['cfn-init', 'chef', 'puppet', 'salt', 'script',
                  'apply-config', 'hiera', 'json-file']

    # Fixture deployment configs; '9999' deliberately names a missing hook.
    data = [
        {
            'id': '1111',
            'group': 'chef',
            'inputs': [{
                'name': 'deploy_signal_id',
                'value': 'mock://192.0.2.2/foo'
            }],
            'config': 'one'
        }, {
            'id': '2222',
            'group': 'cfn-init',
            'inputs': [],
            'config': 'two'
        }, {
            'id': '3333',
            'group': 'salt',
            'inputs': [{'name': 'foo', 'value': 'bar'}],
            'outputs': [{'name': 'foo'}],
            'config': 'three'
        }, {
            'id': '4444',
            'group': 'puppet',
            'inputs': [],
            'config': 'four'
        }, {
            'id': '5555',
            'group': 'script',
            'inputs': [{
                'name': 'deploy_status_code', 'value': '-1'
            }, {
                'name': 'deploy_stderr', 'value': 'A bad thing happened'
            }, {
                'name': 'deploy_signal_id',
                'value': 'mock://192.0.2.3/foo'
            }],
            'config': 'five'
        }, {
            'id': '6666',
            'group': 'apply-config',
            'inputs': [{'name': 'foo', 'value': 'bar'}],
            'config': 'six'
        }, {
            'id': '7777',
            'group': 'hiera',
            'inputs': [],
            'config': 'seven'
        }, {
            'id': '8888',
            'group': 'json-file',
            'inputs': [],
            'config': 'eight'
        }, {
            'id': '9999',
            'group': 'no-such-hook',
            'inputs': [],
            'config': 'nine'
        }]

    # Expected signal data per group, as produced by the fake hook.
    outputs = {
        'chef': {
            'deploy_status_code': '0',
            'deploy_stderr': 'stderr',
            'deploy_stdout': 'stdout'
        },
        'cfn-init': {
            'deploy_status_code': '0',
            'deploy_stderr': 'stderr',
            'deploy_stdout': 'stdout'
        },
        'salt': {
            'deploy_status_code': '0',
            'deploy_stderr': 'stderr',
            'deploy_stdout': 'stdout',
            'foo': 'bar'
        },
        'puppet': {
            'deploy_status_code': '0',
            'deploy_stderr': 'stderr',
            'deploy_stdout': 'stdout'
        },
        'script': {
            'deploy_status_code': '-1',
            'deploy_stderr': 'A bad thing happened',
            'deploy_stdout': 'stdout'
        },
        'hiera': {
            'deploy_status_code': '0',
            'deploy_stderr': 'stderr',
            'deploy_stdout': 'stdout'
        },
        'json-file': {
            'deploy_status_code': '0',
            'deploy_stderr': 'stderr',
            'deploy_stdout': 'stdout'
        },
        'apply-config': {
            'deploy_status_code': '0',
            'deploy_stderr': 'stderr',
            'deploy_stdout': 'stdout'
        }
    }

    def setUp(self):
        """Install the fake hook for each group into a temp hooks dir."""
        super(HeatConfigTest, self).setUp()

        self.fake_hook_path = self.relative_path(__file__, 'hook-fake.py')

        self.heat_config_path = self.relative_path(
            __file__,
            '../..',
            'hot/software-config/elements',
            'heat-config/os-refresh-config/configure.d/55-heat-config')

        self.hooks_dir = self.useFixture(fixtures.TempDir())
        self.deployed_dir = self.useFixture(fixtures.TempDir())

        with open(self.fake_hook_path) as f:
            fake_hook = f.read()

        for hook in self.fake_hooks:
            hook_name = self.hooks_dir.join(hook)
            with open(hook_name, 'w') as f:
                os.utime(hook_name, None)
                f.write(fake_hook)
                f.flush()
            os.chmod(hook_name, 0o755)
        self.env = os.environ.copy()

    def write_config_file(self, data):
        """Write *data* as JSON to a NamedTemporaryFile and return it."""
        config_file = tempfile.NamedTemporaryFile()
        config_file.write(json.dumps(data))
        config_file.flush()
        return config_file

    def run_heat_config(self, data):
        """Run the real 55-heat-config script against *data* via env vars."""
        with self.write_config_file(data) as config_file:

            self.env.update({
                'HEAT_CONFIG_HOOKS': self.hooks_dir.join(),
                'HEAT_CONFIG_DEPLOYED': self.deployed_dir.join(),
                'HEAT_SHELL_CONFIG': config_file.name
            })
            returncode, stdout, stderr = self.run_cmd(
                [self.heat_config_path], self.env)

            self.assertEqual(0, returncode, stderr)

    def test_hooks_exist(self):
        """setUp installed exactly the expected fake hooks."""
        self.assertThat(
            self.hooks_dir.join('no-such-hook'),
            matchers.Not(matchers.FileExists()))

        for hook in self.fake_hooks:
            hook_path = self.hooks_dir.join(hook)
            self.assertThat(hook_path, matchers.FileExists())

    def test_run_heat_config(self):
        """Full deploy cycle: first run, idempotent re-run, change, migrate."""

        self.run_heat_config(self.data)

        for config in self.data:
            hook = config['group']
            stdin_path = self.hooks_dir.join('%s.stdin' % hook)
            stdout_path = self.hooks_dir.join('%s.stdout' % hook)
            deployed_file = self.deployed_dir.join('%s.json' % config['id'])

            # A missing hook must be skipped without writing state files.
            if hook == 'no-such-hook':
                self.assertThat(
                    stdin_path, matchers.Not(matchers.FileExists()))
                self.assertThat(
                    stdout_path, matchers.Not(matchers.FileExists()))
                continue

            self.assertThat(stdin_path, matchers.FileExists())
            self.assertThat(stdout_path, matchers.FileExists())

            # parsed stdin should match the config item
            self.assertEqual(config,
                             self.json_from_file(stdin_path))

            # parsed stdin should match the written deployed file
            self.assertEqual(config,
                             self.json_from_file(deployed_file))

            self.assertEqual(self.outputs[hook],
                             self.json_from_file(stdout_path))

            # clean up files in preparation for second run
            os.remove(stdin_path)
            os.remove(stdout_path)

        # run again with no changes, assert no new files
        self.run_heat_config(self.data)
        for config in self.data:
            hook = config['group']
            stdin_path = self.hooks_dir.join('%s.stdin' % hook)
            stdout_path = self.hooks_dir.join('%s.stdout' % hook)

            self.assertThat(
                stdin_path, matchers.Not(matchers.FileExists()))
            self.assertThat(
                stdout_path, matchers.Not(matchers.FileExists()))

        # run again changing the puppet config
        data = copy.deepcopy(self.data)
        for config in data:
            if config['id'] == '4444':
                config['id'] = '44444444'
        self.run_heat_config(data)
        for config in self.data:
            hook = config['group']
            stdin_path = self.hooks_dir.join('%s.stdin' % hook)
            stdout_path = self.hooks_dir.join('%s.stdout' % hook)

            # Only the changed (puppet) config should be re-deployed.
            if hook == 'puppet':
                self.assertThat(stdin_path, matchers.FileExists())
                self.assertThat(stdout_path, matchers.FileExists())
            else:
                self.assertThat(
                    stdin_path, matchers.Not(matchers.FileExists()))
                self.assertThat(
                    stdout_path, matchers.Not(matchers.FileExists()))

        # run again with a different deployed_dir
        old_deployed_dir = self.deployed_dir
        self.env['HEAT_CONFIG_DEPLOYED_OLD'] = old_deployed_dir.join()
        self.deployed_dir = self.useFixture(fixtures.TempDir())
        # make sure the new deployed_dir doesn't exist to trigger the migration
        shutil.rmtree(self.deployed_dir.join())

        self.run_heat_config(data)
        for config in self.data:
            hook = config['group']
            if hook == 'no-such-hook':
                continue
            deployed_file = self.deployed_dir.join('%s.json' % config['id'])
            old_deployed_file = old_deployed_dir.join('%s.json' % config['id'])
            # State must have moved wholesale to the new deployed dir.
            self.assertEqual(config,
                             self.json_from_file(deployed_file))
            self.assertThat(
                old_deployed_file, matchers.Not(matchers.FileExists()))
|
|
@ -1,117 +0,0 @@
|
|||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
import fixtures
|
||||
import yaml
|
||||
|
||||
from tests.software_config import common
|
||||
|
||||
|
||||
class HeatConfigDockerComposeORCTest(common.RunScriptTest):
    """Tests for the 50-heat-config-docker-compose os-refresh-config script.

    Installs a fake 'docker-compose' hook, runs the real script as a
    subprocess, and checks the docker-compose.yml it renders.
    """

    fake_hooks = ['docker-compose']

    # Two docker-compose style deployment configs.
    data = [
        {
            "name": "abcdef001",
            "group": "docker-compose",
            "inputs": {},
            "config": {
                "web": {
                    "image": "nginx",
                    "links": [
                        "db"
                    ],
                    "ports": [
                        "8000:8000"
                    ]
                },
                "db": {
                    "image": "redis"
                }
            }
        },
        {
            "name": "abcdef002",
            "group": "docker-compose",
            "inputs": {},
            "config": {
                "web": {
                    "image": "httpd",
                    "links": [
                        "db"
                    ],
                    "ports": [
                        "80:8001"
                    ]
                },
                "db": {
                    "image": "postgress"
                }
            }
        }
    ]

    def setUp(self):
        """Install the fake hook into a temp working dir."""
        super(HeatConfigDockerComposeORCTest, self).setUp()

        self.fake_hook_path = self.relative_path(__file__, 'hook-fake.py')
        self.heat_config_docker_compose_path = self.relative_path(
            __file__,
            '../..',
            'hot/software-config/elements',
            'heat-config-docker-compose/os-refresh-config/configure.d/'
            '50-heat-config-docker-compose')

        self.docker_compose_dir = self.useFixture(fixtures.TempDir())

        with open(self.fake_hook_path) as f:
            fake_hook = f.read()

        for hook in self.fake_hooks:
            hook_name = self.docker_compose_dir.join(hook)
            with open(hook_name, 'w') as f:
                os.utime(hook_name, None)
                f.write(fake_hook)
                f.flush()
            os.chmod(hook_name, 0o755)

    def write_config_file(self, data):
        """Write *data* as JSON to a NamedTemporaryFile and return it."""
        config_file = tempfile.NamedTemporaryFile()
        config_file.write(json.dumps(data))
        config_file.flush()
        return config_file

    def test_run_heat_config(self):
        """The script renders the config as docker-compose.yml YAML."""
        with self.write_config_file(self.data) as config_file:
            env = os.environ.copy()
            env.update({
                'HEAT_DOCKER_COMPOSE_WORKING': self.docker_compose_dir.join(),
                'HEAT_SHELL_CONFIG': config_file.name
            })

            returncode, stdout, stderr = self.run_cmd(
                [self.heat_config_docker_compose_path], env)

            self.assertEqual(0, returncode, stderr)

            compose_yml = self.docker_compose_dir.join(
                'abcdef001/docker-compose.yml')
            with open(compose_yml) as f:
                self.assertEqual(yaml.safe_dump(
                    self.data[0].get('config'),
                    default_flow_style=False), f.read())
|
|
@ -1,147 +0,0 @@
|
|||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
import fixtures
|
||||
from testtools import matchers
|
||||
|
||||
from tests.software_config import common
|
||||
|
||||
|
||||
class HeatConfigKubeletORCTest(common.RunScriptTest):
    """Tests for the 50-heat-config-kubelet os-refresh-config script.

    Installs a fake 'kubelet' hook, runs the real script as a subprocess,
    and verifies one manifest file is written per config item.
    """

    fake_hooks = ['kubelet']

    # Three kubelet pod-manifest style configs.
    data = [{
        "id": "abcdef001",
        "group": "kubelet",
        "name": "mysql",
        "config": {
            "version": "v1beta2",
            "volumes": [{
                "name": "mariadb-data"
            }],
            "containers": [{
                "image": "mariadb_image",
                "volumeMounts": [{
                    "mountPath": "/var/lib/mysql",
                    "name": "mariadb-data"
                }],
                "name": "mariadb",
                "env": [{
                    "name": "DB_ROOT_PASSWORD",
                    "value": "mariadb_password"
                }],
                "ports": [{
                    "containerPort": 3306
                }]
            }]}
    }, {
        "id": "abcdef002",
        "group": "kubelet",
        "name": "rabbitmq",
        "config": {
            "version": "v1beta2",
            "containers": [{
                "image": "rabbitmq_image",
                "name": "rabbitmq",
                "ports": [{
                    "containerPort": 5672
                }]
            }]
        }
    }, {
        "id": "abcdef003",
        "group": "kubelet",
        "name": "heat_api_engine",
        "config": {
            "version": "v1beta2",
            "containers": [{
                "image": "heat_engine_image",
                "name": "heat-engine",
                "env": [{
                    "name": "DB_ROOT_PASSWORD",
                    "value": "mariadb_password"
                }, {
                    "name": "HEAT_DB_PASSWORD",
                    "value": "heatdb_password"
                }, {
                    "name": "HEAT_KEYSTONE_PASSWORD",
                    "value": "password"
                }]
            }, {
                "image": "heat_api_image",
                "name": "heat-api",
                "ports": [{
                    "containerPort": 8004
                }]
            }]
        }
    }]

    def setUp(self):
        """Install the fake hook into a temp manifests dir."""
        super(HeatConfigKubeletORCTest, self).setUp()

        self.fake_hook_path = self.relative_path(__file__, 'hook-fake.py')

        self.heat_config_kubelet_path = self.relative_path(
            __file__,
            '../..',
            'hot/software-config/elements',
            'heat-config-kubelet/os-refresh-config/configure.d/'
            '50-heat-config-kubelet')

        self.manifests_dir = self.useFixture(fixtures.TempDir())

        with open(self.fake_hook_path) as f:
            fake_hook = f.read()

        for hook in self.fake_hooks:
            hook_name = self.manifests_dir.join(hook)
            with open(hook_name, 'w') as f:
                os.utime(hook_name, None)
                f.write(fake_hook)
                f.flush()
            os.chmod(hook_name, 0o755)

    def write_config_file(self, data):
        """Write *data* as JSON to a NamedTemporaryFile and return it."""
        config_file = tempfile.NamedTemporaryFile()
        config_file.write(json.dumps(data))
        config_file.flush()
        return config_file

    def test_run_heat_config(self):
        """Each config item produces a matching <id>.json manifest."""

        with self.write_config_file(self.data) as config_file:

            env = os.environ.copy()
            env.update({
                'HEAT_KUBELET_MANIFESTS': self.manifests_dir.join(),
                'HEAT_SHELL_CONFIG': config_file.name
            })
            returncode, stdout, stderr = self.run_cmd(
                [self.heat_config_kubelet_path], env)

            self.assertEqual(0, returncode, stderr)

        for config in self.data:
            manifest_name = '%s.json' % config['id']
            manifest_path = self.manifests_dir.join(manifest_name)
            self.assertThat(manifest_path, matchers.FileExists())

            # manifest file should match manifest config
            self.assertEqual(config['config'],
                             self.json_from_file(manifest_path))
|
|
@ -1,209 +0,0 @@
|
|||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import cStringIO
|
||||
import json
|
||||
import tempfile
|
||||
|
||||
import fixtures
|
||||
import mock
|
||||
|
||||
from tests.software_config import common
|
||||
from tests.software_config import heat_config_notify as hcn
|
||||
|
||||
|
||||
class HeatConfigNotifyTest(common.RunScriptTest):
    """Unit tests for the heat-config-notify script (imported as ``hcn``).

    Each class-level dict below is a deployment config as the script would
    receive it; the ``inputs`` it contains select the notification
    transport the script is expected to use.
    """

    # deploy_signal_id input only: the script should POST the signal data
    # to the given URL.
    data_signal_id = {
        'id': '5555',
        'group': 'script',
        'inputs': [{
            'name': 'deploy_signal_id',
            'value': 'mock://192.0.2.3/foo'
        }],
        'config': 'five'
    }

    # Same as above plus deploy_signal_verb=PUT: the script should use
    # HTTP PUT instead of POST.
    data_signal_id_put = {
        'id': '5555',
        'group': 'script',
        'inputs': [{
            'name': 'deploy_signal_id',
            'value': 'mock://192.0.2.3/foo'
        }, {
            'name': 'deploy_signal_verb',
            'value': 'PUT'
        }],
        'config': 'five'
    }

    # Keystone credentials plus stack/resource identifiers: the script
    # should authenticate and signal the Heat API via heatclient.
    data_heat_signal = {
        'id': '5555',
        'group': 'script',
        'inputs': [{
            'name': 'deploy_auth_url',
            'value': 'mock://192.0.2.3/auth'
        }, {
            'name': 'deploy_user_id',
            'value': 'aaaa'
        }, {
            'name': 'deploy_password',
            'value': 'password'
        }, {
            'name': 'deploy_project_id',
            'value': 'bbbb'
        }, {
            'name': 'deploy_stack_id',
            'value': 'cccc'
        }, {
            'name': 'deploy_resource_name',
            'value': 'the_resource'
        }],
        'config': 'five'
    }

    def setUp(self):
        super(HeatConfigNotifyTest, self).setUp()
        self.deployed_dir = self.useFixture(fixtures.TempDir())
        # Stub out the script's logging setup so tests do not configure
        # real log handlers.
        hcn.init_logging = mock.MagicMock()

    def write_config_file(self, data):
        """Write *data* as JSON to a temp file and return the open file.

        The file is deleted as soon as the returned NamedTemporaryFile is
        closed; test_notify_missing_file relies on that behaviour.
        """
        config_file = tempfile.NamedTemporaryFile()
        config_file.write(json.dumps(data))
        config_file.flush()
        return config_file

    def test_notify_missing_file(self):
        """main() returns 1 when the config file path does not exist."""

        signal_data = json.dumps({'foo': 'bar'})
        stdin = cStringIO.StringIO(signal_data)

        # Exiting the with-block closes (and deletes) the temp file, so
        # only a dangling path is passed to main() below.
        with self.write_config_file(self.data_signal_id) as config_file:
            config_file_name = config_file.name

        self.assertEqual(
            1, hcn.main(['heat-config-notify', config_file_name], stdin))

    def test_notify_missing_file_arg(self):
        """main() returns 1 when no config file argument is supplied."""

        signal_data = json.dumps({'foo': 'bar'})
        stdin = cStringIO.StringIO(signal_data)

        self.assertEqual(
            1, hcn.main(['heat-config-notify'], stdin))

    def test_notify_signal_id(self):
        """Signal data read from stdin is POSTed to deploy_signal_id."""
        requests = mock.MagicMock()
        # Monkey-patch the module the script uses so no real HTTP happens.
        hcn.requests = requests

        requests.post.return_value = '[200]'

        signal_data = json.dumps({'foo': 'bar'})
        stdin = cStringIO.StringIO(signal_data)

        with self.write_config_file(self.data_signal_id) as config_file:
            self.assertEqual(
                0, hcn.main(['heat-config-notify', config_file.name], stdin))

        requests.post.assert_called_once_with(
            'mock://192.0.2.3/foo',
            data=signal_data,
            headers={'content-type': 'application/json'})

    def test_notify_signal_id_put(self):
        """deploy_signal_verb=PUT makes the script PUT instead of POST."""
        requests = mock.MagicMock()
        hcn.requests = requests

        requests.post.return_value = '[200]'

        signal_data = json.dumps({'foo': 'bar'})
        stdin = cStringIO.StringIO(signal_data)

        with self.write_config_file(self.data_signal_id_put) as config_file:
            self.assertEqual(
                0, hcn.main(['heat-config-notify', config_file.name], stdin))

        requests.put.assert_called_once_with(
            'mock://192.0.2.3/foo',
            data=signal_data,
            headers={'content-type': 'application/json'})

    def test_notify_signal_id_empty_data(self):
        """Empty stdin is sent as an empty JSON object."""
        requests = mock.MagicMock()
        hcn.requests = requests

        requests.post.return_value = '[200]'

        stdin = cStringIO.StringIO()

        with self.write_config_file(self.data_signal_id) as config_file:
            self.assertEqual(
                0, hcn.main(['heat-config-notify', config_file.name], stdin))

        requests.post.assert_called_once_with(
            'mock://192.0.2.3/foo',
            data='{}',
            headers={'content-type': 'application/json'})

    def test_notify_signal_id_invalid_json_data(self):
        """Unparseable stdin is replaced with an empty JSON object."""
        requests = mock.MagicMock()
        hcn.requests = requests

        requests.post.return_value = '[200]'

        stdin = cStringIO.StringIO('{{{"hi')

        with self.write_config_file(self.data_signal_id) as config_file:
            self.assertEqual(
                0, hcn.main(['heat-config-notify', config_file.name], stdin))

        requests.post.assert_called_once_with(
            'mock://192.0.2.3/foo',
            data='{}',
            headers={'content-type': 'application/json'})

    def test_notify_heat_signal(self):
        """With keystone inputs the script signals Heat via heatclient.

        Checks that the keystone client is built from the deploy_*
        inputs, the orchestration endpoint is looked up from the service
        catalog, and the resource signal carries the decoded stdin data.
        """
        ksclient = mock.MagicMock()
        hcn.ksclient = ksclient
        ks = mock.MagicMock()
        ksclient.Client.return_value = ks

        heatclient = mock.MagicMock()
        hcn.heatclient = heatclient
        heat = mock.MagicMock()
        heatclient.Client.return_value = heat

        signal_data = json.dumps({'foo': 'bar'})
        stdin = cStringIO.StringIO(signal_data)

        ks.service_catalog.url_for.return_value = 'mock://192.0.2.3/heat'
        heat.resources.signal.return_value = 'all good'

        with self.write_config_file(self.data_heat_signal) as config_file:
            self.assertEqual(
                0, hcn.main(['heat-config-notify', config_file.name], stdin))

        ksclient.Client.assert_called_once_with(
            auth_url='mock://192.0.2.3/auth',
            user_id='aaaa',
            password='password',
            project_id='bbbb')
        ks.service_catalog.url_for.assert_called_once_with(
            service_type='orchestration', endpoint_type='publicURL')

        heatclient.Client.assert_called_once_with(
            '1', 'mock://192.0.2.3/heat', token=ks.auth_token)
        heat.resources.signal.assert_called_once_with(
            'cccc',
            'the_resource',
            data={'foo': 'bar'})
|
|
@ -1,225 +0,0 @@
|
|||
# Copyright 2015 NEC Corporation. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
import fixtures
|
||||
|
||||
from tests.software_config import common
|
||||
|
||||
|
||||
class HookAnsibleTest(common.RunScriptTest):
    """Tests for the heat-config-ansible hook script.

    The hook is run as a subprocess with a fake ansible executable
    (config-tool-fake.py) substituted via HEAT_ANSIBLE_CMD; the fake tool
    records its argv into TEST_STATE_PATH so the tests can assert exactly
    how ansible would have been invoked.
    """

    # Baseline deployment config: no options.
    data = {
        'id': '1234',
        'name': 'fake_resource_name',
        'group': 'ansible',
        'options': {},
        'inputs': [
            {'name': 'foo', 'value': 'bar'},
            {'name': 'another', 'value': 'input'}
        ],
        'config': 'the ansible playbook'
    }

    # Config with a 'tags' option, which should become --tags on the CLI.
    data_tags = {
        'id': '1234',
        'name': 'fake_resource_name_tags',
        'group': 'ansible',
        'options': {'tags': 'abc,def'},
        'inputs': [
            {'name': 'foo', 'value': 'bar'},
            {'name': 'another', 'value': 'input'}
        ],
        'config': 'the ansible playbook'
    }

    # Config with a 'modulepath' option, which should become --module-path.
    data_modulepath = data.copy()
    data_modulepath.update({
        'options': {'modulepath': '/opt/ansible:/usr/share/ansible'},
    })

    # Config with both options at once.
    data_tags_modulepath = data.copy()
    data_tags_modulepath.update({
        'options': {'modulepath': '/opt/ansible:/usr/share/ansible',
                    'tags': 'abc,def'},
    })

    def setUp(self):
        super(HookAnsibleTest, self).setUp()
        self.hook_path = self.relative_path(
            __file__,
            '../..',
            'hot/software-config/elements',
            'heat-config-ansible/install.d/hook-ansible.py')

        self.fake_tool_path = self.relative_path(
            __file__,
            'config-tool-fake.py')

        self.working_dir = self.useFixture(fixtures.TempDir())
        self.outputs_dir = self.useFixture(fixtures.TempDir())
        self.test_state_path = self.outputs_dir.join('test_state.json')
        self.test_inventory = "localhost test_var=123,"

        self.env = os.environ.copy()
        self.env.update({
            'HEAT_ANSIBLE_WORKING': self.working_dir.join(),
            'HEAT_ANSIBLE_OUTPUTS': self.outputs_dir.join(),
            'HEAT_ANSIBLE_CMD': self.fake_tool_path,
            'TEST_STATE_PATH': self.test_state_path
        })

    def test_hook(self):
        """Baseline run with no options."""
        self._hook_run()

    def test_hook_tags(self):
        """'tags' option is translated to --tags."""
        self._hook_run(data=self.data_tags, options=['--tags', 'abc,def'])

    def test_hook_modulepath(self):
        """'modulepath' option is translated to --module-path."""
        self._hook_run(data=self.data_modulepath,
                       options=['--module-path',
                                '/opt/ansible:/usr/share/ansible'])

    def test_hook_tags_modulepath(self):
        """Both options are translated and passed together."""
        self._hook_run(data=self.data_tags_modulepath,
                       options=['--module-path',
                                '/opt/ansible:/usr/share/ansible',
                                '--tags', 'abc,def'])

    def _hook_run(self, data=None, options=None):
        """Run the hook and assert the resulting ansible invocation.

        :param data: deployment config dict (defaults to ``self.data``)
        :param options: extra CLI args expected between the inventory and
            the playbook path, derived from the config's 'options'.
        """

        self.env.update({
            'TEST_RESPONSE': json.dumps({
                'stdout': 'ansible success',
                'stderr': 'thing happened',
            }),
        })
        returncode, stdout, stderr = self.run_cmd(
            [self.hook_path], self.env, json.dumps(data or self.data))

        self.assertEqual(0, returncode, stderr)
        self.assertEqual({
            'deploy_stdout': 'ansible success',
            'deploy_stderr': 'thing happened',
            'deploy_status_code': 0,
        }, json.loads(stdout))

        state = self.json_from_file(self.test_state_path)
        ansible_playbook = self.working_dir.join('1234_playbook.yaml')
        vars_filename = self.working_dir.join('1234_variables.json')

        # The expected argv: fake tool, default inventory, any option
        # flags, then the playbook and the variables file reference.
        expected_args = [
            self.fake_tool_path,
            '-i',
            'localhost,']
        if options:
            expected_args += options
        expected_args += [
            ansible_playbook,
            '--extra-vars']
        expected_args.append('@%s' % vars_filename)
        self.assertEqual(expected_args, state['args'])

        # The hook wrote the 'variables' file; verify its contents
        variables = self.json_from_file(vars_filename)
        self.assertEqual('bar', variables['foo'])
        self.assertEqual('input', variables['another'])
        self.assertEqual(self.outputs_dir.join('1234'),
                         variables['heat_outputs_path'])

        # The hook wrote the 'config' out as the playbook file
        with open(ansible_playbook) as f:
            self.assertEqual('the ansible playbook', f.read())

    def test_hook_inventory(self):
        """HEAT_ANSIBLE_INVENTORY overrides the default inventory arg."""

        self.env.update({
            'HEAT_ANSIBLE_INVENTORY': self.test_inventory,
            'TEST_RESPONSE': json.dumps({
                'stdout': 'ansible success',
                'stderr': 'thing happened',
            }),
        })
        returncode, stdout, stderr = self.run_cmd(
            [self.hook_path], self.env, json.dumps(self.data))

        self.assertEqual(0, returncode, stderr)
        self.assertEqual({
            'deploy_stdout': 'ansible success',
            'deploy_stderr': 'thing happened',
            'deploy_status_code': 0,
        }, json.loads(stdout))

        state = self.json_from_file(self.test_state_path)
        ansible_playbook = self.working_dir.join('1234_playbook.yaml')
        vars_filename = self.working_dir.join('1234_variables.json')

        self.assertEqual(
            [
                self.fake_tool_path,
                '-i',
                self.test_inventory,
                ansible_playbook,
                '--extra-vars',
                '@%s' % vars_filename
            ],
            state['args'])

    def test_hook_ansible_failed(self):
        """A non-zero ansible exit code is reported as the status code.

        The hook itself still exits 0; the failure is surfaced through
        deploy_status_code in the JSON written to stdout.
        """

        self.env.update({
            'TEST_RESPONSE': json.dumps({
                'stdout': 'ansible failed',
                'stderr': 'bad thing happened',
                'returncode': 4
            }),
        })
        returncode, stdout, stderr = self.run_cmd(
            [self.hook_path], self.env, json.dumps(self.data))

        self.assertEqual(0, returncode, stderr)
        self.assertEqual({
            'deploy_stdout': 'ansible failed',
            'deploy_stderr': 'bad thing happened',
            'deploy_status_code': 4,
        }, json.loads(stdout))

        state = self.json_from_file(self.test_state_path)
        ansible_playbook = self.working_dir.join('1234_playbook.yaml')
        vars_filename = self.working_dir.join('1234_variables.json')

        self.assertEqual(
            [
                self.fake_tool_path,
                '-i',
                'localhost,',
                ansible_playbook,
                '--extra-vars',
                '@%s' % vars_filename
            ],
            state['args'])

        # The hook wrote the 'variables' file; verify its contents
        variables = self.json_from_file(vars_filename)
        self.assertEqual('bar', variables['foo'])
        self.assertEqual('input', variables['another'])
        self.assertEqual(self.outputs_dir.join('1234'),
                         variables['heat_outputs_path'])

        # The hook wrote the 'config' out as the playbook file
        with open(ansible_playbook) as f:
            self.assertEqual('the ansible playbook', f.read())
|
|
@ -1,81 +0,0 @@
|
|||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import fixtures
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import tempfile
|
||||
import yaml
|
||||
|
||||
from tests.software_config import common
|
||||
|
||||
log = logging.getLogger('test_hook_apply_config')
|
||||
|
||||
|
||||
class HookApplyConfigTest(common.RunScriptTest):
    """Tests for the heat-config-apply-config hook.

    Runs the hook as a subprocess against a real os-apply-config
    invocation, using a uniquely named template under /tmp so the test
    run does not collide with other files.
    """

    data = {
        'id': 'test_apply_config',
        'name': 'fake_resource_name',
        'group': 'apply-config',
        'config': {'foo': 'bar'}
    }

    def setUp(self):
        super(HookApplyConfigTest, self).setUp()
        self.hook_path = self.relative_path(
            __file__,
            '../..',
            'hot/software-config/elements',
            'heat-config-apply-config/install.d/hook-apply-config.py')

        self.metadata_dir = self.useFixture(fixtures.TempDir())
        self.templates_dir = self.useFixture(fixtures.TempDir())
        # Create-then-unlink a NamedTemporaryFile just to obtain a unique
        # basename for this test run; only the name is reused below.
        tmp_dir = tempfile.NamedTemporaryFile(mode='w', delete=False).name
        os.unlink(tmp_dir)
        self.tmp_file = os.path.basename(tmp_dir)
        self.out_dir = self.templates_dir.join('tmp')

        self.metadata = self.metadata_dir.join(self.tmp_file)

        self.env = os.environ.copy()
        self.env.update({
            'OS_CONFIG_FILES': self.metadata,
            'OS_CONFIG_APPLIER_TEMPLATES': self.templates_dir.join(),
        })

        # our fake metadata file
        with open(self.metadata, "w+") as md:
            md.write(json.dumps({'foo': 'bar'}))

        # This is our fake template root we use to verify os-apply-config
        # works as expected; the template renders into /tmp/<tmp_file>.
        os.mkdir(self.out_dir)
        with open(os.path.join(self.out_dir, self.tmp_file), "w+") as template:
            template.write("foo={{foo}}")

    def test_hook(self):
        """The hook renders the template with the metadata values."""

        returncode, stdout, stderr = self.run_cmd(
            [self.hook_path], self.env, json.dumps(self.data))

        self.assertEqual(0, returncode, stderr)
        ret = yaml.safe_load(stdout)
        self.assertIsNotNone(ret['deploy_stderr'])
        self.assertEqual('', ret['deploy_stdout'])
        self.assertEqual(0, ret['deploy_status_code'])
        # The rendered output lands in /tmp mirroring the template tree;
        # clean it up after asserting the substitution happened.
        f = os.path.join('/tmp', self.tmp_file)
        with open(f) as out_file:
            self.assertEqual('foo=bar', out_file.read())
        os.unlink(f)
|
|
@ -1,115 +0,0 @@
|
|||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
import fixtures
|
||||
|
||||
from tests.software_config import common
|
||||
|
||||
|
||||
class HookAtomicTest(common.RunScriptTest):
    """Tests for the heat-container-agent 'atomic' hook.

    The hook is run as a subprocess with the atomic command replaced by
    the fake config tool, which records its argv into TEST_STATE_PATH.
    """

    data = {
        "id": "abcdef001",
        "group": "atomic",
        "inputs": [],
        "config": {
            "command": "install",
            "image": "imain/atomic-install-rabbitmq"
        }
    }

    def setUp(self):
        super(HookAtomicTest, self).setUp()
        self.hook_path = self.relative_path(
            __file__,
            '../..',
            'hot/software-config/heat-container-agent',
            'scripts/hooks/atomic')

        self.fake_tool_path = self.relative_path(
            __file__,
            'config-tool-fake.py')

        self.working_dir = self.useFixture(fixtures.TempDir())
        self.outputs_dir = self.useFixture(fixtures.TempDir())
        self.test_state_path = self.outputs_dir.join('test_state.json')

        self.env = os.environ.copy()
        self.env.update({
            'HEAT_ATOMIC_WORKING': self.working_dir.join(),
            'HEAT_ATOMIC_CMD': self.fake_tool_path,
            'TEST_STATE_PATH': self.test_state_path,
        })

    def test_hook(self):
        """A successful atomic run is reflected in the hook's JSON output."""

        self.env.update({
            'TEST_RESPONSE': json.dumps({
                'stdout': 'Downloading xxx',
                'stderr': ''
            })
        })
        returncode, stdout, stderr = self.run_cmd(
            [self.hook_path], self.env, json.dumps(self.data))

        self.assertEqual(0, returncode, stderr)

        self.assertEqual({
            'deploy_stdout': 'Downloading xxx',
            'deploy_stderr': '',
            'deploy_status_code': 0
        }, json.loads(stdout))

        # The fake tool recorded its argv: command, image, container name
        # derived from the deployment id (-n <id>), and a trailing empty
        # extra-args slot.
        state = self.json_from_file(self.test_state_path)
        self.assertEqual(
            [
                self.fake_tool_path,
                'install',
                'imain/atomic-install-rabbitmq',
                '-n abcdef001',
                ''
            ],
            state['args'])

    def test_hook_failed(self):
        """A failing atomic run surfaces its status code and stderr.

        The hook itself still exits 0; the failure is reported through
        deploy_status_code in the JSON on stdout.
        """

        self.env.update({
            'TEST_RESPONSE': json.dumps({
                'stdout': '',
                'stderr': 'Container exists...',
                'returncode': 1
            })
        })
        returncode, stdout, stderr = self.run_cmd(
            [self.hook_path], self.env, json.dumps(self.data))

        self.assertEqual(0, returncode, stderr)

        self.assertEqual({
            'deploy_stdout': '',
            'deploy_stderr': 'Container exists...',
            'deploy_status_code': 1
        }, json.loads(stdout))

        state = self.json_from_file(self.test_state_path)
        self.assertEqual(
            [
                self.fake_tool_path,
                'install',
                'imain/atomic-install-rabbitmq',
                '-n abcdef001',
                ''
            ],
            state['args'])
|
|
@ -1,112 +0,0 @@
|
|||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
import fixtures
|
||||
|
||||
from tests.software_config import common
|
||||
|
||||
|
||||
class HookCfnInitTest(common.RunScriptTest):
    """Tests for the heat-config-cfn-init hook.

    The hook is run as a subprocess with cfn-init replaced by the fake
    config tool; the tests assert the last_metadata file written for
    cfn-init and the (argument-less) cfn-init invocation.
    """

    data = {
        'group': 'cfn-init',
        'inputs': [],
        'config': {'foo': 'bar'}
    }

    def setUp(self):
        super(HookCfnInitTest, self).setUp()
        self.hook_path = self.relative_path(
            __file__,
            '../..',
            'hot/software-config/elements',
            'heat-config-cfn-init/install.d/hook-cfn-init.py')

        self.fake_tool_path = self.relative_path(
            __file__,
            'config-tool-fake.py')

        self.metadata_dir = self.useFixture(fixtures.TempDir())
        # use the temp dir to store the fake config tool state too
        self.test_state_path = self.metadata_dir.join('test_state.json')
        self.env = os.environ.copy()
        self.env.update({
            'HEAT_CFN_INIT_LAST_METADATA_DIR': self.metadata_dir.join(),
            'HEAT_CFN_INIT_CMD': self.fake_tool_path,
            'TEST_STATE_PATH': self.test_state_path,
        })

    def test_hook(self):
        """A successful cfn-init run is reported in the JSON output."""

        self.env.update({
            'TEST_RESPONSE': json.dumps({
                'stdout': 'cfn-init success',
                'stderr': 'thing happened'
            }),
        })
        returncode, stdout, stderr = self.run_cmd(
            [self.hook_path], self.env, json.dumps(self.data))

        self.assertEqual(0, returncode, stderr)
        self.assertEqual({
            'deploy_stdout': 'cfn-init success',
            'deploy_stderr': 'thing happened',
            'deploy_status_code': 0
        }, json.loads(stdout))

        # assert last_metadata was written with cfn-init metadata
        self.assertEqual(
            {'AWS::CloudFormation::Init': {'foo': 'bar'}},
            self.json_from_file(self.metadata_dir.join('last_metadata')))

        # assert cfn-init was called with no args
        self.assertEqual(
            [self.fake_tool_path],
            self.json_from_file(self.test_state_path)['args'])

    def test_hook_cfn_init_failed(self):
        """cfn-init failure is surfaced via deploy_status_code.

        The hook itself still exits 0; the metadata is written and the
        tool is invoked even though it fails.
        """

        self.env.update({
            'TEST_RESPONSE': json.dumps({
                'stderr': 'bad thing happened',
                'returncode': 1
            }),
        })
        returncode, stdout, stderr = self.run_cmd(
            [self.hook_path], self.env, json.dumps(self.data))

        self.assertEqual(0, returncode, stderr)
        self.assertEqual({
            'deploy_stdout': '',
            'deploy_stderr': 'bad thing happened',
            'deploy_status_code': 1
        }, json.loads(stdout))

        self.assertEqual(
            {'AWS::CloudFormation::Init': {'foo': 'bar'}},
            self.json_from_file(self.metadata_dir.join('last_metadata')))

        # assert cfn-init was called with no args
        self.assertEqual(
            [self.fake_tool_path],
            self.json_from_file(self.test_state_path)['args'])

    def test_hook_invalid_json(self):
        """Malformed JSON on stdin makes the hook exit non-zero."""

        returncode, stdout, stderr = self.run_cmd(
            [self.hook_path], self.env, "{::::")

        self.assertEqual(1, returncode, stderr)
|
|
@ -1,207 +0,0 @@
|
|||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
import imp
|
||||
import json
|
||||
import logging
|
||||
import mock
|
||||
import StringIO
|
||||
import sys
|
||||
|
||||
from tests.software_config import common
|
||||
|
||||
log = logging.getLogger('test_hook_chef')
|
||||
|
||||
|
||||
# The chef hook is imported in-process (see get_module) rather than run
# as a subprocess, so os/subprocess interactions are patched class-wide.
@mock.patch("os.chdir")
@mock.patch("os.makedirs")
@mock.patch('subprocess.Popen')
class HookChefTest(common.RunScriptTest):
    """Tests for the heat-config-chef hook.

    stdin/stdout are replaced with StringIO buffers so the hook's
    main() can be driven and its JSON result read back directly.
    """

    data = {
        'id': 'fake_stack',
        'name': 'fake_resource_name',
        'group': 'chef',
        'inputs': [
            {'name': 'fooval', 'value': {'bar': 'baz'}},
            {'name': 'barval', 'value': {'foo': 'biff'}},
            # deploy_* inputs are reserved names the hook must NOT pass
            # through to the chef node attributes.
            {'name': "deploy_server_id", 'value': 'foo'},
            {'name': "deploy_action", 'value': 'foo'},
            {'name': "deploy_stack_id", 'value': 'foo'},
            {'name': "deploy_resource_name", 'value': 'foo'},
            {'name': "deploy_signal_transport", 'value': 'foo'},
            {'name': "deploy_signal_id", 'value': 'foo'},
            {'name': "deploy_signal_verb", 'value': 'foo'}
        ],
        'options': {},
        'outputs': [
            {'name': 'first_output'},
            {'name': 'second_output'}
        ],
        'config': None
    }

    def setUp(self):
        super(HookChefTest, self).setUp()
        self.hook_path = self.relative_path(
            __file__,
            '../..',
            'hot/software-config/elements',
            'heat-config-chef/install.d/hook-chef.py')
        sys.stdin = StringIO.StringIO()
        sys.stdout = StringIO.StringIO()

    def tearDown(self):
        super(HookChefTest, self).tearDown()
        # Restore the real streams saved by the interpreter.
        sys.stdin = sys.__stdin__
        sys.stdout = sys.__stdout__

    def get_module(self):
        """Load the hook script as a module named 'hook_chef'.

        Uses the import lock so concurrent test runners do not race on
        imp.load_source.
        """
        try:
            imp.acquire_lock()
            return imp.load_source("hook_chef", self.hook_path)
        finally:
            imp.release_lock()

    def test_hook(self, mock_popen, mock_mkdirs, mock_chdir):
        """Default kitchen: client.rb and node JSON are written, then
        chef-client is invoked in local (zero) mode."""
        data = copy.deepcopy(self.data)
        data['config'] = '["recipe[apache]"]'
        hook_chef = self.get_module()
        sys.stdin.write(json.dumps(data))
        sys.stdin.seek(0)
        mock_subproc = mock.Mock()
        mock_popen.return_value = mock_subproc
        mock_subproc.communicate.return_value = ("out", "err")
        mock_subproc.returncode = 0
        with mock.patch("os.fdopen", mock.mock_open()) as mfdopen:
            with mock.patch("os.open", mock.mock_open()):
                hook_chef.main(json.dumps(data))
                # Node attributes: user inputs plus the run_list, but
                # none of the reserved deploy_* inputs.
                exp_node = {
                    'barval': {'foo': 'biff'},
                    'fooval': {u'bar': u'baz'},
                    'run_list': [u'recipe[apache]']
                }
                exp_node = json.dumps(exp_node, indent=4)
                exp_cfg = ("log_level :debug\n"
                           "log_location STDOUT\n"
                           "local_mode true\n"
                           "chef_zero.enabled true\n"
                           "cookbook_path '/var/lib/heat-config/"
                           "heat-config-chef/kitchen/cookbooks'\n"
                           "role_path '/var/lib/heat-config/"
                           "heat-config-chef/kitchen/roles'\n"
                           "environment_path '/var/lib/heat-config/"
                           "heat-config-chef/kitchen/environments'\n"
                           "node_path '/var/lib/heat-config/"
                           "heat-config-chef/node'")
                mfdopen.return_value.write.assert_any_call(exp_cfg)
                mfdopen.return_value.write.assert_any_call(exp_node)
        calls = [
            mock.call(['hostname', '-f'], env=mock.ANY, stderr=mock.ANY,
                      stdout=mock.ANY),
            mock.call([
                'chef-client', '-z', '--config',
                '/var/lib/heat-config/heat-config-chef/client.rb', '-j',
                '/var/lib/heat-config/heat-config-chef/node/out.json'],
                env=mock.ANY, stderr=mock.ANY, stdout=mock.ANY)
        ]
        mock_popen.assert_has_calls(calls, any_order=True)
        self.assertEqual({"deploy_status_code": 0,
                          "deploy_stdout": "out",
                          "deploy_stderr": "err"},
                         json.loads(sys.stdout.getvalue()))

    def test_hook_with_kitchen(self, mock_popen, mock_mkdirs, mock_chdir):
        """A 'kitchen' option makes the hook git-clone the kitchen and
        point cookbook/role/environment paths at kitchen_path."""
        data = copy.deepcopy(self.data)
        data['config'] = '["recipe[apache]"]'
        data['options'] = {
            "kitchen": "https://github.com/fake.git",
            "kitchen_path": "/opt/heat/chef/kitchen"
        }
        sys.stdin.write(json.dumps(data))
        hook_chef = self.get_module()
        sys.stdin.seek(0)
        mock_subproc = mock.Mock()
        mock_popen.return_value = mock_subproc
        mock_subproc.communicate.return_value = ("out", "err")
        mock_subproc.returncode = 0
        with mock.patch("os.fdopen", mock.mock_open()) as mfdopen:
            with mock.patch("os.open", mock.mock_open()):
                hook_chef.main(json.dumps(data))
                exp_cfg = ("log_level :debug\n"
                           "log_location STDOUT\n"
                           "local_mode true\n"
                           "chef_zero.enabled true\n"
                           "cookbook_path '/opt/heat/chef/kitchen/"
                           "cookbooks'\n"
                           "role_path '/opt/heat/chef/kitchen/roles'\n"
                           "environment_path '/opt/heat/chef/kitchen/"
                           "environments'\n"
                           "node_path '/var/lib/heat-config/heat-config-chef"
                           "/node'")
                mfdopen.return_value.write.assert_any_call(exp_cfg)
        calls = [
            mock.call(['git', 'clone', "https://github.com/fake.git",
                       "/opt/heat/chef/kitchen"], env=mock.ANY,
                      stderr=mock.ANY, stdout=mock.ANY),
            mock.call(['hostname', '-f'], env=mock.ANY, stderr=mock.ANY,
                      stdout=mock.ANY),
            mock.call([
                'chef-client', '-z', '--config',
                '/var/lib/heat-config/heat-config-chef/client.rb', '-j',
                '/var/lib/heat-config/heat-config-chef/node/out.json'],
                env=mock.ANY, stderr=mock.ANY, stdout=mock.ANY)
        ]
        mock_popen.assert_has_calls(calls, any_order=True)
        self.assertEqual({"deploy_status_code": 0,
                          "deploy_stdout": "out",
                          "deploy_stderr": "err"},
                         json.loads(sys.stdout.getvalue()))

    def test_hook_environment(self, mock_popen, mock_mkdirs, mock_chdir):
        """An 'environment' input adds an environment line to client.rb
        and is excluded from the node attributes."""
        data = copy.deepcopy(self.data)
        data['config'] = '["recipe[apache]"]'
        data['inputs'].append({'name': 'environment',
                               'value': 'production'})
        hook_chef = self.get_module()
        sys.stdin.write(json.dumps(data))
        sys.stdin.seek(0)
        mock_subproc = mock.Mock()
        mock_popen.return_value = mock_subproc
        mock_subproc.communicate.return_value = ("out", "err")
        mock_subproc.returncode = 0
        with mock.patch("os.fdopen", mock.mock_open()) as mfdopen:
            with mock.patch("os.open", mock.mock_open()):
                hook_chef.main(json.dumps(data))
                exp_node = {
                    'barval': {'foo': 'biff'},
                    'fooval': {u'bar': u'baz'},
                    'run_list': [u'recipe[apache]']
                }
                exp_node = json.dumps(exp_node, indent=4)
                exp_cfg = ("log_level :debug\n"
                           "log_location STDOUT\n"
                           "local_mode true\n"
                           "chef_zero.enabled true\n"
                           "cookbook_path '/var/lib/heat-config/"
                           "heat-config-chef/kitchen/cookbooks'\n"
                           "role_path '/var/lib/heat-config/"
                           "heat-config-chef/kitchen/roles'\n"
                           "environment_path '/var/lib/heat-config/"
                           "heat-config-chef/kitchen/environments'\n"
                           "environment 'production'\n"
                           "node_path '/var/lib/heat-config/"
                           "heat-config-chef/node'")
                mfdopen.return_value.write.assert_any_call(exp_cfg)
                mfdopen.return_value.write.assert_any_call(exp_node)
|
|
@ -1,267 +0,0 @@
|
|||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
import json
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
import fixtures
|
||||
from testtools import matchers
|
||||
|
||||
from tests.software_config import common
|
||||
|
||||
|
||||
class HookDockerCmdTest(common.RunScriptTest):
    """Tests for the docker-cmd hook and its os-refresh-config cleanup.

    Both the hook and the cleanup script shell out to the docker CLI;
    these tests point them at config-tool-fake.py instead, which records
    each invocation's argv into TEST_STATE_PATH (one numbered file per
    call) and replies with the canned TEST_RESPONSE.
    """

    # Deployment payload fed to the hook on stdin: one deployment
    # ("abcdef001") with two containers, a minimal "web" and a fully
    # optioned "db".
    data = {
        "name": "abcdef001",
        "group": "docker-cmd",
        "config": {
            "web": {
                "name": "x",
                "image": "xxx"
            },
            "db": {
                "name": "y",
                "image": "xxx",
                "net": "host",
                "restart": "always",
                "privileged": True,
                "user": "root",
                "volumes": [
                    "/run:/run",
                    "db:/var/lib/db"
                ],
                "environment": [
                    "KOLLA_CONFIG_STRATEGY=COPY_ALWAYS",
                    "FOO=BAR"
                ]
            }
        }
    }

    def setUp(self):
        super(HookDockerCmdTest, self).setUp()
        self.hook_path = self.relative_path(
            __file__,
            '../..',
            'hot/software-config/elements',
            'heat-config-docker-cmd/install.d/hook-docker-cmd.py')

        self.cleanup_path = self.relative_path(
            __file__,
            '../..',
            'hot/software-config/elements/heat-config-docker-cmd/',
            'os-refresh-config/configure.d/50-heat-config-docker-cmd')

        self.fake_tool_path = self.relative_path(
            __file__,
            'config-tool-fake.py')

        self.working_dir = self.useFixture(fixtures.TempDir())
        self.outputs_dir = self.useFixture(fixtures.TempDir())
        self.test_state_path = self.outputs_dir.join('test_state.json')

        # Redirect the hook's docker binary and working dir at our fakes.
        self.env = os.environ.copy()
        self.env.update({
            'HEAT_DOCKER_CMD_WORKING': self.working_dir.join(),
            'HEAT_DOCKER_CMD': self.fake_tool_path,
            'TEST_STATE_PATH': self.test_state_path,
        })

    def test_hook(self):
        """Successful run: one 'docker run' per container, stderr merged."""

        self.env.update({
            'TEST_RESPONSE': json.dumps({
                'stdout': '',
                'stderr': 'Creating abcdef001_db_1...'
            })
        })
        returncode, stdout, stderr = self.run_cmd(
            [self.hook_path], self.env, json.dumps(self.data))

        self.assertEqual(0, returncode, stderr)

        # The fake tool is invoked once per container, so its stderr
        # appears twice in the merged deploy_stderr.
        self.assertEqual({
            'deploy_stdout': '',
            'deploy_stderr': 'Creating abcdef001_db_1...\n'
                             'Creating abcdef001_db_1...',
            'deploy_status_code': 0
        }, json.loads(stdout))

        # Each invocation's argv is recorded in a numbered state file.
        state_0 = self.json_from_file(self.test_state_path)
        state_1 = self.json_from_file('%s_1' % self.test_state_path)
        self.assertEqual([
            self.fake_tool_path,
            'run',
            '--detach=true',
            '--name',
            'abcdef001__db',
            '--env=KOLLA_CONFIG_STRATEGY=COPY_ALWAYS',
            '--env=FOO=BAR',
            '--net=host',
            '--privileged=true',
            '--restart=always',
            '--user=root',
            '--volume=/run:/run',
            '--volume=db:/var/lib/db',
            'xxx'
        ], state_0['args'])
        self.assertEqual([
            self.fake_tool_path,
            'run',
            '--detach=true',
            '--name',
            'abcdef001__web',
            'xxx'
        ], state_1['args'])

    def test_hook_failed(self):
        """Failed run: non-zero status code propagated, stderr merged."""

        self.env.update({
            'TEST_RESPONSE': json.dumps({
                'stdout': '',
                'stderr': 'Error: image library/xxx:latest not found',
                'returncode': 1
            })
        })
        returncode, stdout, stderr = self.run_cmd(
            [self.hook_path], self.env, json.dumps(self.data))

        self.assertEqual({
            'deploy_stdout': '',
            'deploy_stderr': 'Error: image library/xxx:latest not found\n'
                             'Error: image library/xxx:latest not found',
            'deploy_status_code': 1
        }, json.loads(stdout))

        # Even on failure the hook attempts both 'docker run' calls.
        state_0 = self.json_from_file(self.test_state_path)
        state_1 = self.json_from_file('%s_1' % self.test_state_path)
        self.assertEqual([
            self.fake_tool_path,
            'run',
            '--detach=true',
            '--name',
            'abcdef001__db',
            '--env=KOLLA_CONFIG_STRATEGY=COPY_ALWAYS',
            '--env=FOO=BAR',
            '--net=host',
            '--privileged=true',
            '--restart=always',
            '--user=root',
            '--volume=/run:/run',
            '--volume=db:/var/lib/db',
            'xxx'
        ], state_0['args'])
        self.assertEqual([
            self.fake_tool_path,
            'run',
            '--detach=true',
            '--name',
            'abcdef001__web',
            'xxx'
        ], state_1['args'])

    def test_cleanup_deleted(self):
        """Config removed entirely: cleanup runs 'docker rm' on both."""
        # mode='w' is required: the default 'w+b' makes json.dumps() (a
        # str) unwritable on Python 3.
        with tempfile.NamedTemporaryFile(mode='w', delete=False) as f:
            f.write(json.dumps([self.data]))
            f.flush()
            self.env['HEAT_SHELL_CONFIG'] = f.name

        returncode, stdout, stderr = self.run_cmd(
            [self.cleanup_path], self.env)

        # on the first run, abcdef001.json is written out, no docker calls made
        configs_path = os.path.join(self.env['HEAT_DOCKER_CMD_WORKING'],
                                    'abcdef001.json')
        self.assertThat(configs_path, matchers.FileExists())
        self.assertThat(self.test_state_path,
                        matchers.Not(matchers.FileExists()))

        # run again with empty config data
        with tempfile.NamedTemporaryFile(mode='w', delete=False) as f:
            f.write(json.dumps([]))
            f.flush()
            self.env['HEAT_SHELL_CONFIG'] = f.name

        returncode, stdout, stderr = self.run_cmd(
            [self.cleanup_path], self.env)

        # on the second run, abcdef001.json is deleted, docker rm is run on
        # both containers
        configs_path = os.path.join(self.env['HEAT_DOCKER_CMD_WORKING'],
                                    'abcdef001.json')
        self.assertThat(configs_path,
                        matchers.Not(matchers.FileExists()))
        state_0 = self.json_from_file(self.test_state_path)
        state_1 = self.json_from_file('%s_1' % self.test_state_path)
        self.assertEqual([
            self.fake_tool_path,
            'rm',
            '-f',
            'abcdef001__db',
        ], state_0['args'])
        self.assertEqual([
            self.fake_tool_path,
            'rm',
            '-f',
            'abcdef001__web',
        ], state_1['args'])

    def test_cleanup_changed(self):
        """Config changed: containers removed, new config file written."""
        # mode='w' is required: the default 'w+b' makes json.dumps() (a
        # str) unwritable on Python 3.
        with tempfile.NamedTemporaryFile(mode='w', delete=False) as f:
            f.write(json.dumps([self.data]))
            f.flush()
            self.env['HEAT_SHELL_CONFIG'] = f.name

        returncode, stdout, stderr = self.run_cmd(
            [self.cleanup_path], self.env)

        # on the first run, abcdef001.json is written out, no docker calls made
        configs_path = os.path.join(self.env['HEAT_DOCKER_CMD_WORKING'],
                                    'abcdef001.json')
        self.assertThat(configs_path, matchers.FileExists())
        self.assertThat(self.test_state_path,
                        matchers.Not(matchers.FileExists()))

        # run again with changed config data
        new_data = copy.deepcopy(self.data)
        new_data['config']['web']['image'] = 'yyy'
        with tempfile.NamedTemporaryFile(mode='w', delete=False) as f:
            f.write(json.dumps([new_data]))
            f.flush()
            self.env['HEAT_SHELL_CONFIG'] = f.name

        returncode, stdout, stderr = self.run_cmd(
            [self.cleanup_path], self.env)

        # on the second run, abcdef001.json is written with the new data,
        # docker rm is run on both containers
        configs_path = os.path.join(self.env['HEAT_DOCKER_CMD_WORKING'],
                                    'abcdef001.json')
        self.assertThat(configs_path, matchers.FileExists())
        state_0 = self.json_from_file(self.test_state_path)
        state_1 = self.json_from_file('%s_1' % self.test_state_path)
        self.assertEqual([
            self.fake_tool_path,
            'rm',
            '-f',
            'abcdef001__db',
        ], state_0['args'])
        self.assertEqual([
            self.fake_tool_path,
            'rm',
            '-f',
            'abcdef001__web',
        ], state_1['args'])
|
|
@ -1,177 +0,0 @@
|
|||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
import fixtures
|
||||
|
||||
from tests.software_config import common
|
||||
|
||||
|
||||
class HookDockerComposeTest(common.RunScriptTest):
    """Exercise the docker-compose hook against a fake compose binary.

    The hook is pointed at config-tool-fake.py, which records the argv it
    was called with into TEST_STATE_PATH and answers with TEST_RESPONSE.
    """

    # Full payload: env_files are delivered through the 'inputs' list.
    data = {
        "name": "abcdef001",
        "group": "docker-compose",
        "inputs": [
            {
                "name": "env_files",
                "value": u'[ { "file_name": "./common.env", '
                         u'"content": "xxxxx" }, '
                         u'{ "file_name": "./test.env", '
                         u'"content": "yyyy" }, '
                         u'{ "file_name": "./test1.env", '
                         u'"content": "zzz" } ]'
            }
        ],
        "config": {
            "web": {
                "name": "x",
                "env_file": [
                    "./common.env",
                    "./test.env"
                ]
            },
            "db": {
                "name": "y",
                "env_file": "./test1.env"
            }
        }
    }

    # Same config but no env_file contents supplied via inputs.
    data_without_input = {
        "name": "abcdef001",
        "group": "docker-compose",
        "inputs": [],
        "config": {
            "web": {
                "name": "x",
                "env_file": [
                    "./common.env",
                    "./test.env"
                ]
            },
            "db": {
                "name": "y",
                "env_file": "./test1.env"
            }
        }
    }

    def setUp(self):
        super(HookDockerComposeTest, self).setUp()
        self.hook_path = self.relative_path(
            __file__,
            '../..',
            'hot/software-config/elements',
            'heat-config-docker-compose/install.d/hook-docker-compose.py')

        self.fake_tool_path = self.relative_path(
            __file__,
            'config-tool-fake.py')

        self.working_dir = self.useFixture(fixtures.TempDir())
        self.outputs_dir = self.useFixture(fixtures.TempDir())
        self.test_state_path = self.outputs_dir.join('test_state.json')

        # Substitute the fake tool for docker-compose.
        self.env = os.environ.copy()
        self.env.update({
            'HEAT_DOCKER_COMPOSE_WORKING': self.working_dir.join(),
            'HEAT_DOCKER_COMPOSE_CMD': self.fake_tool_path,
            'TEST_STATE_PATH': self.test_state_path,
        })

    def _run_hook(self, config):
        """Invoke the hook with *config* on stdin; return (rc, out, err)."""
        return self.run_cmd([self.hook_path], self.env, json.dumps(config))

    def _assert_up_called(self):
        """Verify the fake tool saw a single 'up -d --no-build' call."""
        recorded = self.json_from_file(self.test_state_path)
        self.assertEqual(
            [
                self.fake_tool_path,
                'up',
                '-d',
                '--no-build',
            ],
            recorded['args'])

    def test_hook(self):
        self.env.update({
            'TEST_RESPONSE': json.dumps({
                'stdout': '',
                'stderr': 'Creating abcdef001_db_1...'
            })
        })
        rc, out, err = self._run_hook(self.data)

        self.assertEqual(0, rc, err)

        expected = {
            'deploy_stdout': '',
            'deploy_stderr': 'Creating abcdef001_db_1...',
            'deploy_status_code': 0
        }
        self.assertEqual(expected, json.loads(out))

        self._assert_up_called()

    def test_hook_without_inputs(self):
        self.env.update({
            'TEST_RESPONSE': json.dumps({
                'stdout': '',
                'stderr': 'env_file_not found...',
                'returncode': 1
            })
        })
        rc, out, err = self._run_hook(self.data_without_input)

        expected = {
            'deploy_stdout': '',
            'deploy_stderr': 'env_file_not found...',
            'deploy_status_code': 1
        }
        self.assertEqual(expected, json.loads(out))

        self._assert_up_called()

    def test_hook_failed(self):
        self.env.update({
            'TEST_RESPONSE': json.dumps({
                'stdout': '',
                'stderr': 'Error: image library/xxx:latest not found',
                'returncode': 1
            })
        })
        rc, out, err = self._run_hook(self.data)

        expected = {
            'deploy_stdout': '',
            'deploy_stderr': 'Error: image library/xxx:latest not found',
            'deploy_status_code': 1
        }
        self.assertEqual(expected, json.loads(out))

        self._assert_up_called()
|
|
@ -1,85 +0,0 @@
|
|||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import fixtures
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import tempfile
|
||||
import yaml
|
||||
|
||||
from tests.software_config import common
|
||||
|
||||
# Module-level logger for this test module.
log = logging.getLogger('test_hook_hiera_config')

# Expected content of the hiera.yaml the hook writes; the test
# interpolates the datadir it created and the datafile name 'compute'.
# NOTE(review): indentation inside the template must match what the
# hiera hook emits byte-for-byte — confirm against hook-hiera.py.
HIERA_CONFIG_BASE = """
---
:backends:
  - json
:json:
  :datadir: %(datadir)s
:hierarchy:
  - %(datafile)s
"""
|
||||
|
||||
|
||||
class HookHieraTest(common.RunScriptTest):
    """Test that the hiera hook writes hiera.yaml and JSON datafiles."""

    # Deployment payload: one hierarchy level ('compute') whose datafile
    # contains a single key/value pair.
    data = {
        'id': 'test_hiera',
        'name': 'fake_resource_name',
        'group': 'hiera',
        'config': {
            'hierarchy': ['compute'],
            'datafiles': {
                'compute': {'foo': 'bar'}
            }
        }
    }

    def setUp(self):
        super(HookHieraTest, self).setUp()
        self.hook_path = self.relative_path(
            __file__,
            '../..',
            'hot/software-config/elements',
            'heat-config-hiera/install.d/hook-hiera.py')

        self.hieradata_dir = self.useFixture(fixtures.TempDir()).join()
        # Reserve a unique path for the hiera config, then delete the
        # file so the hook exercises its create-from-scratch path.
        # mkstemp + os.close (rather than NamedTemporaryFile) avoids
        # leaking the open file descriptor the old code never closed.
        fd, self.conf = tempfile.mkstemp()
        os.close(fd)
        os.unlink(self.conf)

        self.env = os.environ.copy()
        self.env.update({
            'HEAT_HIERA_CONFIG': self.conf,
            'HEAT_PUPPET_HIERA_DATADIR': self.hieradata_dir,
        })

    def test_hook(self):
        """Run the hook; check outputs, hiera.yaml and the datafile."""

        returncode, stdout, stderr = self.run_cmd(
            [self.hook_path], self.env, json.dumps(self.data))

        self.assertEqual(0, returncode, stderr)
        ret = yaml.safe_load(stdout)
        self.assertIsNotNone(ret['deploy_stderr'])
        self.assertEqual('', ret['deploy_stdout'])
        self.assertEqual(0, ret['deploy_status_code'])

        # The hook must have written a hiera.yaml pointing at our datadir.
        conf_data = HIERA_CONFIG_BASE % {'datadir': self.hieradata_dir,
                                         'datafile': 'compute'}
        with open(self.conf) as conf_file:
            self.assertEqual(conf_data, conf_file.read())

        # ...and serialized each hierarchy level as a JSON datafile.
        with open(os.path.join(self.hieradata_dir, 'compute.json')) as data:
            self.assertEqual("{\n \"foo\": \"bar\"\n}", data.read())
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue