Clean up integration test script

This PS:

* adds a trap to clean up OSH, which is deployed in the course of
  the integration tests. Node cleanup in Jenkins appears to hang,
  so this is an attempt to ameliorate that (see the sketch before
  the diffs below)
* creates a deckhand.conf.test to be used by functional and
  integration tests instead of writing it out dynamically [0]
* updates logging.conf.sample to dump logs to stdout/stderr by
  default, which is better suited to running in containers
* makes test_gabbi.py common between functional and integration
  tests to avoid unnecessary code duplication

[0] review comments in https://review.gerrithub.io/#/c/att-comdev/deckhand/+/407638/

Change-Id: I762fb0bde5f75effcde56316d92bd57b30026995
Felipe Monteiro 2018-04-18 19:48:00 -04:00 committed by Scott Hussey
parent 6b56f1d1ab
commit c094b16ff6
8 changed files with 116 additions and 190 deletions
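For reference, a minimal, illustrative sketch of the pattern these changes converge on (an EXIT trap for cleanup, a static config template rendered with envsubst, and the shared common/test_gabbi.py runner selected via DECKHAND_TESTS_DIR) is shown here. It is not one of the scripts modified below: the DATABASE_URL and DECKHAND_TEST_URL values are placeholders, and only the paths and variable names that also appear in the diffs are taken from the change.

#!/bin/bash
# Illustrative sketch only; not one of the scripts modified in this change.
set -xe

function cleanup_osh {
    # Best-effort teardown of the OSH deployment so that Jenkins node
    # cleanup does not hang on leftover namespaces.
    if [ -n "$(command -v kubectl)" ]; then
        kubectl delete namespace openstack || true
        kubectl delete namespace ucp || true
    fi
}
trap cleanup_osh EXIT

# Render the static config template instead of writing it out dynamically.
CONF_DIR=$(mktemp -d -p "$(pwd)")
export DATABASE_URL="postgresql+psycopg2://deckhand:password@127.0.0.1:5432/deckhand"  # placeholder
envsubst '${DATABASE_URL}' < deckhand/tests/deckhand.conf.test > "${CONF_DIR}/deckhand.conf"

# Point the shared runner at the desired gabbit suite and run it.
export DECKHAND_TESTS_DIR="$(pwd)/deckhand/tests/functional/gabbits"
export DECKHAND_TEST_URL="http://localhost:9000"  # placeholder
py.test -svx deckhand/tests/common/test_gabbi.py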

View File

@@ -12,6 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test runner for functional and integration tests."""
import atexit
import os
import shutil
@@ -22,7 +24,7 @@ from gabbi import driver
from gabbi.driver import test_pytest # noqa
from gabbi.handlers import jsonhandler
TEST_DIR = tempfile.mkdtemp(prefix='deckhand')
TEST_DIR = None
def __create_temp_test_dir():
@@ -31,7 +33,11 @@ def __create_temp_test_dir():
in which all the test files are contained in one directory.
"""
root_test_dir = os.path.join(os.path.dirname(__file__), 'gabbits')
global TEST_DIR
TEST_DIR = tempfile.mkdtemp(prefix='deckhand')
root_test_dir = os.getenv('DECKHAND_TESTS_DIR', 'gabbits')
test_files = []
for root, dirs, files in os.walk(root_test_dir):
@@ -59,7 +65,9 @@ __create_temp_test_dir()
@atexit.register
def __remove_temp_test_dir():
if os.path.exists(TEST_DIR):
global TEST_DIR
if TEST_DIR is not None and os.path.exists(TEST_DIR):
shutil.rmtree(TEST_DIR)
@@ -84,13 +92,15 @@ class MultidocJsonpaths(jsonhandler.JSONHandler):
# NOTE: The simple approach to handling dictionary versus list response
# bodies is to always parse the response body as a list and index into
# the first element using [0] throughout the tests.
return list(yaml.safe_load_all(string))
return list(yaml.load_all(string))
def pytest_generate_tests(metafunc):
# NOTE(fmontei): While only `url` or `host` is needed, strangely both
# are needed because we use `pytest-html` which throws an error without
# `host`.
global TEST_DIR
driver.py_test_generator(
TEST_DIR, url=os.environ['DECKHAND_TEST_URL'], host='localhost',
# NOTE(fmontei): When there are multiple handlers listed that accept

View File

@@ -0,0 +1,37 @@
[DEFAULT]
debug = true
publish_errors = true
use_stderr = true
# NOTE: allow_anonymous_access allows these functional tests to get around
# Keystone authentication, but the context that is provided has zero privileges
# so we must also override the policy file for authorization to pass.
allow_anonymous_access = true
[oslo_policy]
policy_file = policy.yaml
[barbican]
[database]
connection = ${DATABASE_URL}
[keystone_authtoken]
# NOTE(fmontei): Values taken from clouds.yaml. Values only used for
# integration testing.
#
# clouds.yaml (snippet):
#
# username: 'admin'
# password: 'password'
# project_name: 'admin'
# project_domain_name: 'default'
# user_domain_name: 'default'
# auth_url: 'http://keystone.openstack.svc.cluster.local/v3'
username = admin
password = password
project_name = admin
project_domain_name = Default
user_domain_name = Default
auth_url = http://keystone.openstack.svc.cluster.local/v3
auth_type = password

View File

@@ -1,60 +0,0 @@
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import yaml
from gabbi import driver
from gabbi.driver import test_pytest # noqa
from gabbi.handlers import jsonhandler
TESTS_DIR = 'gabbits'
# This is quite similar to the existing JSONHandler, so use it as the base
# class instead of `gabbi.handlers.base.ContentHandler`.
class MultidocJsonpaths(jsonhandler.JSONHandler):
test_key_suffix = 'multidoc_jsonpaths'
@staticmethod
def accepts(content_type):
content_type = content_type.split(';', 1)[0].strip()
return (content_type.endswith('+yaml') or
content_type.startswith('application/yaml') or
content_type.startswith('application/x-yaml'))
@staticmethod
def dumps(data, pretty=False, test=None):
return yaml.safe_dump_all(data)
@staticmethod
def loads(string):
# NOTE: The simple approach to handling dictionary versus list response
# bodies is to always parse the response body as a list and index into
# the first element using [0] throughout the tests.
return list(yaml.safe_load_all(string))
def pytest_generate_tests(metafunc):
test_dir = os.path.join(os.path.dirname(__file__), TESTS_DIR)
# NOTE(fmontei): While only `url` or `host` is needed, strangely both
# are needed because we use `pytest-html` which throws an error without
# `host`.
driver.py_test_generator(
test_dir, url=os.environ['DECKHAND_TEST_URL'], host='localhost',
# NOTE(fmontei): When there are multiple handlers listed that accept
# the same content-type, the one that is earliest in the list will be
# used. Thus, we cannot specify multiple content handlers for handling
# list/dictionary responses from the server using different handlers.
content_handlers=[MultidocJsonpaths], metafunc=metafunc)

View File

@@ -1,25 +1,33 @@
[loggers]
keys = root, deckhand
keys = root, deckhand, error
[handlers]
keys = file, null, syslog
keys = null, stderr, stdout
[formatters]
keys = simple, context
[logger_deckhand]
level = DEBUG
handlers = file
handlers = stdout
qualname = deckhand
[logger_error]
level = ERROR
handlers = stderr
[logger_root]
level = WARNING
handlers = null
[handler_file]
class = FileHandler
level = DEBUG
args = ('deckhand.log', 'w+')
[handler_stderr]
class = StreamHandler
args = (sys.stderr,)
formatter = context
[handler_stdout]
class = StreamHandler
args = (sys.stdout,)
formatter = context
[handler_null]
@@ -27,11 +35,6 @@ class = logging.NullHandler
formatter = context
args = ()
[handler_syslog]
class = handlers.SysLogHandler
level = ERROR
args = ('/dev/log', handlers.SysLogHandler.LOG_USER)
[formatter_context]
class = oslo_log.formatters.ContextFormatter

View File

@@ -33,7 +33,7 @@ function deploy_postgre {
function gen_config {
set -xe
log_section Creating config directory and test deckhand.conf
log_section "Creating config directory and test deckhand.conf"
CONF_DIR=$(mktemp -d -p $(pwd))
sudo chmod 777 -R $CONF_DIR
@@ -43,108 +43,22 @@ function gen_config {
# Used by Deckhand's initialization script to search for config files.
export DECKHAND_CONFIG_DIR=$CONF_DIR
local conf_file=${CONF_DIR}/deckhand.conf
cp etc/deckhand/logging.conf.sample $CONF_DIR/logging.conf
envsubst '${DATABASE_URL}' < deckhand/tests/deckhand.conf.test > $conf_file
# Create a logging config file to dump everything to stdout/stderr.
cat <<EOCONF > $CONF_DIR/logging.conf
[loggers]
keys = root, deckhand, error
# Only set up logging if running Deckhand via uwsgi. The container already has
# values for logging.
if [ -z "$DECKHAND_IMAGE" ]; then
sed '1 a log_config_append = '"$CONF_DIR"'/logging.conf' $conf_file
fi
[handlers]
keys = null, stderr, stdout
[formatters]
keys = simple, context
[logger_deckhand]
level = DEBUG
handlers = stdout
qualname = deckhand
[logger_error]
level = ERROR
handlers = stderr
[logger_root]
level = WARNING
handlers = null
[handler_stderr]
class = StreamHandler
args = (sys.stderr,)
formatter = context
[handler_stdout]
class = StreamHandler
args = (sys.stdout,)
formatter = context
[handler_null]
class = logging.NullHandler
formatter = context
args = ()
[formatter_context]
class = oslo_log.formatters.ContextFormatter
[formatter_simple]
format=%(asctime)s.%(msecs)03d %(process)d %(levelname)s: %(message)s
EOCONF
# Create a Deckhand config file with bare minimum options.
cat <<EOCONF > $CONF_DIR/deckhand.conf
[DEFAULT]
debug = true
publish_errors = true
use_stderr = true
# NOTE: allow_anonymous_access allows these functional tests to get around
# Keystone authentication, but the context that is provided has zero privileges
# so we must also override the policy file for authorization to pass.
allow_anonymous_access = true
[oslo_policy]
policy_file = policy.yaml
[barbican]
[database]
connection = $DATABASE_URL
[keystone_authtoken]
# NOTE(fmontei): Values taken from clouds.yaml. Values only used for
# integration testing.
#
# clouds.yaml (snippet):
#
# username: 'admin'
# password: 'password'
# project_name: 'admin'
# project_domain_name: 'default'
# user_domain_name: 'default'
# auth_url: 'http://keystone.openstack.svc.cluster.local/v3'
username = admin
password = password
project_name = admin
project_domain_name = Default
user_domain_name = Default
auth_url = http://keystone.openstack.svc.cluster.local/v3
auth_type = password
EOCONF
# Only set up logging if running Deckhand via uwsgi. The container already has
# values for logging.
if [ -z "$DECKHAND_IMAGE" ]; then
sed '1 a log_config_append = '"$CONF_DIR"'/logging.conf' $CONF_DIR/deckhand.conf
fi
echo $CONF_DIR/deckhand.conf 1>&2
cat $CONF_DIR/deckhand.conf 1>&2
echo $conf_file 1>&2
cat $conf_file 1>&2
echo $CONF_DIR/logging.conf 1>&2
cat $CONF_DIR/logging.conf 1>&2
log_section Starting server
}
@@ -165,7 +79,7 @@ function gen_paste {
function gen_policy {
set -xe
log_section Creating policy file with liberal permissions
log_section "Creating policy file with liberal permissions"
policy_file='etc/deckhand/policy.yaml.sample'
policy_pattern="deckhand\:"

View File

@@ -26,12 +26,12 @@ function cleanup_deckhand {
if [ -n "$POSTGRES_ID" ]; then
sudo docker stop $POSTGRES_ID
fi
if [ -n "$DECKHAND_ID" ]; then
sudo docker stop $DECKHAND_ID
fi
if [ -d "$CONF_DIR" ]; then
rm -rf $CONF_DIR
fi
rm -rf $CONF_DIR
# Kill all processes and child processes (for example, if workers > 1)
# if using uwsgi only.
@@ -92,16 +92,16 @@ deploy_deckhand
log_section Running tests
# Create folder for saving HTML test results.
if [ ! -d $ROOTDIR/results ]; then
mkdir $ROOTDIR/results
fi
mkdir -p $ROOTDIR/results
export DECKHAND_TESTS_DIR=${ROOTDIR}/../deckhand/tests/functional/gabbits
set +e
posargs=$@
if [ ${#posargs} -ge 1 ]; then
py.test -k $1 -svx $( dirname $ROOTDIR )/deckhand/tests/functional/test_gabbi.py --html=results/index.html
py.test -k $1 -svx $( dirname $ROOTDIR )/deckhand/tests/common/test_gabbi.py --html=results/index.html
else
py.test -svx $( dirname $ROOTDIR )/deckhand/tests/functional/test_gabbi.py --html=results/index.html
py.test -svx $( dirname $ROOTDIR )/deckhand/tests/common/test_gabbi.py --html=results/index.html
fi
TEST_STATUS=$?
set -e

View File

@@ -19,24 +19,40 @@ CURRENT_DIR="$(pwd)"
: ${OSH_PATH:="../openstack-helm"}
function cleanup_osh {
set -xe
if [ -n "command -v kubectl" ]; then
kubectl delete namespace openstack
kubectl delete namespace ucp
fi
sudo systemctl disable kubelet --now
sudo systemctl stop kubelet
if [ -n "command -v docker" ]; then
sudo docker ps -aq | xargs -L1 -P16 sudo docker rm -f
fi
sudo rm -rf /var/lib/openstack-helm
}
function cleanup_deckhand {
set +e
if [ -n "$POSTGRES_ID" ]; then
sudo docker stop $POSTGRES_ID
fi
if [ -n "$DECKHAND_ID" ]; then
sudo docker stop $DECKHAND_ID
fi
if [ -d "$CONF_DIR" ]; then
rm -rf $CONF_DIR
fi
rm -rf $CONF_DIR
}
trap cleanup_deckhand EXIT
function deploy_barbican {
set -xe
@@ -58,6 +74,8 @@ function deploy_barbican {
function deploy_osh_keystone_barbican {
set -xe
trap cleanup_osh EXIT
if [ ! -d "$OSH_INFRA_PATH" ]; then
git clone https://git.openstack.org/openstack/openstack-helm-infra.git ../openstack-helm-infra
fi
@@ -98,6 +116,8 @@ function deploy_osh_keystone_barbican {
function deploy_deckhand {
set -xe
trap cleanup_deckhand EXIT
export OS_CLOUD=openstack_helm
cd ${CURRENT_DIR}
@@ -153,11 +173,13 @@ function deploy_deckhand {
function run_tests {
set +e
export DECKHAND_TESTS_DIR=${CURRENT_DIR}/deckhand/tests/integration/gabbits
posargs=$@
if [ ${#posargs} -ge 1 ]; then
py.test -k $1 -svx ${CURRENT_DIR}/deckhand/tests/integration/test_gabbi.py
py.test -k $1 -svx ${CURRENT_DIR}/deckhand/tests/common/test_gabbi.py
else
py.test -svx ${CURRENT_DIR}/deckhand/tests/integration/test_gabbi.py
py.test -svx ${CURRENT_DIR}/deckhand/tests/common/test_gabbi.py
fi
TEST_STATUS=$?