Cluster attributes added to CSV report

The cluster attributes list is synchronized with the actual Nailgun
attributes list. The PostgreSQL setup provided by the OpenStack infra is
used instead of the local database preparation script.

Change-Id: Id317f4db80459e55e62ab7885e0514d024bb7296
Closes-Bug: #1551249
Alexander Kislitsky 2016-02-29 18:04:46 +03:00
parent 154a24df0f
commit cd121801e6
4 changed files with 25 additions and 24 deletions


@@ -31,6 +31,7 @@ INSTALLATION_INFO_SKELETON = {
 'heat': None,
 'images_ceph': None,
 'images_vcenter': None,
+'ironic': None,
 'iser': None,
 'kernel_params': None,
 'libvirt_type': None,
@@ -38,6 +39,11 @@ INSTALLATION_INFO_SKELETON = {
 'mellanox_vf_num': None,
 'mongo': None,
 'murano': None,
+'murano-cfapi': None,
+'murano_glance_artifacts_plugin': None,
+'neutron_dvr': None,
+'neutron_l2_pop': None,
+'neutron_l3_ha': None,
 'nova_quota': None,
 'nsx': None,
 'nsx_replication': None,
@@ -45,11 +51,17 @@ INSTALLATION_INFO_SKELETON = {
 'objects_ceph': None,
 'osd_pool_size': None,
 'provision_method': None,
+'public_ssl_cert_source': None,
+'public_ssl_horizon': None,
+'public_ssl_services': None,
+'puppet_debug': None,
 'repos': None,
 'resume_guests_state_on_host_boot': None,
 'sahara': None,
 'syslog_transport': None,
+'task_deploy': None,
 'use_cow_images': None,
+'volumes_block_device': None,
 'vcenter': None,
 'vlan_splinters': None,
 'vlan_splinters_ovs': None,
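
For context on why extending this skeleton changes the report: the CSV
exporter appears to build its column set from the key paths of this
structure, so every attribute listed here, even with a None placeholder,
becomes a column such as attributes.ironic. A minimal sketch of that
flattening idea follows; the flatten_keys helper and the trimmed example
dict are illustrative only, not the project's export code.

# Illustrative only: turn a nested skeleton of attribute names into the
# key paths that become CSV column names.
def flatten_keys(skeleton, path=()):
    """Yield key paths such as ('attributes', 'ironic')."""
    for key, value in sorted(skeleton.items()):
        if isinstance(value, dict):
            for sub_path in flatten_keys(value, path + (key,)):
                yield sub_path
        else:
            yield path + (key,)


if __name__ == '__main__':
    skeleton = {'attributes': {'ironic': None,
                               'puppet_debug': None,
                               'task_deploy': None}}
    for key_path in flatten_keys(skeleton):
        print('.'.join(key_path))  # e.g. attributes.ironic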


@@ -59,6 +59,19 @@ class StatsToCsvExportTest(InstStructureTest, DbTest):
 csv_keys_paths)
 self.assertIn(['attributes', 'workloads_collector_enabled'],
 csv_keys_paths)
+self.assertIn(['attributes', 'ironic'], csv_keys_paths)
+self.assertIn(['attributes', 'murano-cfapi'], csv_keys_paths)
+self.assertIn(['attributes', 'murano_glance_artifacts_plugin'],
+csv_keys_paths)
+self.assertIn(['attributes', 'neutron_dvr'], csv_keys_paths)
+self.assertIn(['attributes', 'neutron_l2_pop'], csv_keys_paths)
+self.assertIn(['attributes', 'neutron_l3_ha'], csv_keys_paths)
+self.assertIn(['attributes', 'public_ssl_cert_source'], csv_keys_paths)
+self.assertIn(['attributes', 'public_ssl_horizon'], csv_keys_paths)
+self.assertIn(['attributes', 'public_ssl_services'], csv_keys_paths)
+self.assertIn(['attributes', 'puppet_debug'], csv_keys_paths)
+self.assertIn(['attributes', 'task_deploy'], csv_keys_paths)
 self.assertIn(['vmware_attributes', 'vmware_az_cinder_enable'],
 csv_keys_paths)
 self.assertIn(['vmware_attributes', 'vmware_az_nova_computes_num'],
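
Since the new checks all follow the same pattern, they could equally be
driven from a single list of attribute names. The self-contained unittest
sketch below shows that form; the class name, the stand-in csv_keys_paths
value, and the NEW_ATTRIBUTES tuple are illustrative and only restate what
the assertions above already cover.

import unittest


class NewAttributesExportExample(unittest.TestCase):
    # Restates the assertIn calls above as one loop over the attribute names.
    NEW_ATTRIBUTES = (
        'ironic', 'murano-cfapi', 'murano_glance_artifacts_plugin',
        'neutron_dvr', 'neutron_l2_pop', 'neutron_l3_ha',
        'public_ssl_cert_source', 'public_ssl_horizon',
        'public_ssl_services', 'puppet_debug', 'task_deploy',
    )

    def test_new_attributes_present(self):
        # Stand-in for the key paths the real test extracts from the export;
        # only its shape (a list of key-path lists) is taken from the diff.
        csv_keys_paths = [['attributes', name]
                          for name in self.NEW_ATTRIBUTES]
        for name in self.NEW_ATTRIBUTES:
            self.assertIn(['attributes', name], csv_keys_paths)


if __name__ == '__main__':
    unittest.main()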


@@ -1,22 +0,0 @@
-#!/bin/sh
-echo "Preparing pgpass file ${FUELSTAT_DB_ROOTPGPASS}"
-echo "*:*:*:${FUELSTAT_DB_ROOT}:${FUELSTAT_DB_ROOTPW}" > ${FUELSTAT_DB_ROOTPGPASS}
-chmod 600 ${FUELSTAT_DB_ROOTPGPASS}
-export PGPASSFILE=${FUELSTAT_DB_ROOTPGPASS}
-echo "Trying to find out if role ${FUELSTAT_DB_USER} exists"
-root_roles=$(psql -h 127.0.0.1 -U ${FUELSTAT_DB_ROOT} -t -c "SELECT 'HERE' from pg_roles where rolname='${FUELSTAT_DB_USER}'")
-if [[ ${root_roles} == *HERE ]];then
-echo "Role ${FUELSTAT_DB_USER} exists. Setting password ${FUELSTAT_DB_PW}"
-psql -h 127.0.0.1 -U ${FUELSTAT_DB_ROOT} -c "ALTER ROLE ${FUELSTAT_DB_USER} WITH SUPERUSER LOGIN PASSWORD '${FUELSTAT_DB_PW}'"
-else
-echo "Creating role ${FUELSTAT_DB_USER} with password ${FUELSTAT_DB_PASSWD}"
-psql -h 127.0.0.1 -U ${FUELSTAT_DB_ROOT} -c "CREATE ROLE ${FUELSTAT_DB_USER} WITH SUPERUSER LOGIN PASSWORD '${FUELSTAT_DB_PW}'"
-fi
-echo "Dropping database ${FUELSTAT_DB} if exists"
-psql -h 127.0.0.1 -U ${FUELSTAT_DB_ROOT} -c "DROP DATABASE IF EXISTS ${FUELSTAT_DB}"
-echo "Creating database ${FUELSTAT_DB}"
-psql -h 127.0.0.1 -U ${FUELSTAT_DB_ROOT} -c "CREATE DATABASE ${FUELSTAT_DB} OWNER ${FUELSTAT_DB_USER}"
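
The removed helper logged into the local PostgreSQL as the root role,
ensured the test role existed, and recreated the test database; with the
PostgreSQL setup provided by the OpenStack infra those steps are no longer
run from tox. For anyone running the tests outside that infrastructure, a
rough Python equivalent is sketched below, assuming psycopg2 is installed
and the same FUELSTAT_DB_* environment variables are set; it is not part of
this change.

# Rough, optional equivalent of the deleted shell helper, for local runs only.
# Assumes psycopg2 and the FUELSTAT_DB_* environment variables shown above.
import os

import psycopg2

db = os.environ['FUELSTAT_DB']
user = os.environ['FUELSTAT_DB_USER']
password = os.environ['FUELSTAT_DB_PW']

conn = psycopg2.connect(host='127.0.0.1', dbname='postgres',
                        user=os.environ['FUELSTAT_DB_ROOT'],
                        password=os.environ['FUELSTAT_DB_ROOTPW'])
conn.autocommit = True  # CREATE/DROP DATABASE cannot run in a transaction
cur = conn.cursor()

cur.execute("SELECT 1 FROM pg_roles WHERE rolname = %s", (user,))
role_sql = "ALTER ROLE" if cur.fetchone() else "CREATE ROLE"
# Identifiers are interpolated directly, mirroring the shell script; the
# password is passed as a bound parameter.
cur.execute("{0} {1} WITH SUPERUSER LOGIN PASSWORD %s".format(role_sql, user),
            (password,))
cur.execute("DROP DATABASE IF EXISTS {0}".format(db))
cur.execute("CREATE DATABASE {0} OWNER {1}".format(db, user))

cur.close()
conn.close()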


@@ -16,12 +16,10 @@ setenv = VIRTUAL_ENV={envdir}
 FUELSTAT_DB_PW=openstack_citest
 FUELSTAT_DB_ROOT=postgres
 FUELSTAT_DB_ROOTPW=insecure_slave
-FUELSTAT_DB_ROOTPGPASS={toxinidir}/pgpass
 passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY
 deps =
 -r{toxinidir}/test-requirements.txt
 commands =
-bash "{toxinidir}/tools/prepare_database.sh"
 bash "{toxinidir}/tools/prepare_ci_config.sh"
 python {toxinidir}/collector/manage_collector.py --mode test db upgrade \
 -d {toxinidir}/collector/collector/api/db/migrations/
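
The FUELSTAT_DB_* values kept in setenv describe how the tests reach the
infra-provided PostgreSQL. As an illustration of how such settings might be
consumed, a hypothetical helper that composes a database URI from them is
sketched below; the build_dsn name, the host and port defaults, and the
placeholder values in the usage block are assumptions, not project code.

# Hypothetical helper: compose a postgresql:// URI from the FUELSTAT_DB_*
# environment variables kept in tox.ini. Host, port and the placeholder
# defaults in the usage block are assumptions for a local run.
import os


def build_dsn(host='127.0.0.1', port=5432):
    return 'postgresql://{0}:{1}@{2}:{3}/{4}'.format(
        os.environ['FUELSTAT_DB_USER'],
        os.environ['FUELSTAT_DB_PW'],
        host, port,
        os.environ['FUELSTAT_DB'])


if __name__ == '__main__':
    # Placeholder values so the sketch runs standalone.
    os.environ.setdefault('FUELSTAT_DB_USER', 'openstack_citest')
    os.environ.setdefault('FUELSTAT_DB_PW', 'openstack_citest')
    os.environ.setdefault('FUELSTAT_DB', 'openstack_citest')
    print(build_dsn())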