backport lib caps and test patches to fix stable gate

accommodate new oslo.config Ib8fe3f0e6f37f2ca5c0785e73b2c770b71fac9e6
commit 4637aeb2d3

fix oslo.db 4.15.0 breakage I69f8ff9f702064e8fc5bf4018ebc6f3b2a8ea1a8
commit f5794af695
also
commit b2d64d26ac
commit c062916a81

Fix expected content-type and move CORS tests to gabbi I0f23481d5f75694da23d05c9ef88005a0f2c27d7
note: don't include CORS test
commit 5a6d3935bc

Put the regex first Ibaffa3a8568ea058d964463df4b76196c4d2bc7a
commit e326a0612f

Fix some gabbi tests I9a60db296467b08ef32f956471ef5e4f81f462af
note: does not include change to live.yaml
commit 35c4a08dd4

Rename gabbits with _ to have - instead Ida8ce49582d5b8d927858982fae9fdfc267e4be4
commit 6980d7acf4

ceph: fix setup extra I21bfb7177c16b200dcbf1849176c50e1371128cc
commit f1021318e7

test: move root tests to their own class I68aca7822c781d915d167b152aab4d7deec07200
commit 10975c55ff

sql: default to pymysql I00f050423fd86ffd51d7ef2bb7f5e5928620f412
commit 99d91a0b0b

cap gnocchiclient
Change-Id: Ib2960423cc3b72e1b21a9d9da9b87cdede1ab44c
commit 7bf0e3540a
gord chung, 2017-02-01 09:26:58 -05:00 (committed by gordon chung)
parent 20c5886ef1
commit 3dd88b6eb3
22 changed files with 60 additions and 53 deletions


@@ -57,7 +57,7 @@ sudo chown -R tempest:stack $BASE/data/tempest
 # Run tests with tempst
 cd $BASE/new/tempest
 set +e
-sudo -H -u tempest OS_TEST_TIMEOUT=$TEMPEST_OS_TEST_TIMEOUT tox -eall-plugin -- --concurrency=$TEMPEST_CONCURRENCY gnocchi
+sudo -H -u tempest OS_TEST_TIMEOUT=$TEMPEST_OS_TEST_TIMEOUT tox -eall-plugin -- gnocchi --concurrency=$TEMPEST_CONCURRENCY
 TEMPEST_EXIT_CODE=$?
 set -e
 if [[ $TEMPEST_EXIT_CODE != 0 ]]; then


@@ -127,7 +127,7 @@ function install_gnocchiclient {
         git_clone_by_name python-gnocchiclient
         setup_dev_lib python-gnocchiclient
     else
-        pip_install gnocchiclient
+        pip_install 'gnocchiclient<3.0'
     fi
 }


@@ -36,7 +36,7 @@ except ImportError:
     pymysql = None
 import six
 import sqlalchemy
-import sqlalchemy.engine.url as sqlalchemy_url
+from sqlalchemy.engine import url as sqlalchemy_url
 import sqlalchemy.exc
 from sqlalchemy import types
 import sqlalchemy_utils
@@ -221,14 +221,27 @@ class SQLAlchemyIndexer(indexer.IndexerDriver):
     @classmethod
     def _create_new_database(cls, url):
         """Used by testing to create a new database."""
-        purl = sqlalchemy_url.make_url(url)
+        purl = sqlalchemy_url.make_url(
+            cls.dress_url(
+                url))
         purl.database = purl.database + str(uuid.uuid4()).replace('-', '')
         new_url = str(purl)
         sqlalchemy_utils.create_database(new_url)
         return new_url
 
+    @staticmethod
+    def dress_url(url):
+        # If no explicit driver has been set, we default to pymysql
+        if url.startswith("mysql://"):
+            url = sqlalchemy_url.make_url(url)
+            url.drivername = "mysql+pymysql"
+            return str(url)
+        return url
+
     def __init__(self, conf):
-        conf.set_override("connection", conf.indexer.url, "database")
+        conf.set_override("connection",
+                          self.dress_url(conf.indexer.url),
+                          "database")
         self.conf = conf
         self.facade = PerInstanceFacade(conf)
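
A quick sketch of what the new dress_url() helper above does (illustration only, not part of the diff; it assumes a pre-1.4 SQLAlchemy where URL objects are mutable, as on this branch): a bare mysql:// connection string gains an explicit pymysql driver, anything else passes through unchanged.

    from sqlalchemy.engine import url as sqlalchemy_url

    def dress_url(url):
        # Default bare mysql:// URLs to the pymysql driver.
        if url.startswith("mysql://"):
            parsed = sqlalchemy_url.make_url(url)
            parsed.drivername = "mysql+pymysql"
            return str(parsed)
        return url

    print(dress_url("mysql://root@localhost/gnocchi"))
    # mysql+pymysql://root@localhost/gnocchi
    print(dress_url("postgresql://root@localhost/gnocchi"))
    # postgresql://root@localhost/gnocchi (unchanged)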


@@ -28,6 +28,7 @@ import sqlalchemy.engine.url as sqlalchemy_url
 import sqlalchemy_utils
 
 from gnocchi import indexer
+from gnocchi.indexer import sqlalchemy
 from gnocchi.rest import app
 from gnocchi import service
 from gnocchi import storage
@@ -102,7 +103,9 @@ class ConfigFixture(fixture.GabbiFixture):
         # NOTE(jd) All of that is still very SQL centric but we only support
         # SQL for now so let's say it's good enough.
-        url = sqlalchemy_url.make_url(conf.indexer.url)
+        url = sqlalchemy_url.make_url(
+            sqlalchemy.SQLAlchemyIndexer.dress_url(
+                conf.indexer.url))
         url.database = url.database + str(uuid.uuid4()).replace('-', '')
         db_url = str(url)


@@ -441,6 +441,7 @@ tests:
       url: /v1/archive_policy
       method: POST
       request_headers:
+          content-type: application/json
           x-user-id: b45187c5-150b-4730-bcb2-b5e04e234220
           x-project-id: 16764ee0-bffe-4843-aa36-04b002cdbc7c
       data:


@@ -7,7 +7,7 @@ tests:
       desc: Root URL must return information about API versions
       url: /
       response_headers:
-          content-type: application/json; charset=UTF-8
+          content-type: /^application\/json/
       response_json_paths:
           $.versions.[0].id: "v1.0"
           $.versions.[0].status: "CURRENT"
@@ -25,7 +25,7 @@ tests:
           points: 20
       status: 201
       response_headers:
-          content-type: /application\/json/
+          content-type: /^application\/json/
           location: $SCHEME://$NETLOC/v1/archive_policy/test1
       response_json_paths:
           $.name: test1
@@ -95,7 +95,7 @@ tests:
       desc: Resources index page should return list of type associated with a URL
       url: /v1/resource/
       response_headers:
-          content-type: application/json; charset=UTF-8
+          content-type: /^application\/json/
       status: 200
       response_json_paths:
           $.generic: $SCHEME://$NETLOC/v1/resource/generic
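
Aside on the content-type assertions above (illustration only): gabbi treats a value wrapped in slashes as a regular expression, so anchoring on the media-type prefix keeps the test passing whether or not the server appends a charset parameter. Roughly the equivalent check in Python:

    import re

    content_type_ok = re.compile(r"^application/json")

    assert content_type_ok.match("application/json")
    assert content_type_ok.match("application/json; charset=UTF-8")
    assert not content_type_ok.match("text/html; charset=UTF-8")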


@@ -37,7 +37,7 @@ tests:
       status: 201
       response_headers:
           location: $SCHEME://$NETLOC/v1/resource/generic/f93450f2-d8a5-4d67-9985-02511241e7d1
-          content-type: application/json; charset=UTF-8
+          content-type: /^application\/json/
       response_json_paths:
           $.created_by_project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
           $.created_by_user_id: 0fbb231484614b1a80131fc22f6afc9c


@@ -102,8 +102,7 @@ tests:
       method: POST
       request_headers:
           content-type: plain/text
-      data:
-          archive_policy_name: cookies
+      data: '{"archive_policy_name": "cookies"}'
       status: 415
 
     - name: create valid metric


@@ -135,7 +135,7 @@ tests:
       status: 201
       response_headers:
           location: $SCHEME://$NETLOC/v1/resource/generic/f93450f2-d8a5-4d67-9985-02511241e7d1
-          content-type: application/json; charset=UTF-8
+          content-type: /^application\/json/
       response_json_paths:
           $.created_by_project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
           $.created_by_user_id: 0fbb231484614b1a80131fc22f6afc9c
@@ -163,11 +163,7 @@ tests:
           x-user-id: 0fbb231484614b1a80131fc22f6afc9c
           x-project-id: f3d41b770cc14f0bb94a1d5be9c0e3ea
           content-type: text/plain
-      data:
-          id: f93450f2-d8a5-4d67-9985-02511241e7d1
-          started_at: "2014-01-03T02:02:02.000000"
-          user_id: 0fbb231484614b1a80131fc22f6afc9c
-          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      data: '{"id": "f93450f2-d8a5-4d67-9985-02511241e7d1", "started_at": "2014-01-03T02:02:02.000000", "user_id": "0fbb231484614b1a80131fc22f6afc9c", "project_id": "f3d41b770cc14f0bb94a1d5be9c0e3ea"}'
       status: 415
 
 # Create a new instance resource, demonstrate that including no data
@@ -356,6 +352,7 @@
       request_headers:
           x-user-id: 0fbb231484614b1a80131fc22f6afc9c
           x-project-id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+          content-type: application/json
       data:
           host: compute2


@@ -15,6 +15,7 @@
 import abc
 
 import mock
+import oslo_db.exception
 from oslo_db.sqlalchemy import test_migrations
 import six
 import sqlalchemy_utils
@@ -45,6 +46,14 @@ class ModelsMigrationsSync(
         self.index = indexer.get_driver(self.conf)
         self.index.connect()
         self.index.upgrade(nocreate=True, create_legacy_resource_types=True)
+        self.addCleanup(self._drop_database)
+
+    def _drop_database(self):
+        try:
+            sqlalchemy_utils.drop_database(self.conf.indexer.url)
+        except oslo_db.exception.DBNonExistentDatabase:
+            # NOTE(sileht): oslo db >= 4.15.0 cleanup this for us
+            pass
 
     @staticmethod
     def get_metadata():
@@ -56,7 +65,3 @@ class ModelsMigrationsSync(
     @staticmethod
     def db_sync(engine):
         pass
-
-    def tearDown(self):
-        sqlalchemy_utils.drop_database(self.conf.indexer.url)
-        super(ModelsMigrationsSync, self).tearDown()
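
The cleanup change above in isolation (a sketch with a hypothetical helper name, not gnocchi code): oslo.db >= 4.15.0 already drops the temporary database itself, so an unconditional drop in tearDown() now fails; tolerating DBNonExistentDatabase keeps the test working with both old and new oslo.db.

    import oslo_db.exception
    import sqlalchemy_utils

    def drop_test_database(url):
        # Best-effort drop: newer oslo.db may already have removed the database.
        try:
            sqlalchemy_utils.drop_database(url)
        except oslo_db.exception.DBNonExistentDatabase:
            pass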


@@ -138,6 +138,12 @@ class RestTest(tests_base.TestCase, testscenarios.TestWithScenarios):
             indexer=self.index,
             auth=self.auth)
 
+    @staticmethod
+    def runTest():
+        pass
+
+
+class RootTest(RestTest):
     def test_deserialize_force_json(self):
         with self.app.use_admin_user():
             self.app.post(
@@ -170,10 +176,6 @@
         self.assertIs(type(status['storage']['summary']['metrics']), int)
         self.assertIs(type(status['storage']['summary']['measures']), int)
 
-    @staticmethod
-    def runTest():
-        pass
-
 
 class ArchivePolicyTest(RestTest):
     """Test the ArchivePolicies REST API.


@@ -35,7 +35,7 @@ class TestStatsd(tests_base.TestCase):
         super(TestStatsd, self).setUp()
 
         self.conf.set_override("resource_id",
-                               uuid.uuid4(), "statsd")
+                               str(uuid.uuid4()), "statsd")
         self.conf.set_override("user_id",
                                self.STATSD_USER_ID, "statsd")
         self.conf.set_override("project_id",


@@ -4,10 +4,9 @@ GNOCCHI_TEST_STORAGE_DRIVERS=${GNOCCHI_TEST_STORAGE_DRIVERS:-file}
 GNOCCHI_TEST_INDEXER_DRIVERS=${GNOCCHI_TEST_INDEXER_DRIVERS:-postgresql}
 for storage in ${GNOCCHI_TEST_STORAGE_DRIVERS}
 do
-    export GNOCCHI_TEST_STORAGE_DRIVER=$storage
     for indexer in ${GNOCCHI_TEST_INDEXER_DRIVERS}
     do
-        export GNOCCHI_TEST_INDEXER_DRIVER=$indexer
-        ./setup-test-env.sh ./tools/pretty_tox.sh $*
+        export GNOCCHI_TEST_STORAGE_DRIVER=$storage
+        pifpaf -g GNOCCHI_INDEXER_URL run $indexer -- ./tools/pretty_tox.sh $*
     done
 done


@@ -1,13 +0,0 @@
-#!/bin/bash
-set -e
-set -x
-# Activate pifpaf for indexer
-GNOCCHI_TEST_INDEXER_DRIVER=${GNOCCHI_TEST_INDEXER_DRIVER:-postgresql}
-eval `pifpaf run $GNOCCHI_TEST_INDEXER_DRIVER`
-kill_pifpaf ()
-{
-    test -n "$PIFPAF_PID" && kill "$PIFPAF_PID"
-}
-trap kill_pifpaf EXIT
-export GNOCCHI_INDEXER_URL=${PIFPAF_URL/#mysql:/mysql+pymysql:}
-$*


@@ -23,13 +23,13 @@ keystone =
     keystonemiddleware>=4.0.0
 mysql =
     pymysql
-    oslo.db>=1.8.0
+    oslo.db>=1.8.0,!=4.13.1,!=4.13.2,!=4.15.0
     sqlalchemy
     sqlalchemy-utils
     alembic>=0.7.6,!=0.8.1
 postgresql =
     psycopg2
-    oslo.db>=1.8.0
+    oslo.db>=1.8.0,!=4.13.1,!=4.13.2,!=4.15.0
     sqlalchemy
     sqlalchemy-utils
     alembic>=0.7.6,!=0.8.1
@@ -42,9 +42,9 @@ ceph =
     msgpack-python
     lz4
     tooz>=1.30
-ceph-pre-jewel:
+ceph_pre_jewel:
     cradox>=1.0.9
-ceph-jewel-and-later:
+ceph_jewel_and_later:
     python-rados>=10.1.0 # not available on pypi
 file =
     msgpack-python
@@ -57,7 +57,7 @@ doc =
     PyYAML
     Jinja2
 test =
-    pifpaf
+    pifpaf>=0.1.0
     gabbi>=0.101.2
     coverage>=3.6
     fixtures
@@ -67,6 +67,7 @@ test =
     os-testr
     testrepository
     testscenarios
+    testresources>=0.2.4 # Apache-2.0/BSD
     testtools>=0.9.38
     WebTest>=2.0.16
     doc8

tox.ini

@@ -47,15 +47,15 @@ commands = {toxinidir}/tools/pretty_tox.sh '{posargs}'
 deps = .[test,postgresql,file]
 setenv = OS_TEST_PATH=gnocchi/tests/gabbi
 basepython = python2.7
-commands = {toxinidir}/setup-test-env.sh {toxinidir}/tools/pretty_tox.sh '{posargs}'
+commands = pifpaf -g GNOCCHI_INDEXER_URL run postgresql -- {toxinidir}/tools/pretty_tox.sh '{posargs}'
 
 [testenv:py27-cover]
-commands = {toxinidir}/setup-test-env.sh python setup.py testr --coverage --testr-args="{posargs}"
+commands = pifpaf -g GNOCCHI_INDEXER_URL run postgresql -- python setup.py testr --coverage --testr-args="{posargs}"
 
 [testenv:venv]
 # This is used by the doc job on the gate
 deps = {[testenv:docs]deps}
-commands = {toxinidir}/setup-test-env.sh {posargs}
+commands = pifpaf -g GNOCCHI_INDEXER_URL run postgresql -- {posargs}
 
 [flake8]
 exclude = .tox,.eggs,doc
@@ -73,9 +73,9 @@ deps = .[test,postgresql,file,doc]
 setenv = GNOCCHI_TEST_STORAGE_DRIVER=file
          GNOCCHI_TEST_INDEXER_DRIVER=postgresql
 commands = doc8 --ignore-path doc/source/rest.rst doc/source
-           {toxinidir}/setup-test-env.sh python setup.py build_sphinx
+           pifpaf -g GNOCCHI_INDEXER_URL run postgresql -- python setup.py build_sphinx
 
 [testenv:docs-gnocchi.xyz]
 deps = .[file,postgresql,test,doc]
        sphinx_rtd_theme
-commands = {toxinidir}/setup-test-env.sh sphinx-build -D html_theme=sphinx_rtd_theme doc/source doc/build/html
+commands = pifpaf -g GNOCCHI_INDEXER_URL run postgresql -- sphinx-build -D html_theme=sphinx_rtd_theme doc/source doc/build/html