Add basic S3 tests (job binaries and data sources)

S3 tests are disabled by default because sahara-tests supports
different OpenStack releases. The new configuration key
can be removed when Queens is out of support.

Enable the S3 tests for master (and soon stable/rocky), excluding
the older branches (ocata, pike, queens).

Use the s3:// format (the officially advertised one) for data sources.

Depends-On: https://review.openstack.org/588361
Change-Id: I75be81cd16d6d9385a3300238ada1a4d852ca12c
This commit is contained in:
Luigi Toscano 2018-08-08 15:58:39 +02:00
parent 2e8e155673
commit a9233c5bd3
5 changed files with 151 additions and 5 deletions

View File

@ -51,6 +51,8 @@
data_processing:
test_image_name: xenial-server-cloudimg-amd64-disk1
test_ssh_user: ubuntu
data-processing-feature-enabled:
s3: 'True'
devstack_plugins:
sahara: git://git.openstack.org/openstack/sahara
heat: git://git.openstack.org/openstack/heat
@ -64,6 +66,20 @@
- ^releasenotes/.*$
- ^sahara_tests/.*$
# variant for pre-Rocky branches (no S3)
- job:
name: sahara-tests-tempest
branches:
- stable/ocata
- stable/pike
- stable/queens
vars:
devstack_local_conf:
test-config:
$TEMPEST_CONFIG:
data-processing-feature-enabled:
s3: 'False'
- job:
name: sahara-tests-scenario
description: |

View File

@ -0,0 +1,6 @@
---
features:
- |
Added basic S3 API tests (job binaries and data sources) to the Tempest
plugin. The tests are disabled by default and can be enabled using
a new tempest.conf key (data-processing-feature-enabled.s3).

View File

@ -71,5 +71,11 @@ data_processing_feature_group = cfg.OptGroup(
DataProcessingFeaturesGroup = [
cfg.ListOpt('plugins',
default=["vanilla", "cdh"],
help="List of enabled data processing plugins")
help="List of enabled data processing plugins"),
# delete this option and always execute the tests once Tempest and
# this Tempest plugin stop supporting Queens, the last release
# with missing or incomplete S3 support.
cfg.BoolOpt('s3',
default=False,
help='Does Sahara support S3?'),
]

View File

@ -12,14 +12,19 @@
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from testtools import testcase as tc
from tempest import config
from tempest.lib import decorators
from tempest.lib.common.utils import data_utils
from sahara_tempest_plugin.tests.api import base as dp_base
CONF = config.CONF
class DataSourceTest(dp_base.BaseDataProcessingTest):
@classmethod
def resource_setup(cls):
@ -36,6 +41,21 @@ class DataSourceTest(dp_base.BaseDataProcessingTest):
cls.swift_data_source = cls.swift_data_source_with_creds.copy()
del cls.swift_data_source['credentials']
cls.s3_data_source_with_creds = {
'url': 's3://sahara-bucket/input-source',
'description': 'Test data source',
'credentials': {
'accesskey': 'username',
'secretkey': 'key',
'endpoint': 'localhost',
'bucket_in_path': False,
'ssl': False
},
'type': 's3'
}
cls.s3_data_source = cls.s3_data_source_with_creds.copy()
del cls.s3_data_source['credentials']
cls.local_hdfs_data_source = {
'url': 'input-source',
'description': 'Test data source',
@ -65,6 +85,8 @@ class DataSourceTest(dp_base.BaseDataProcessingTest):
self.assertEqual(source_name, resp_body['name'])
if source_body['type'] == 'swift':
source_body = self.swift_data_source
elif source_body['type'] == 's3':
source_body = self.s3_data_source
self.assertDictContainsSubset(source_body, resp_body)
return resp_body['id'], source_name
@ -131,6 +153,44 @@ class DataSourceTest(dp_base.BaseDataProcessingTest):
self._create_data_source(self.swift_data_source_with_creds))
self._update_data_source(source_id)
@decorators.idempotent_id('54b68270-74d2-4c93-a324-09c2dccb1208')
@testtools.skipUnless(CONF.data_processing_feature_enabled.s3,
                      'S3 not available')
def test_s3_data_source_create(self):
    """Creating an S3 data source (credentials included) must succeed."""
    self._create_data_source(self.s3_data_source_with_creds)
@decorators.idempotent_id('5f67a8d1-e362-4204-88ec-674630a71019')
@testtools.skipUnless(CONF.data_processing_feature_enabled.s3,
                      'S3 not available')
def test_s3_data_source_list(self):
    """A freshly created S3 data source must appear in the listing."""
    created_info = self._create_data_source(self.s3_data_source_with_creds)
    self._list_data_sources(created_info)
@decorators.idempotent_id('84017749-b9d6-4542-9d12-1c73239e03b2')
@testtools.skipUnless(CONF.data_processing_feature_enabled.s3,
                      'S3 not available')
def test_s3_data_source_get(self):
    """Fetching an S3 data source by id must return its public fields."""
    data_source_id, data_source_name = self._create_data_source(
        self.s3_data_source_with_creds)
    # compare against the credential-free variant: the response body
    # never includes the 'credentials' field
    self._get_data_source(data_source_id, data_source_name,
                          self.s3_data_source)
@decorators.idempotent_id('fb8f9f44-17ea-4be9-8cec-e02f31a49bae')
@testtools.skipUnless(CONF.data_processing_feature_enabled.s3,
                      'S3 not available')
def test_s3_data_source_delete(self):
    """Deleting an S3 data source by id must succeed."""
    data_source_id = self._create_data_source(
        self.s3_data_source_with_creds)[0]
    self._delete_data_source(data_source_id)
@decorators.idempotent_id('d069714a-86fb-45ce-8498-43901b065243')
@testtools.skipUnless(CONF.data_processing_feature_enabled.s3,
                      'S3 not available')
def test_s3_data_source_update(self):
    """Updating an existing S3 data source must succeed."""
    data_source_id = self._create_data_source(
        self.s3_data_source_with_creds)[0]
    self._update_data_source(data_source_id)
@tc.attr('smoke')
@decorators.idempotent_id('88505d52-db01-4229-8f1d-a1137da5fe2d')
def test_local_hdfs_data_source_create(self):

View File

@ -12,14 +12,19 @@
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from testtools import testcase as tc
from tempest import config
from tempest.lib import decorators
from tempest.lib.common.utils import data_utils
from sahara_tempest_plugin.tests.api import base as dp_base
CONF = config.CONF
class JobBinaryTest(dp_base.BaseDataProcessingTest):
# Link to the API documentation is https://developer.openstack.org/
# api-ref/data-processing/#job-binaries
@ -35,10 +40,22 @@ class JobBinaryTest(dp_base.BaseDataProcessingTest):
'password': cls.os_primary.credentials.password
}
}
# Create extra cls.swift_job_binary variable to use for comparison to
# job binary response body because response body has no 'extra' field.
cls.s3_job_binary_with_extra = {
'url': 's3://sahara-bucket/example.jar',
'description': 'Test job binary',
'extra': {
'accesskey': cls.os_primary.credentials.username,
'secretkey': cls.os_primary.credentials.password,
'endpoint': 'localhost'
}
}
# Create extra cls.swift_job_binary and cls.s3_job_binary variables
# to use for comparison to job binary response body
# because response body has no 'extra' field.
cls.swift_job_binary = cls.swift_job_binary_with_extra.copy()
del cls.swift_job_binary['extra']
cls.s3_job_binary = cls.s3_job_binary_with_extra.copy()
del cls.s3_job_binary['extra']
name = data_utils.rand_name('sahara-internal-job-binary')
cls.job_binary_data = 'Some script may be data'
@ -54,7 +71,7 @@ class JobBinaryTest(dp_base.BaseDataProcessingTest):
It creates a link to data (jar, pig files, etc.), ensures job binary
name and response body. Returns id and name of created job binary.
Data may not exist when using Swift as data storage.
Data may not exist when using Swift or S3 as data storage.
In other cases data must exist in storage.
"""
if not binary_name:
@ -66,8 +83,10 @@ class JobBinaryTest(dp_base.BaseDataProcessingTest):
# ensure that binary created successfully
self.assertEqual(binary_name, resp_body['name'])
if 'swift' in binary_body['url']:
if binary_body['url'].startswith('swift:'):
binary_body = self.swift_job_binary
elif binary_body['url'].startswith('s3:'):
binary_body = self.s3_job_binary
self.assertDictContainsSubset(binary_body, resp_body)
return resp_body['id'], binary_name
@ -107,6 +126,45 @@ class JobBinaryTest(dp_base.BaseDataProcessingTest):
# delete the job binary by id
self.client.delete_job_binary(binary_id)
@decorators.idempotent_id('1cda1990-bfa1-46b1-892d-fc3ceafde537')
@testtools.skipUnless(CONF.data_processing_feature_enabled.s3,
                      'S3 not available')
def test_s3_job_binary_create(self):
    """Creating an S3 job binary (with 'extra' credentials) must succeed."""
    self._create_job_binary(self.s3_job_binary_with_extra)
@decorators.idempotent_id('69de4774-44fb-401d-9d81-8c4df83d6cdb')
@testtools.skipUnless(CONF.data_processing_feature_enabled.s3,
                      'S3 not available')
def test_s3_job_binary_list(self):
    """A freshly created S3 job binary must appear in the listing."""
    created_info = self._create_job_binary(self.s3_job_binary_with_extra)
    # reduce the listing to (id, name) pairs for the membership check
    listed = [(b['id'], b['name'])
              for b in self.client.list_job_binaries()['binaries']]
    self.assertIn(created_info, listed)
@decorators.idempotent_id('479ba3ef-67b7-45c9-81e2-ea34366099ce')
@testtools.skipUnless(CONF.data_processing_feature_enabled.s3,
                      'S3 not available')
def test_s3_job_binary_get(self):
    """Fetching an S3 job binary by id must return its public fields."""
    created_id, created_name = self._create_job_binary(
        self.s3_job_binary_with_extra)
    fetched = self.client.get_job_binary(created_id)['job_binary']
    self.assertEqual(created_name, fetched['name'])
    # the response body has no 'extra' field, hence the subset check
    # against the credential-free variant
    self.assertDictContainsSubset(self.s3_job_binary, fetched)
@decorators.idempotent_id('d949472b-6a57-4250-905d-087dfb614633')
@testtools.skipUnless(CONF.data_processing_feature_enabled.s3,
                      'S3 not available')
def test_s3_job_binary_delete(self):
    """Deleting an S3 job binary by id must succeed."""
    created_id = self._create_job_binary(self.s3_job_binary_with_extra)[0]
    self.client.delete_job_binary(created_id)
@tc.attr('smoke')
@decorators.idempotent_id('63662f6d-8291-407e-a6fc-f654522ebab6')
def test_internal_db_job_binary_create(self):