Fix resource discovery for datasources

The lookahead code which checks the paths to source files and the SSL
certificate did not properly consider the shared resources when the
package is installed.
Data source files are now looked up locally first and then, if not
found and the path is relative, inside the default templates directory.
The default paths for test resources have then been changed to use
defaults/ as the base directory, with the exception of unit tests.
As the files referenced by the YAML files must now exist, a new dummy
.crt file was added for unit tests.
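
For illustration, a minimal standalone sketch of that lookup order
(the name resolve_resource and the default_templ_dir argument are
illustrative; the actual helper added by this change is
_get_file_with_defaults in sahara_tests/scenario/base.py below):

    import os

    def resolve_resource(file_path, default_templ_dir='.'):
        """Return the first existing location of file_path.

        Absolute paths are checked only as given; relative paths are
        checked locally first and then under default_templ_dir.
        """
        if not file_path:
            return ''
        candidates = [file_path]
        if not os.path.isabs(file_path):
            candidates.append(os.path.join(default_templ_dir, file_path))
        for candidate in candidates:
            if os.path.isfile(candidate):
                return candidate
        raise Exception('File %s not found while looking into %s' %
                        (file_path, candidates))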

Change-Id: Ic385c6b4d694d95a11a626a3107fa1906924bd67
Luigi Toscano 2016-04-19 12:32:26 +02:00
parent 53e7d377dd
commit ef229a275a
13 changed files with 107 additions and 66 deletions

View File

@ -3,13 +3,13 @@ edp_jobs_flow:
- type: Pig
input_datasource:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/trim-spaces/data/input
source: edp-examples/edp-pig/trim-spaces/data/input
output_datasource:
type: hdfs
destination: /user/hadoop/edp-output
main_lib:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/trim-spaces/example.pig
source: edp-examples/edp-pig/trim-spaces/example.pig
additional_libs:
- type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/trim-spaces/udf.jar
source: edp-examples/edp-pig/trim-spaces/udf.jar

View File

@ -0,0 +1,6 @@
---
prelude: >
Discovery of data sources with relative paths is now fixed.
fixes:
- Datasources with relative paths are now properly found
from the default resources.

View File

@ -80,6 +80,7 @@ class BaseTestCase(base.BaseTestCase):
cls.testcase = None
cls._results = []
cls.report = False
cls.default_templ_dir = '.'
def setUp(self):
super(BaseTestCase, self).setUp()
@ -116,7 +117,8 @@ class BaseTestCase(base.BaseTestCase):
tenant_name,
self.credentials.get('ssl_verify',
False),
self.credentials.get('ssl_cert'))
self._get_file_with_defaults(
self.credentials.get('ssl_cert')))
self.sahara = clients.SaharaClient(session=session,
service_type=sahara_service_type,
@ -329,13 +331,36 @@ class BaseTestCase(base.BaseTestCase):
if report:
self.fail("\n".join(report))
def _get_file_with_defaults(self, file_path):
""" Check if the file exists; if it is a relative path, check also
among the default files.
"""
if not file_path:
return ''
all_files = [file_path]
if not os.path.isabs(file_path):
# relative path: look into default templates too, if defined
default_file = os.path.join(self.default_templ_dir, file_path)
if os.path.abspath(default_file) != os.path.abspath(file_path):
all_files.append(default_file)
for checked_file in all_files:
if os.path.isfile(checked_file):
return checked_file
raise Exception('File %s not found while looking into %s' %
(file_path, all_files))
def _read_source_file(self, source):
if not source:
return None
with open(self._get_file_with_defaults(source)) as source_fd:
data = source_fd.read()
return data
def _create_swift_data(self, source=None, destination=None):
container = self._get_swift_container()
path = utils.rand_name(destination if destination else 'test')
data = None
if source:
with open(source) as source_fd:
data = source_fd.read()
data = self._read_source_file(source)
self.__upload_to_container(container, path, data)
@ -361,8 +386,9 @@ class BaseTestCase(base.BaseTestCase):
"path": hdfs_dir,
"user": hdfs_username})
hdfs_filepath = utils.rand_name(hdfs_dir + "/file")
with open(source) as source_fd:
data = source_fd.read()
data = self._read_source_file(source)
if not data:
data = ''
self._run_command_on_node(
inst_ip,
("echo -e \"%(data)s\" | sudo su - -c \"%(prefix)s"
@ -374,8 +400,7 @@ class BaseTestCase(base.BaseTestCase):
return hdfs_filepath
def _create_internal_db_data(self, source):
with open(source) as source_fd:
data = source_fd.read()
data = self._read_source_file(source)
id = self.__create_internal_db_data(utils.rand_name('test'), data)
return 'internal-db://%s' % id

View File

@ -48,7 +48,7 @@ clusters:
- type: Spark
main_lib:
type: database
source: sahara_tests/scenario/defaults/edp-examples/edp-spark/spark-kafka-example.jar
source: edp-examples/edp-spark/spark-kafka-example.jar
args:
- '{zookeeper_list}'
- '{topic}'

View File

@ -3,27 +3,27 @@ edp_jobs_flow:
- type: Pig
input_datasource:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/trim-spaces/data/input
source: edp-examples/edp-pig/trim-spaces/data/input
output_datasource:
type: swift
destination: edp-output
main_lib:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/trim-spaces/example.pig
source: edp-examples/edp-pig/trim-spaces/example.pig
additional_libs:
- type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/trim-spaces/udf.jar
source: edp-examples/edp-pig/trim-spaces/udf.jar
mapreduce_job:
- type: MapReduce
input_datasource:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/trim-spaces/data/input
source: edp-examples/edp-pig/trim-spaces/data/input
output_datasource:
type: hdfs
destination: /user/hadoop/edp-output
additional_libs:
- type: database
source: sahara_tests/scenario/defaults/edp-examples/edp-mapreduce/edp-mapreduce.jar
source: edp-examples/edp-mapreduce/edp-mapreduce.jar
configs:
mapred.map.class: org.apache.oozie.example.SampleMapper
mapred.reduce.class: org.apache.oozie.example.SampleReducer
@ -32,7 +32,7 @@ edp_jobs_flow:
- type: MapReduce.Streaming
input_datasource:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/trim-spaces/data/input
source: edp-examples/edp-pig/trim-spaces/data/input
output_datasource:
type: hdfs
destination: /user/hadoop/edp-output
@ -43,7 +43,7 @@ edp_jobs_flow:
- type: Java
additional_libs:
- type: database
source: sahara_tests/scenario/defaults/edp-examples/hadoop2/edp-java/hadoop-mapreduce-examples-2.6.0.jar
source: edp-examples/hadoop2/edp-java/hadoop-mapreduce-examples-2.6.0.jar
configs:
edp.java.main_class: org.apache.hadoop.examples.QuasiMonteCarlo
args:
@ -53,11 +53,11 @@ edp_jobs_flow:
- type: Hive
main_lib:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-hive/script.q
source: edp-examples/edp-hive/script.q
input_datasource:
type: hdfs
hdfs_username: hadoop
source: sahara_tests/scenario/defaults/edp-examples/edp-hive/input.csv
source: edp-examples/edp-hive/input.csv
output_datasource:
type: hdfs
destination: /user/edp-output
@ -65,7 +65,7 @@ edp_jobs_flow:
- type: Spark
main_lib:
type: database
source: sahara_tests/scenario/defaults/edp-examples/edp-spark/spark-example.jar
source: edp-examples/edp-spark/spark-example.jar
configs:
edp.java.main_class: org.apache.spark.examples.SparkPi
args:
@ -74,10 +74,10 @@ edp_jobs_flow:
- type: Spark
input_datasource:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-spark/sample_input.txt
source: edp-examples/edp-spark/sample_input.txt
main_lib:
type: database
source: sahara_tests/scenario/defaults/edp-examples/edp-spark/spark-wordcount.jar
source: edp-examples/edp-spark/spark-wordcount.jar
configs:
edp.java.main_class: sahara.edp.spark.SparkWordCount
edp.spark.adapt_for_swift: true
@ -89,34 +89,34 @@ edp_jobs_flow:
- type: Pig
input_datasource:
type: maprfs
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/trim-spaces/data/input
source: edp-examples/edp-pig/trim-spaces/data/input
output_datasource:
type: maprfs
destination: /user/hadoop/edp-output
main_lib:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/trim-spaces/example.pig
source: edp-examples/edp-pig/trim-spaces/example.pig
additional_libs:
- type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/trim-spaces/udf.jar
source: edp-examples/edp-pig/trim-spaces/udf.jar
mapr:
- type: MapReduce
input_datasource:
type: maprfs
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/trim-spaces/data/input
source: edp-examples/edp-pig/trim-spaces/data/input
output_datasource:
type: maprfs
destination: /user/hadoop/edp-output
additional_libs:
- type: database
source: sahara_tests/scenario/defaults/edp-examples/edp-mapreduce/edp-mapreduce.jar
source: edp-examples/edp-mapreduce/edp-mapreduce.jar
configs:
mapred.mapper.class: org.apache.oozie.example.SampleMapper
mapred.reducer.class: org.apache.oozie.example.SampleReducer
- type: MapReduce.Streaming
input_datasource:
type: maprfs
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/trim-spaces/data/input
source: edp-examples/edp-pig/trim-spaces/data/input
output_datasource:
type: maprfs
destination: /user/hadoop/edp-output
@ -126,7 +126,7 @@ edp_jobs_flow:
- type: Java
additional_libs:
- type: database
source: sahara_tests/scenario/defaults/edp-examples/hadoop2/edp-java/hadoop-mapreduce-examples-2.6.0.jar
source: edp-examples/hadoop2/edp-java/hadoop-mapreduce-examples-2.6.0.jar
configs:
edp.java.main_class: org.apache.hadoop.examples.QuasiMonteCarlo
args:

View File

@ -48,7 +48,7 @@ clusters:
- type: Spark
main_lib:
type: database
source: sahara_tests/scenario/defaults/edp-examples/edp-spark/spark-kafka-example.jar
source: edp-examples/edp-spark/spark-kafka-example.jar
args:
- '{zookeeper_list}'
- '{topic}'

View File

@ -315,12 +315,14 @@ def main():
testcases = config['clusters']
for case in range(count - 1):
testcases.extend(config['clusters'])
default_templ_dir = os.path.abspath(TEST_TEMPLATE_DIR)
# create testcase file
test_template = mako_template.Template(filename=TEST_TEMPLATE_PATH)
testcase_data = test_template.render(testcases=testcases,
credentials=credentials,
network=network, report=report)
network=network, report=report,
default_templ_dir=default_templ_dir)
test_dir_path = tempfile.mkdtemp()
print("The generated test file located at: %s" % test_dir_path)

View File

@ -13,6 +13,7 @@ class ${testcase['class_name']}TestCase(base.BaseTestCase):
cls.network = ${network}
cls.testcase = ${testcase}
cls.report = ${report}
cls.default_templ_dir = '${default_templ_dir}'
def test_plugin(self):
self.create_cluster()

View File

@ -0,0 +1 @@
# Unit tests require a file for the certificate. The content is not checked but the file must exist.

View File

@ -86,8 +86,8 @@ class TestBase(testtools.TestCase):
'data-processing-local',
'sahara_url':
'http://sahara_host:8386/v1.1',
'ssl_cert': 'sahara_tests/scenario/'
'defaults/tests/cert.crt',
'ssl_cert': 'sahara_tests/unit/'
'scenario/dummy.crt',
'ssl_verify': True}
self.base_scenario.plugin_opts = {'plugin_name': 'vanilla',
'hadoop_version': '2.7.1'}
@ -181,7 +181,7 @@ class TestBase(testtools.TestCase):
sahara_url='http://sahara_host:8386/v1.1')
swift.assert_called_with(
auth_version='2.0', user='admin', key='nova', insecure=False,
cacert='sahara_tests/scenario/defaults/tests/cert.crt',
cacert='sahara_tests/unit/scenario/dummy.crt',
tenant_name='admin', authurl='http://localhost:5000/v2.0')
nova.assert_called_with('2', session=fake_session)
@ -195,7 +195,7 @@ class TestBase(testtools.TestCase):
project_domain_name='default')
m_session.assert_called_with(
auth=fake_auth,
cert='sahara_tests/scenario/defaults/tests/cert.crt', verify=True)
cert='sahara_tests/unit/scenario/dummy.crt', verify=True)
@mock.patch('sahara_tests.scenario.clients.NeutronClient.get_network_id',
return_value='mock_net')

View File

@ -93,7 +93,7 @@ class RunnerUnitTest(testtools.TestCase):
"sahara_url": "http://127.0.0.1",
"os_password": "changed_nova",
"os_tenant": "changed_admin",
"ssl_cert": "sahara_tests/scenario/defaults/tests/cert.crt"
"ssl_cert": "sahara_tests/unit/scenario/dummy.crt"
},
"network": {
"type": "neutron",
@ -136,8 +136,10 @@ class RunnerUnitTest(testtools.TestCase):
"additional_libs": [
{
"type": "database",
"source": "sahara_tests/integration/tests/"
"resources/"
"source": 'sahara_tests/scenario/defaults/'
'edp-examples/hadoop2/edp-java/'
'hadoop-mapreduce-examples-'
'2.6.0.jars',
}],
"configs": "edp.java.main_class: org.apache.hadoop."
"examples.QuasiMonteCarlo",
@ -150,8 +152,10 @@ class RunnerUnitTest(testtools.TestCase):
"additional_libs": [
{
"type": "database",
"source": "sahara_tests/integration/tests/"
"resources/"
"source":
"sahara_tests/scenario/defaults/"
"edp-examples/hadoop2/edp-java/hadoop-"
"mapreduce-examples-2.6.0.jars"
}],
"configs": "edp.java.main_class: org.apache.hadoop."
"examples.QuasiMonteCarlo",
@ -169,7 +173,7 @@ class RunnerUnitTest(testtools.TestCase):
"os_password": "changed_nova",
"os_tenant": "changed_admin",
"sahara_service_type": "data-processing",
"ssl_cert": "sahara_tests/scenario/defaults/tests/cert.crt",
"ssl_cert": "sahara_tests/unit/scenario/dummy.crt",
"ssl_verify": False
},
}
@ -217,8 +221,10 @@ class RunnerUnitTest(testtools.TestCase):
'type': 'Java',
'additional_libs': [
{
'source': 'sahara_tests/integration/'
'tests/resources/',
'source': 'sahara_tests/scenario/defaults/'
'edp-examples/hadoop2/edp-java/'
'hadoop-mapreduce-examples-'
'2.6.0.jars',
'type': 'database'
}]
}

View File

@ -5,7 +5,7 @@ credentials:
os_password: nova
os_tenant: admin
os_auth_url: http://127.0.0.1:5000/v2.0
ssl_cert: sahara_tests/scenario/defaults/test/cert.crt
ssl_cert: sahara_tests/unit/scenario/dummy.crt
ssl_verify: True
network:
@ -58,17 +58,17 @@ edp_jobs_flow:
- type: Pig
input_datasource:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/top-todoers/data/input
source: edp-examples/edp-pig/top-todoers/data/input
output_datasource:
type: hdfs
destination: /user/hadoop/edp-output
main_lib:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/top-todoers/example.pig
source: edp-examples/edp-pig/top-todoers/example.pig
- type: Java
additional_libs:
- type: database
source: sahara_tests/scenario/defaults/edp-examples/hadoop2/edp-java/hadoop-mapreduce-examples-2.7.1.jar
source: edp-examples/hadoop2/edp-java/hadoop-mapreduce-examples-2.7.1.jar
configs:
edp.java.main_class: org.apache.hadoop.examples.QuasiMonteCarlo
args:
@ -80,10 +80,10 @@ edp_jobs_flow:
mapred.reducer.class: org.apache.oozie.example.SampleReducer
additional_libs:
- type: database
source: sahara_tests/scenario/defaults/edp-examples/edp-java/edp-java.jar
source: edp-examples/edp-java/edp-java.jar
input_datasource:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/top-todoers/data/input
source: edp-examples/edp-pig/top-todoers/data/input
output_datasource:
type: hdfs
destination: /user/hadoop/edp-output
@ -93,17 +93,17 @@ edp_jobs_flow:
edp.streaming.reducer: /usr/bin/wc
input_datasource:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/top-todoers/data/input
source: edp-examples/edp-pig/top-todoers/data/input
output_datasource:
type: hdfs
destination: /user/hadoop/edp-output
- type: Hive
input_datasource:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-hive/input.csv
source: edp-examples/edp-hive/input.csv
output_datasource:
type: hdfs
destination: /user/hadoop/edp-hive/
main_lib:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-hive/script.q
source: edp-examples/edp-hive/script.q

View File

@ -5,7 +5,7 @@ credentials:
os_password: ${OS_PASSWORD}
os_tenant: ${OS_TENANT_NAME}
os_auth_url: ${OS_AUTH_URL}
ssl_cert: sahara_tests/scenario/defaults/tests/cert.crt
ssl_cert: sahara_tests/unit/scenario/dummy.crt
ssl_verify: True
network:
@ -56,17 +56,17 @@ edp_jobs_flow:
- type: Pig
input_datasource:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/top-todoers/data/input
source: edp-examples/edp-pig/top-todoers/data/input
output_datasource:
type: hdfs
destination: /user/hadoop/edp-output
main_lib:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/top-todoers/example.pig
source: edp-examples/edp-pig/top-todoers/example.pig
- type: Java
additional_libs:
- type: database
source: sahara_tests/scenario/defaults/edp-examples/hadoop2/edp-java/hadoop-mapreduce-examples-2.7.1.jar
source: edp-examples/hadoop2/edp-java/hadoop-mapreduce-examples-2.7.1.jar
configs:
edp.java.main_class: org.apache.hadoop.examples.QuasiMonteCarlo
args:
@ -78,10 +78,10 @@ edp_jobs_flow:
mapred.reducer.class: org.apache.oozie.example.SampleReducer
additional_libs:
- type: database
source: sahara_tests/scenario/defaults/edp-examples/edp-java/edp-java.jar
source: edp-examples/edp-java/edp-java.jar
input_datasource:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/top-todoers/data/input
source: edp-examples/edp-pig/top-todoers/data/input
output_datasource:
type: hdfs
destination: /user/hadoop/edp-output
@ -91,30 +91,30 @@ edp_jobs_flow:
edp.streaming.reducer: /usr/bin/wc
input_datasource:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/top-todoers/data/input
source: edp-examples/edp-pig/top-todoers/data/input
output_datasource:
type: hdfs
destination: /user/hadoop/edp-output
- type: Hive
input_datasource:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-hive/input.csv
source: edp-examples/edp-hive/input.csv
output_datasource:
type: hdfs
destination: /user/hadoop/edp-hive/
main_lib:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-hive/script.q
source: edp-examples/edp-hive/script.q
- type: MapReduce
configs:
mapred.mapper.class: org.apache.oozie.example.SampleMapper
mapred.reducer.class: org.apache.oozie.example.SampleReducer
additional_libs:
- type: database
source: sahara_tests/scenario/defaults/edp-examples/edp-java/edp-java.jar
source: edp-examples/edp-java/edp-java.jar
input_datasource:
type: swift
source: sahara_tests/scenario/defaults/edp-examples/edp-pig/top-todoers/data/input
source: edp-examples/edp-pig/top-todoers/data/input
output_datasource:
type: hdfs
destination: /user/hadoop/edp-output