Corrected test environment

Moved what were called unit tests to the functional test directory, as
more work is needed to make them self-supporting.
Added sphinx to test-requirements.txt along with the testr dependencies.
Adjusted tox.ini to suit.
Added a doc source tree with the minimal set of files needed for a
successful sphinx build.

Change-Id: Ic64c2ccd01778bb0d1142d12ca43b2346bdc7aa7
David C Kennedy 2016-11-21 12:57:41 +00:00
parent 8a6e619f72
commit d76af6a0f0
79 changed files with 442 additions and 166 deletions

.gitignore

@@ -1 +1,4 @@
.idea
AUTHORS
ChangeLog
monasca_transform.egg-info

.testr.conf (new file)

@@ -0,0 +1,7 @@
[DEFAULT]
test_command=
    ${PYTHON:-python} -m subunit.run discover -t ./ ${OS_TEST_PATH:-./tests/unit} $LISTOPT $IDOPTION
test_id_option=--load-list $IDFILE
test_list_option=--list
group_regex=.*(test_cert_setup)
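
For reference, a minimal sketch of what the test_command above amounts to
once testrepository substitutes its variables, assuming the OS_TEST_PATH
default and no list options:

    import subprocess

    # Run unittest discovery under ./tests/unit and emit a subunit
    # stream, roughly what testrepository invokes for a full run.
    subprocess.check_call([
        "python", "-m", "subunit.run", "discover",
        "-t", "./",        # top-level import directory
        "./tests/unit",    # OS_TEST_PATH default from .testr.conf
    ])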

doc/source/conf.py (new file)

@@ -0,0 +1,265 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# monasca-transform documentation build configuration file, created by
# sphinx-quickstart on Mon Jan 9 12:02:59 2012.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import subprocess
import sys
import warnings
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../'))
sys.path.insert(0, os.path.abspath('../'))
sys.path.insert(0, os.path.abspath('./'))
# -- General configuration ----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
              'sphinx.ext.todo',
              'sphinx.ext.coverage',
              'sphinx.ext.viewcode',
              ]
todo_include_todos = True
# Add any paths that contain templates here, relative to this directory.
# if os.getenv('HUDSON_PUBLISH_DOCS'):
# templates_path = ['_ga', '_templates']
# else:
# templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'monasca-transform'
copyright = u'2016, OpenStack Foundation'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['old']
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
modindex_common_prefix = ['monasca-transform.']
# -- Options for man page output --------------------------------------------
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# html_theme_path = ["."]
# html_theme = '_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
git_cmd = ["git", "log", "--pretty=format:'%ad, commit %h'", "--date=local",
           "-n1"]
try:
    html_last_updated_fmt = subprocess.Popen(
        git_cmd, stdout=subprocess.PIPE).communicate()[0]
except Exception:
    warnings.warn('Cannot get last updated time from git repository. '
                  'Not setting "html_last_updated_fmt".')
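# Note: on Python 3, communicate()[0] returns bytes while Sphinx expects
# a str for html_last_updated_fmt; decoding the output first (for
# example, .decode('utf-8')) would be needed there. Under Python 2 the
# assignment works as written.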
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'monasca-transformdoc'
# -- Options for LaTeX output -------------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    # 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples (source
# start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
    ('index', 'monasca-transform.tex', u'Monasca-transform Documentation',
     u'OpenStack', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for Texinfo output -----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    ('index', 'monasca-transform', u'Monasca-transform Documentation',
     u'OpenStack', 'monasca-transform', 'One line description of project.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# Example configuration for intersphinx: refer to the Python standard library.
# intersphinx_mapping = {'http://docs.python.org/': None}
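
With this conf.py in place the HTML docs can be built; a minimal sketch
using Sphinx's Python API, equivalent to running
sphinx-build -b html doc/source doc/build/html (output paths are
illustrative):

    from sphinx.application import Sphinx

    # srcdir/confdir point at the new doc tree; outdir/doctreedir hold
    # the build artifacts.
    app = Sphinx(srcdir="doc/source", confdir="doc/source",
                 outdir="doc/build/html", doctreedir="doc/build/doctrees",
                 buildername="html")
    app.build()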

doc/source/index.rst (new file)

@@ -0,0 +1,19 @@
..
   Copyright 2016 OpenStack Foundation
   All Rights Reserved.

   Licensed under the Apache License, Version 2.0 (the "License"); you may
   not use this file except in compliance with the License. You may obtain
   a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
   WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
   License for the specific language governing permissions and limitations
   under the License.

=================
Monasca-transform
=================

requirements.txt

@@ -1,3 +1,4 @@
pbr>=1.8 # Apache-2.0
PyMySQL
six>=1.7.0 # MIT
SQLAlchemy<1.1.0,>=0.9.9

test-requirements.txt

@@ -5,3 +5,9 @@ nose==1.3.0
mock>=1.0.1
tox
fixtures==1.4.0
# required to build documentation
sphinx!=1.3b1,<1.4,>=1.2.1 # BSD
#oslosphinx>=4.7.0 # Apache-2.0
testrepository>=0.0.18 # Apache-2.0/BSD
# computes code coverage percentages
coverage>=4.0 # Apache-2.0

tests/functional/__init__.py (new file)

@@ -0,0 +1,27 @@
# Copyright 2016 Hewlett Packard Enterprise Development Company LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Add the location of Spark to the path
# TODO(someone) Does the "/opt/spark/current" location need to be configurable?
import os
import sys
try:
    sys.path.append(os.path.join("/opt/spark/current", "python"))
    sys.path.append(os.path.join("/opt/spark/current",
                                 "python", "lib", "py4j-0.9-src.zip"))
except KeyError:
    print("Error adding Spark location to the path")
    # TODO(someone) not sure what action is appropriate
    sys.exit(1)
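
An aside on the block above: list.append does not raise KeyError, so the
except clause can never fire for these appends. A sketch of a guard that
actually exercises the error path, assuming a SPARK_HOME environment
variable may override the default location:

    import os
    import sys

    spark_home = os.environ.get("SPARK_HOME", "/opt/spark/current")
    if not os.path.isdir(spark_home):
        print("Error adding Spark location to the path")
        sys.exit(1)
    sys.path.append(os.path.join(spark_home, "python"))
    sys.path.append(os.path.join(spark_home,
                                 "python", "lib", "py4j-0.9-src.zip"))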


@@ -20,11 +20,11 @@ from monasca_transform.transform.builder.generic_transform_builder \
from monasca_transform.transform.transform_utils import RecordStoreUtils
from monasca_transform.transform.transform_utils import TransformSpecsUtils
from monasca_transform.transform import TransformContextUtils
from tests.unit.spark_context_test import SparkContextTest
from tests.unit.test_resources.mem_total_all.data_provider import DataProvider
from tests.unit.test_resources.mock_component_manager \
from tests.functional.test_resources.mem_total_all.data_provider \
import DataProvider
from tests.unit \
import MockComponentManager
from tests.unit import SparkContextTest
class TransformBuilderTest(SparkContextTest):


@@ -12,9 +12,10 @@
# License for the specific language governing permissions and limitations
# under the License.
from monasca_transform.component.insert import InsertComponent
from oslo_config import cfg
from tests.unit.messaging.adapter import DummyAdapter
from monasca_transform.component.insert import InsertComponent
from tests.functional.messaging.adapter import DummyAdapter
class DummyInsert(InsertComponent):


@@ -12,9 +12,10 @@
# License for the specific language governing permissions and limitations
# under the License.
from monasca_transform.component.insert import InsertComponent
from oslo_config import cfg
from tests.unit.messaging.adapter import DummyAdapter
from monasca_transform.component.insert import InsertComponent
from tests.functional.messaging.adapter import DummyAdapter
class DummyInsertPreHourly(InsertComponent):


@@ -24,7 +24,7 @@ from monasca_transform.data_driven_specs.json_data_driven_specs_repo \
import JSONDataDrivenSpecsRepo
from monasca_transform.data_driven_specs.mysql_data_driven_specs_repo \
import MySQLDataDrivenSpecsRepo
from tests.unit.spark_context_test import SparkContextTest
from tests.functional.spark_context_test import SparkContextTest
class TestDataDrivenSpecsRepo(SparkContextTest):


@@ -11,11 +11,11 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
import mock
from mock import call
from mock import MagicMock
import unittest
from oslo_config import cfg
from pyspark.streaming.kafka import OffsetRange
@@ -24,12 +24,11 @@ from monasca_transform.driver.mon_metrics_kafka \
import MonMetricsKafkaProcessor
from monasca_transform.transform import RddTransformContext
from monasca_transform.transform import TransformContextUtils
from tests.unit.messaging.adapter import DummyAdapter
from tests.unit.spark_context_test import SparkContextTest
from tests.unit.test_resources.kafka_data.data_provider import DataProvider
from tests.unit.test_resources.mock_component_manager \
from tests.functional.messaging.adapter import DummyAdapter
from tests.unit import DataProvider
from tests.unit \
import MockComponentManager
from tests.unit import SparkContextTest
class SparkUnitTest(unittest.TestCase):


@@ -11,26 +11,24 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import os
import random
import sys
import unittest
import uuid
import mock
from oslo_config import cfg
from pyspark.streaming.kafka import OffsetRange
from monasca_transform.config.config_initializer import ConfigInitializer
from monasca_transform.processor.pre_hourly_processor import PreHourlyProcessor
from tests.unit.component.insert.dummy_insert import DummyInsert
from tests.unit.messaging.adapter import DummyAdapter
from tests.unit.spark_context_test import SparkContextTest
from tests.unit.test_resources.metrics_pre_hourly_data.data_provider \
import DataProvider
from monasca_transform.config.config_initializer import ConfigInitializer
from monasca_transform.offset_specs import JSONOffsetSpecs
from monasca_transform.processor.pre_hourly_processor import PreHourlyProcessor
from tests.functional.messaging.adapter import DummyAdapter
class TestPreHourlyProcessorAgg(SparkContextTest):


@@ -13,18 +13,18 @@
# under the License.
from pyspark.sql import SQLContext
from monasca_transform.transform.transform_utils import RecordStoreUtils
from monasca_transform.transform.transform_utils import TransformSpecsUtils
from monasca_transform.transform import TransformContextUtils
from monasca_transform.component.setter.rollup_quantity \
import RollupQuantity
from monasca_transform.component.setter.set_aggregated_metric_name \
import SetAggregatedMetricName
from monasca_transform.component.usage.fetch_quantity \
import FetchQuantity
from tests.unit.spark_context_test import SparkContextTest
from tests.unit.test_resources.mem_total_all.data_provider import DataProvider
from monasca_transform.transform.transform_utils import RecordStoreUtils
from monasca_transform.transform.transform_utils import TransformSpecsUtils
from monasca_transform.transform import TransformContextUtils
from tests.functional.test_resources.mem_total_all.data_provider \
import DataProvider
from tests.unit import SparkContextTest
class SetAggregatedMetricNameTest(SparkContextTest):


@@ -12,17 +12,17 @@
# License for the specific language governing permissions and limitations
# under the License.
from pyspark.sql import SQLContext
from monasca_transform.transform.transform_utils import RecordStoreUtils
from monasca_transform.transform.transform_utils import TransformSpecsUtils
from monasca_transform.transform import TransformContextUtils
from tests.unit.spark_context_test import SparkContextTest
from monasca_transform.component.setter.rollup_quantity \
import RollupQuantity
from monasca_transform.component.usage.fetch_quantity \
import FetchQuantity
from tests.unit.spark_context_test import SparkContextTest
from tests.unit.test_resources.mem_total_all.data_provider import DataProvider
from monasca_transform.transform.transform_utils import RecordStoreUtils
from monasca_transform.transform.transform_utils import TransformSpecsUtils
from monasca_transform.transform import TransformContextUtils
from tests.functional.test_resources.mem_total_all.data_provider \
import DataProvider
class UsageComponentTest(SparkContextTest):


@@ -14,7 +14,7 @@
from stevedore.extension import Extension
from stevedore.extension import ExtensionManager
from tests.unit.component.insert.dummy_insert import DummyInsert
from monasca_transform.component.insert.prepare_data import PrepareData
from monasca_transform.component.setter.rollup_quantity \
@@ -29,8 +29,7 @@ from monasca_transform.component.usage.fetch_quantity \
import FetchQuantity
from monasca_transform.component.usage.fetch_quantity_util \
import FetchQuantityUtil
from tests.unit.component.insert.dummy_insert import DummyInsert
from tests.unit.component.insert.dummy_insert_pre_hourly \
from tests.functional.component.insert.dummy_insert_pre_hourly \
import DummyInsertPreHourly


@@ -12,11 +12,18 @@
# License for the specific language governing permissions and limitations
# under the License.
import json
import mock
import unittest
import mock
from oslo_config import cfg
from pyspark.streaming.kafka import OffsetRange
from tests.unit.spark_context_test import SparkContextTest
from tests.unit.test_resources.fetch_quantity_data.data_provider \
import DataProvider
from tests.unit.test_resources.mock_component_manager \
import MockComponentManager
from tests.unit.test_resources.mock_data_driven_specs_repo \
import MockDataDrivenSpecsRepo
from monasca_transform.component.usage.fetch_quantity \
import FetchQuantityException
@@ -25,15 +32,7 @@ from monasca_transform.driver.mon_metrics_kafka \
import MonMetricsKafkaProcessor
from monasca_transform.transform import RddTransformContext
from monasca_transform.transform import TransformContextUtils
from tests.unit.messaging.adapter import DummyAdapter
from tests.unit.spark_context_test import SparkContextTest
from tests.unit.test_resources.fetch_quantity_data.data_provider \
import DataProvider
from tests.unit.test_resources.mock_component_manager \
import MockComponentManager
from tests.unit.test_resources.mock_data_driven_specs_repo \
import MockDataDrivenSpecsRepo
from tests.functional.messaging.adapter import DummyAdapter
class TestFetchQuantityAgg(SparkContextTest):


@@ -12,22 +12,13 @@
# License for the specific language governing permissions and limitations
# under the License.
import json
import mock
import unittest
import mock
from oslo_config import cfg
from pyspark.sql import SQLContext
from pyspark.streaming.kafka import OffsetRange
from monasca_transform.config.config_initializer import ConfigInitializer
from monasca_transform.driver.mon_metrics_kafka \
import MonMetricsKafkaProcessor
from monasca_transform.processor.pre_hourly_processor import PreHourlyProcessor
from monasca_transform.transform import RddTransformContext
from monasca_transform.transform import TransformContextUtils
from tests.unit.component.insert.dummy_insert import DummyInsert
from tests.unit.messaging.adapter import DummyAdapter
from tests.unit.spark_context_test import SparkContextTest
from tests.unit.test_resources.fetch_quantity_data.data_provider \
import DataProvider
@@ -39,6 +30,14 @@ from tests.unit.test_resources.mock_data_driven_specs_repo \
import MockDataDrivenSpecsRepo
from tests.unit.usage import dump_as_ascii_string
from monasca_transform.config.config_initializer import ConfigInitializer
from monasca_transform.driver.mon_metrics_kafka \
import MonMetricsKafkaProcessor
from monasca_transform.processor.pre_hourly_processor import PreHourlyProcessor
from monasca_transform.transform import RddTransformContext
from monasca_transform.transform import TransformContextUtils
from tests.functional.messaging.adapter import DummyAdapter
class TestFetchQuantityInstanceUsageAgg(SparkContextTest):


@@ -12,23 +12,11 @@
# License for the specific language governing permissions and limitations
# under the License.
import json
import mock
import unittest
import mock
from oslo_config import cfg
from pyspark.streaming.kafka import OffsetRange
from monasca_transform.component.usage.fetch_quantity_util import \
FetchQuantityUtilException
from monasca_transform.config.config_initializer import ConfigInitializer
from monasca_transform.driver.mon_metrics_kafka \
import MonMetricsKafkaProcessor
from monasca_transform.transform import RddTransformContext
from monasca_transform.transform import TransformContextUtils
from tests.unit.messaging.adapter import DummyAdapter
from tests.unit.spark_context_test import SparkContextTest
from tests.unit.test_resources.cpu_kafka_data.data_provider import DataProvider
from tests.unit.test_resources.mock_component_manager \
@@ -36,6 +24,15 @@ from tests.unit.test_resources.mock_component_manager \
from tests.unit.test_resources.mock_data_driven_specs_repo \
import MockDataDrivenSpecsRepo
from monasca_transform.component.usage.fetch_quantity_util import \
FetchQuantityUtilException
from monasca_transform.config.config_initializer import ConfigInitializer
from monasca_transform.driver.mon_metrics_kafka \
import MonMetricsKafkaProcessor
from monasca_transform.transform import RddTransformContext
from monasca_transform.transform import TransformContextUtils
from tests.functional.messaging.adapter import DummyAdapter
class TestFetchQuantityUtilAgg(SparkContextTest):


@@ -12,9 +12,9 @@
# License for the specific language governing permissions and limitations
# under the License.
import json
import mock
import unittest
import mock
from oslo_config import cfg
from pyspark.sql import SQLContext
from pyspark.streaming.kafka import OffsetRange
@@ -22,22 +22,20 @@ from pyspark.streaming.kafka import OffsetRange
from monasca_transform.config.config_initializer import ConfigInitializer
from monasca_transform.driver.mon_metrics_kafka \
import MonMetricsKafkaProcessor
from monasca_transform.processor.pre_hourly_processor import PreHourlyProcessor
from monasca_transform.transform import RddTransformContext
from monasca_transform.transform import TransformContextUtils
from tests.unit.component.insert.dummy_insert import DummyInsert
from tests.unit.messaging.adapter import DummyAdapter
from tests.unit.spark_context_test import SparkContextTest
from tests.unit.test_resources.cpu_kafka_data.data_provider import DataProvider
from tests.unit.test_resources.fetch_quantity_util_second_stage.data_provider \
from tests.functional.messaging.adapter import DummyAdapter
from tests.unit import DataProvider
from tests.unit \
import DataProvider as SecondStageDataProvider
from tests.unit.test_resources.mock_component_manager \
from tests.unit import DummyInsert
from tests.unit import dump_as_ascii_string
from tests.unit \
import MockComponentManager
from tests.unit.test_resources.mock_data_driven_specs_repo \
from tests.unit \
import MockDataDrivenSpecsRepo
from tests.unit.usage import dump_as_ascii_string
from tests.unit import SparkContextTest
class TestFetchQuantityUtilAgg(SparkContextTest):


@@ -11,9 +11,9 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import unittest
import mock
from oslo_config import cfg
from pyspark.streaming.kafka import OffsetRange
@@ -22,12 +22,11 @@ from monasca_transform.driver.mon_metrics_kafka \
import MonMetricsKafkaProcessor
from monasca_transform.transform import RddTransformContext
from monasca_transform.transform import TransformContextUtils
from tests.unit.messaging.adapter import DummyAdapter
from tests.unit.spark_context_test import SparkContextTest
from tests.unit.test_resources.cpu_kafka_data.data_provider import DataProvider
from tests.unit.test_resources.mock_component_manager \
from tests.functional.messaging.adapter import DummyAdapter
from tests.unit import DataProvider
from tests.unit \
import MockComponentManager
from tests.unit import SparkContextTest
class SparkTest(SparkContextTest):


@@ -11,20 +11,12 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import unittest
import mock
from oslo_config import cfg
from pyspark.streaming.kafka import OffsetRange
from monasca_transform.config.config_initializer import ConfigInitializer
from monasca_transform.driver.mon_metrics_kafka \
import MonMetricsKafkaProcessor
from monasca_transform.processor.pre_hourly_processor import PreHourlyProcessor
from monasca_transform.transform import RddTransformContext
from monasca_transform.transform import TransformContextUtils
from pyspark.sql import SQLContext
from pyspark.streaming.kafka import OffsetRange
from tests.unit.component.insert.dummy_insert import DummyInsert
from tests.unit.messaging.adapter import DummyAdapter
from tests.unit.spark_context_test import SparkContextTest
@@ -33,7 +25,14 @@ from tests.unit.test_resources.cpu_kafka_data_second_stage.data_provider \
import DataProvider as SecondStageDataProvider
from tests.unit.test_resources.mock_component_manager \
import MockComponentManager
from tests.unit.usage import dump_as_ascii_string
from monasca_transform.config.config_initializer import ConfigInitializer
from monasca_transform.driver.mon_metrics_kafka \
import MonMetricsKafkaProcessor
from monasca_transform.processor.pre_hourly_processor import PreHourlyProcessor
from monasca_transform.transform import RddTransformContext
from monasca_transform.transform import TransformContextUtils
from tests.functional.usage import dump_as_ascii_string
class SparkTest(SparkContextTest):


@@ -18,9 +18,9 @@ from monasca_transform.component.usage.fetch_quantity \
from monasca_transform.transform.transform_utils import RecordStoreUtils
from monasca_transform.transform.transform_utils import TransformSpecsUtils
from monasca_transform.transform import TransformContextUtils
from tests.unit.spark_context_test import SparkContextTest
from tests.unit.test_resources.mem_total_all.data_provider import DataProvider
from tests.functional.test_resources.mem_total_all.data_provider \
import DataProvider
from tests.unit import SparkContextTest
class UsageComponentTest(SparkContextTest):


@@ -12,26 +12,24 @@
# License for the specific language governing permissions and limitations
# under the License.
import json
import mock
import unittest
import mock
from oslo_config import cfg
from pyspark.streaming.kafka import OffsetRange
from monasca_transform.config.config_initializer import ConfigInitializer
from monasca_transform.driver.mon_metrics_kafka \
import MonMetricsKafkaProcessor
from monasca_transform.transform import RddTransformContext
from monasca_transform.transform import TransformContextUtils
from tests.unit.messaging.adapter import DummyAdapter
from tests.unit.spark_context_test import SparkContextTest
from tests.unit.test_resources.kafka_data.data_provider import DataProvider
from tests.unit.test_resources.mock_component_manager \
from tests.functional.messaging.adapter import DummyAdapter
from tests.unit import DataProvider
from tests.unit \
import MockComponentManager
from tests.unit.test_resources.mock_data_driven_specs_repo \
from tests.unit \
import MockDataDrivenSpecsRepo
from tests.unit import SparkContextTest
class TestVmCpuAllocatedAgg(SparkContextTest):


@@ -12,32 +12,30 @@
# License for the specific language governing permissions and limitations
# under the License.
import json
import mock
import unittest
import mock
from oslo_config import cfg
from pyspark.sql import SQLContext
from pyspark.streaming.kafka import OffsetRange
from monasca_transform.config.config_initializer import ConfigInitializer
from monasca_transform.driver.mon_metrics_kafka \
import MonMetricsKafkaProcessor
from monasca_transform.processor.pre_hourly_processor import PreHourlyProcessor
from monasca_transform.transform import RddTransformContext
from monasca_transform.transform import TransformContextUtils
from pyspark.sql import SQLContext
from tests.unit.component.insert.dummy_insert import DummyInsert
from tests.unit.messaging.adapter import DummyAdapter
from tests.unit.spark_context_test import SparkContextTest
from tests.unit.test_resources.kafka_data.data_provider import DataProvider
from tests.unit.test_resources.kafka_data_second_stage.data_provider \
from tests.functional.test_resources.kafka_data_second_stage.data_provider \
import DataProvider as SecondStageDataProvider
from tests.unit.test_resources.mock_component_manager \
from tests.unit import DataProvider
from tests.unit import DummyAdapter
from tests.unit import DummyInsert
from tests.unit import dump_as_ascii_string
from tests.unit \
import MockComponentManager
from tests.unit.test_resources.mock_data_driven_specs_repo \
from tests.unit \
import MockDataDrivenSpecsRepo
from tests.unit.usage import dump_as_ascii_string
from tests.unit import SparkContextTest
class TestVmCpuAllocatedAgg(SparkContextTest):

tests/unit/__init__.py (deleted)

@@ -1,27 +0,0 @@
# Copyright 2016 Hewlett Packard Enterprise Development Company LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Add the location of Spark to the path
# TODO(someone) Does the "/opt/spark/current" location need to be configurable?
import os
import sys
try:
    sys.path.append(os.path.join("/opt/spark/current", "python"))
    sys.path.append(os.path.join("/opt/spark/current",
                                 "python", "lib", "py4j-0.9-src.zip"))
except KeyError:
    print("Error adding Spark location to the path")
    # TODO(someone) not sure what action is appropriate
    sys.exit(1)

tox.ini

@@ -19,23 +19,6 @@ whitelist_externals = bash
    find
commands =
    find . -type f -name "*.pyc" -delete
    nosetests \
        tests/unit/builder/test_transform_builder.py \
        tests/unit/config/config_initializer_test.py \
        tests/unit/driver/first_attempt_at_spark_test.py \
        tests/unit/data_driven_specs/test_data_driven_specs.py \
        tests/unit/setter/test_set_aggregated_metric_name.py \
        tests/unit/setter/test_setter_component.py \
        tests/unit/test_json_kafka_offsets.py \
        tests/unit/test_mysql_kafka_offsets.py \
        tests/unit/usage/test_fetch_quantity_agg.py \
        tests/unit/usage/test_fetch_quantity_util_agg.py \
        tests/unit/usage/test_host_cpu_usage_component.py \
        tests/unit/processor/test_pre_hourly_processor_agg.py \
        tests/unit/usage/test_usage_component.py \
        tests/unit/usage/test_vm_cpu_allocated_agg.py -e tests_to_fix
[testenv:pep8]
commands = flake8
@@ -43,6 +26,13 @@ commands = flake8
[testenv:venv]
commands = {posargs}
[testenv:cover]
# Also do not run test_coverage_ext tests while gathering coverage as those
# tests conflict with coverage.
commands =
    find monasca_transform -type f -name "*.pyc" -delete
    python setup.py testr --coverage --testr-args='{posargs}'
[flake8]
max-complexity = 30
# TODO: ignored checks should be enabled in the future
@@ -52,4 +42,4 @@ max-complexity = 30
# E402 module level import not at top of file FIXME remove this
ignore = H302,H904,H405,E402
show-source = True
exclude=.venv,.git,.tox,dist,*egg,build,tests_to_fix
exclude=.venv,.git,.tox,dist,*egg,build