Run tox on created plugin

This patch enables running `tox` to test the generated plugin.
It also adds tox targets for eslint, karma, docs and releasenotes
to the generated plugin. In the generated tox.ini, the "-local"
targets are meant for local testing; on CI infra, the corresponding
jobs run the plain targets (without the "-local" suffix) as
configured in the job settings.

To add these new targets, this patch also refreshes the
requirements and fixes the tests.

Horizon plugins should be tested against Horizon's master branch,
so the parent tox, i.e. the "checkbuild" target in
ui-cookiecutter/tox.ini, clones horizon from git into
.tox/checkbuild/horizon/.

A lower-constraints job will be added in a subsequent patch.
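
A minimal sketch of the resulting split between local and CI runs,
assuming the target and job names used in this change (commands are
illustrative):

    # local: the "-local" targets install horizon from a checkout
    tox -e py35-local

    # CI: the zuul job runs the plain target, with horizon provided
    # through the job's required-projects setting
    tox -e py35dj20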

Change-Id: I4a1fd2d90b086f6ebec80d44cc36ef8beb3dbf0b
Shu Muto 2018-03-20 16:21:59 +09:00
parent bcd7416b25
commit 31151ab91e
17 changed files with 227 additions and 453 deletions

View File

@@ -19,8 +19,14 @@ commands = {posargs}
setenv =
IGNORE_GIT=1
commands =
rm -rf {envdir}/horizon
git clone https://git.openstack.org/openstack/horizon {envdir}/horizon
rm -rf {envdir}/cafe-ui
cookiecutter --no-input --output-dir {envdir} {toxinidir}
flake8 {envdir}/cafe-ui
# To pass {posargs} for running `tox -edocs` in the generated plugin,
# run `tox -- -edocs` in ui-cookiecutter.
tox --workdir {envdir}/cafe-ui/.tox -c {envdir}/cafe-ui/tox.ini {posargs}
whitelist_externals =
git
rm
tox
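
A usage sketch of the {posargs} pass-through noted in the comment
above, assuming this hunk belongs to the "checkbuild" env mentioned
in the commit message:

    # as the comment suggests, from the ui-cookiecutter checkout:
    tox -- -edocs
    # or, selecting the env explicitly (name per the commit message):
    tox -e checkbuild -- -edocs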

View File

@@ -0,0 +1,31 @@
- project:
check:
jobs:
- horizon-openstack-tox-py35dj20:
required-projects:
openstack/horizon
- nodejs-npm-run-lint:
vars:
node_version: 4
required-projects:
openstack/horizon
- nodejs-npm-run-test:
vars:
node_version: 4
required-projects:
openstack/horizon
gate:
jobs:
- horizon-openstack-tox-py35dj20:
required-projects:
openstack/horizon
- nodejs-npm-run-lint:
vars:
node_version: 4
required-projects:
openstack/horizon
- nodejs-npm-run-test:
vars:
node_version: 4
required-projects:
openstack/horizon

View File

@@ -1,152 +0,0 @@
# Makefile for Sphinx documentation
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = build
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
help:
@echo "Please use \`make <target>' where <target> is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
clean:
-rm -rf $(BUILDDIR)/*
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/{{ cookiecutter.help_name }}.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/{{ cookiecutter.help_name }}.qhc"
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/{{ cookiecutter.help_name }}"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/{{ cookiecutter.help_name }}"
@echo "# devhelp"
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."

View File

@@ -0,0 +1,17 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
# Order matters to the pip dependency resolver, so sorting this file
# changes how packages are installed. New dependencies should be
# added in alphabetical order, however, some dependencies may need to
# be installed in a specific order.
#
# Requirements for docs
django-nose>=1.4.4 # BSD
mock>=2.0.0 # BSD
mox3>=0.20.0 # Apache-2.0
openstackdocstheme>=1.18.1 # Apache-2.0
reno>=2.5.0 # Apache-2.0
sphinx!=1.6.6,!=1.6.7,>=1.6.2 # BSD
sphinxcontrib-apidoc>=0.2.0 # BSD

View File

@@ -43,116 +43,6 @@ os.environ.setdefault('DJANGO_SETTINGS_MODULE',
# https://docs.djangoproject.com/en/1.8/releases/1.7/#standalone-scripts
django.setup()
def write_autodoc_index():
def find_autodoc_modules(module_name, sourcedir):
"""returns a list of modules in the SOURCE directory."""
modlist = []
os.chdir(os.path.join(sourcedir, module_name))
print("SEARCHING %s" % sourcedir)
for root, dirs, files in os.walk("."):
for filename in files:
if filename == 'tests.py':
continue
if filename.endswith(".py"):
# remove the pieces of the root
elements = root.split(os.path.sep)
# replace the leading "." with the module name
elements[0] = module_name
# and get the base module name
base, extension = os.path.splitext(filename)
if not (base == "__init__"):
elements.append(base)
result = ".".join(elements)
# print result
modlist.append(result)
return modlist
RSTDIR = os.path.abspath(os.path.join(BASE_DIR, "contributor/api"))
SRCS = [('{{ cookiecutter.module_name }}', ROOT), ]
EXCLUDED_MODULES = ()
CURRENT_SOURCES = {}
if not(os.path.exists(RSTDIR)):
os.mkdir(RSTDIR)
CURRENT_SOURCES[RSTDIR] = ['autoindex.rst']
INDEXOUT = open(os.path.join(RSTDIR, "autoindex.rst"), "w")
INDEXOUT.write("""
=================
Source Code Index
=================
.. contents::
:depth: 1
:local:
""")
for modulename, path in SRCS:
sys.stdout.write("Generating source documentation for %s\n" %
modulename)
INDEXOUT.write("\n%s\n" % modulename.capitalize())
INDEXOUT.write("%s\n" % ("=" * len(modulename),))
INDEXOUT.write(".. toctree::\n")
INDEXOUT.write(" :maxdepth: 1\n")
INDEXOUT.write("\n")
MOD_DIR = os.path.join(RSTDIR, modulename)
CURRENT_SOURCES[MOD_DIR] = []
if not(os.path.exists(MOD_DIR)):
os.mkdir(MOD_DIR)
for module in find_autodoc_modules(modulename, path):
if any([module.startswith(exclude) for exclude
in EXCLUDED_MODULES]):
print("Excluded module %s." % module)
continue
mod_path = os.path.join(path, *module.split("."))
generated_file = os.path.join(MOD_DIR, "%s.rst" % module)
INDEXOUT.write(" %s/%s\n" % (modulename, module))
# Find the __init__.py module if this is a directory
if os.path.isdir(mod_path):
source_file = ".".join((os.path.join(mod_path, "__init__"),
"py",))
else:
source_file = ".".join((os.path.join(mod_path), "py"))
CURRENT_SOURCES[MOD_DIR].append("%s.rst" % module)
# Only generate a new file if the source has changed or we don't
# have a doc file to begin with.
if not os.access(generated_file, os.F_OK) or (
os.stat(generated_file).st_mtime <
os.stat(source_file).st_mtime):
print("Module %s updated, generating new documentation."
% module)
FILEOUT = open(generated_file, "w")
header = "The :mod:`%s` Module" % module
FILEOUT.write("%s\n" % ("=" * len(header),))
FILEOUT.write("%s\n" % header)
FILEOUT.write("%s\n" % ("=" * len(header),))
FILEOUT.write(".. automodule:: %s\n" % module)
FILEOUT.write(" :members:\n")
FILEOUT.write(" :undoc-members:\n")
FILEOUT.write(" :show-inheritance:\n")
FILEOUT.write(" :noindex:\n")
FILEOUT.close()
INDEXOUT.close()
# Delete auto-generated .rst files for sources which no longer exist
for directory, subdirs, files in list(os.walk(RSTDIR)):
for old_file in files:
if old_file not in CURRENT_SOURCES.get(directory, []):
print("Removing outdated file for %s" % old_file)
os.remove(os.path.join(directory, old_file))
write_autodoc_index()
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
@@ -166,10 +56,10 @@ write_autodoc_index()
# Add any Sphinx extension module names here, as strings.
# They can be extensions coming with Sphinx (named 'sphinx.ext.*')
# or your custom ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.todo',
extensions = ['sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinxcontrib.apidoc',
'openstackdocstheme',
]
@@ -233,6 +123,12 @@ pygments_style = 'sphinx'
primary_domain = 'py'
nitpicky = False
# sphinxcontrib-apidoc
apidoc_module_dir = '../../{{ cookiecutter.module_name }}'
apidoc_output_dir = 'contributor/api'
apidoc_excluded_paths = [
'test',
]
# -- Options for HTML output --------------------------------------------------

View File

@@ -10,21 +10,23 @@
"license": "Apache 2.0",
"author": "Openstack <openstack-dev@lists.openstack.org>",
"devDependencies": {
"eslint": "^1.10.3",
"eslint-config-openstack": "^1.2.4",
"eslint-plugin-angular": "1.0.1",
"jasmine-core": "2.4.1",
"karma": "1.1.2",
"karma-chrome-launcher": "1.0.1",
"karma-cli": "1.0.1",
"karma-coverage": "1.1.1",
"karma-jasmine": "1.0.2",
"karma-ng-html2js-preprocessor": "1.0.0",
"karma-threshold-reporter": "0.1.15"
"eslint": "3.19.x",
"eslint-config-openstack": "^4.0.1",
"eslint-plugin-angular": "3.1.x",
"jasmine-core": "2.8.x",
"karma": "1.7.x",
"karma-chrome-launcher": "^2.2.0",
"karma-cli": "1.0.x",
"karma-coverage": "1.1.x",
"karma-jasmine": "1.1.x",
"karma-ng-html2js-preprocessor": "1.0.x",
"karma-phantomjs-launcher": "1.0.x",
"karma-threshold-reporter": "0.1.x",
"phantomjs-prebuilt": "2.1.x"
},
"dependencies": {},
"scripts": {
"postinstall": "if [ ! -d .tox ] || [ ! -d .tox/py27 ]; then tox -epy27 --notest; fi",
"postinstall": "if [ ! -d .tox ] || [ ! -d .tox/karma ]; then tox -ekarma --notest; pip install -U -t ./.tox/karma/lib/python3.5/site-packages/ ../horizon; fi",
"lint": "eslint --no-color {{ cookiecutter.module_name }}/static",
"lintq": "eslint --quiet {{ cookiecutter.module_name }}/static",
"test": "karma start {{ cookiecutter.module_name }}/karma.conf.js --single-run"

View File

@@ -7,11 +7,6 @@
# be installed in a specific order.
#
# PBR should always appear first
pbr>=2.0.0 # Apache-2.0
# If python-higginsclient will be created, we will use it.
#python-higginsclient>=0.1.0 # Apache-2.0
Babel>=2.3.4 # BSD
Django<1.9,>=1.8 # BSD
django-babel>=0.5.1 # BSD
django-compressor>=2.0 # MIT
django-pyscss>=2.0.2 # BSD License (2 clause)
pbr!=2.1.0,>=2.0.0 # Apache-2.0
horizon>=13.0.0 # Apache-2.0

View File

@@ -22,8 +22,3 @@ classifier =
packages =
{{ cookiecutter.module_name }}
[build_sphinx]
all_files = 1
build-dir = doc/build
source-dir = doc/source
warning-is-error = 1

View File

@@ -7,22 +7,10 @@
# be installed in a specific order.
#
# Hacking should appear first in case something else depends on pep8
hacking>=0.12.0,!=0.13.0,<0.14 # Apache-2.0
#
coverage>=3.6 # Apache-2.0
django-nose>=1.2 # BSD
mock>=2.0 # BSD
mox3>=0.7.0 # Apache-2.0
nodeenv>=0.9.4 # BSD License # BSD
nose # LGPL
nose-exclude # LGPL
nosehtmloutput>=0.0.3 # Apache-2.0
nosexcover # BSD
openstack.nose-plugin>=0.7 # Apache-2.0
openstackdocstheme>=1.11.0 # Apache-2.0
reno>=1.8.0 # Apache2
selenium>=2.50.1 # Apache-2.0
sphinx!=1.3b1,<1.3,>=1.2.1 # BSD
testtools>=1.4.0 # MIT
# This also needs xvfb library installed on your OS
xvfbwrapper>=0.1.3 #license: MIT
hacking!=0.13.0,<0.14,>=0.12.0 # Apache-2.0
coverage!=4.4,>=4.0 # Apache-2.0
django-nose>=1.4.4 # BSD
mock>=2.0.0 # BSD
mox3>=0.20.0 # Apache-2.0
testtools>=2.2.0 # MIT

View File

@@ -1,88 +0,0 @@
#!/usr/bin/env bash
# Client constraint file contains this client version pin that is in conflict
# with installing the client from source. We should remove the version pin in
# the constraints file before applying it for from-source installation.
# The script also has a secondary purpose to install certain special
# dependencies directly from git.
# Wrapper for pip install that always uses constraints.
function pip_install() {
pip install -c"$localfile" -U "$@"
}
# Grab the library from git using either zuul-cloner or pip. The former is
# there to a take advantage of the setup done by the gate infrastructure
# and honour any/all Depends-On headers in the commit message
function install_from_git() {
ZUUL_CLONER=/usr/zuul-env/bin/zuul-cloner
# GIT_HOST=git.openstack.org
GIT_HOST=github.com
PROJ=$1
EGG=$2
edit-constraints "$localfile" -- "$EGG"
if [ -x "$ZUUL_CLONER" ]; then
SRC_DIR="$VIRTUAL_ENV/src"
mkdir -p "$SRC_DIR"
cd "$SRC_DIR" >/dev/null
ZUUL_CACHE_DIR=${ZUUL_CACHE_DIR:-/opt/git} $ZUUL_CLONER \
--branch "$BRANCH_NAME" \
"git://$GIT_HOST" "$PROJ"
pip_install -e "$PROJ/."
cd - >/dev/null
else
SRC_DIR="$VIRTUAL_ENV/src/$PROJ"
git clone --depth 1 --branch $BRANCH_NAME https://$GIT_HOST/$PROJ $SRC_DIR
pip_install -e $SRC_DIR
fi
}
CONSTRAINTS_FILE="$1"
shift 1
# This script will either complete with a return code of 0 or the return code
# of whatever failed.
set -e
# NOTE(tonyb): Place this in the tox environment's log dir so it will get
# published to logs.openstack.org for easy debugging.
mkdir -p "$VIRTUAL_ENV/log/"
localfile="$VIRTUAL_ENV/log/upper-constraints.txt"
if [[ "$CONSTRAINTS_FILE" != http* ]]; then
CONSTRAINTS_FILE="file://$CONSTRAINTS_FILE"
fi
# NOTE(tonyb): need to add curl to bindep.txt if the project supports bindep
curl "$CONSTRAINTS_FILE" --insecure --progress-bar --output "$localfile"
pip_install openstack-requirements
# This is the main purpose of the script: Allow local installation of
# the current repo. It is listed in constraints file and thus any
# install will be constrained and we need to unconstrain it.
edit-constraints "$localfile" -- "$CLIENT_NAME"
declare -a passthrough_args
while [ $# -gt 0 ] ; do
case "$1" in
# If we have any special os:<repo_name:<egg_name> deps then process them
os:*)
declare -a pkg_spec
IFS=: pkg_spec=($1)
install_from_git "${pkg_spec[1]}" "${pkg_spec[2]}"
;;
# Otherwise just pass the other deps through to the constrained pip install
*)
passthrough_args+=("$1")
;;
esac
shift 1
done
# If *only* had special args then then isn't any need to run pip.
if [ -n "$passthrough_args" ] ; then
pip_install "${passthrough_args[@]}"
fi

View File

@@ -1,6 +1,6 @@
[tox]
envlist = py35,py27,py27dj18,pep8
minversion = 2.0
envlist = pep8,py27-local,py35-local,py35dj20-local,eslint,karma-local,docs-local,releasenotes
minversion = 2.3.2
skipsdist = True
[testenv]
@@ -13,19 +13,29 @@ setenv = VIRTUAL_ENV={envdir}
NOSE_OPENSTACK_RED=0.05
NOSE_OPENSTACK_YELLOW=0.025
NOSE_OPENSTACK_SHOW_ELAPSED=1
install_command = {toxinidir}/tools/tox_install.sh {env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt} {opts} {packages}
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
# os:* is handled by tox_install.sh
os:openstack/horizon:horizon
deps =
-c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt}
-r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
commands = python manage.py test {posargs} --settings={{cookiecutter.module_name}}.test.settings
[testenv:pep8]
commands = flake8 {posargs}
# For installing horizon locally.
# NOTICE: this tox.ini requires the horizon repository to be cloned in a sibling directory.
[testenv:hz-local]
commands =
pip install -e ../horizon
[testenv:venv]
commands = {posargs}
[testenv:pep8]
basepython = python3.5
commands = flake8 {posargs}
[flake8]
exclude = .venv,.git,.tox,dist,*lib/python*,*egg,build,node_modules
max-complexity = 20
[testenv:cover]
commands =
coverage erase
@@ -33,38 +43,83 @@ commands =
coverage xml --omit '.tox/cover/*' -o 'cover/coverage.xml'
coverage html --omit '.tox/cover/*' -d 'cover/htmlcov'
[testenv:py27dj18]
# NOTE(shu-mutow): On CI infra, horizon will be installed
# according to the job settings, but locally we need to install
# horizon from the master branch.
[testenv:py27]
basepython = python2.7
[testenv:py27-local]
basepython = python2.7
commands =
pip install django>=1.8,<1.9
python manage.py test {posargs} --settings={{cookiecutter.module_name}}.test.settings
{[testenv:hz-local]commands}
{[testenv]commands}
[testenv:py35-local]
basepython = python3.5
commands =
{[testenv:hz-local]commands}
{[testenv]commands}
[testenv:py35dj20]
basepython = python3.5
commands =
{[testenv:hz-local]commands}
{[testenv]commands}
[testenv:py35dj20-local]
basepython = python3.5
commands =
{[testenv:hz-local]commands}
pip install -U django>=2.0,<2.1
{[testenv]commands}
[testenv:eslint]
whitelist_externals = npm
basepython = python3.5
whitelist_externals =
npm
commands =
npm install
npm run postinstall
npm run lint
# NOTE(shu-mutow): The "postinstall" script in package.json installs horizon
# from the master branch into the py35 environment for testing JavaScript.
# Horizon master needs to be cloned into ../horizon both locally and on CI.
[testenv:karma]
# NOTE(shu-mutou): Until PhantomJS setup get reliable, we use
# Chromium for JS test. And npm can't launch Chromium via tox.
#whitelist_externals = npm
#commands =
# npm install
# npm run postinstall
# npm run test
whitelist_externals = echo
basepython = python3.5
whitelist_externals =
{[testenv:eslint]whitelist_externals}
commands =
echo "npm can't launch Chromium via tox."
echo "nexecute `npm run test`"
npm install
npm run test
[testenv:karma-local]
basepython = python3.5
whitelist_externals =
{[testenv:eslint]whitelist_externals}
commands =
{[testenv:karma]commands}
[testenv:docs]
commands = python setup.py build_sphinx
basepython = python3.5
deps =
-c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt}
-r{toxinidir}/doc/requirements.txt
commands=
sphinx-build -W -b html doc/source doc/build/html
[testenv:docs-local]
basepython = python3.5
deps =
{[testenv:docs]deps}
commands=
{[testenv:hz-local]commands}
{[testenv:docs]commands}
[testenv:releasenotes]
commands = sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html
basepython = python3.5
deps =
{[testenv:docs]deps}
commands =
sphinx-build -a -E -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html
[flake8]
exclude = .venv,.git,.tox,dist,*lib/python*,*egg,build,node_modules
max-complexity = 20
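
A short sketch of the "-local" flow defined above, assuming horizon
master is cloned as a sibling directory (per the NOTICE for the
hz-local env):

    git clone https://git.openstack.org/openstack/horizon ../horizon
    tox -e py35-local,py35dj20-local   # Python unit tests against horizon master
    tox -e eslint,karma-local          # JavaScript lint and Karma tests (needs npm)
    tox -e docs-local,releasenotes     # Sphinx docs and release notes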

View File

@@ -1,19 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack_dashboard.test import helpers as test
class {{cookiecutter.panel_func}}sTests(test.TestCase):
# Unit tests for {{cookiecutter.panel}}.
def test_me(self):
self.assertTrue(1 + 1 == 2)

View File

@@ -18,8 +18,10 @@ var fs = require('fs');
var path = require('path');
module.exports = function (config) {
// This tox venv is setup in the post-install npm step
var toxPath = '../.tox/py27/lib/python2.7/site-packages/';
var toxPath = '../.tox/karma/lib/python3.5/site-packages/';
process.env.PHANTOMJS_BIN = 'node_modules/phantomjs-prebuilt/bin/phantomjs';
config.set({
preprocessors: {
@@ -67,9 +69,8 @@ module.exports = function (config) {
toxPath + 'xstatic/pkg/angular_schema_form/data/schema-form.js',
toxPath + 'xstatic/pkg/angular_fileupload/data/ng-file-upload.js',
// TODO: These should be mocked.
toxPath + '/horizon/static/horizon/js/horizon.js',
toxPath + 'horizon/static/horizon/js/horizon.js',
/**
* Include framework source code from horizon that we need.
@@ -120,14 +121,14 @@ module.exports = function (config) {
frameworks: ['jasmine'],
browsers: ['Chrome'],
browsers: ['PhantomJS'],
browserNoActivityTimeout: 60000,
reporters: ['progress', 'coverage', 'threshold'],
plugins: [
'karma-chrome-launcher',
'karma-phantomjs-launcher',
'karma-jasmine',
'karma-ng-html2js-preprocessor',
'karma-coverage',

View File

@@ -16,6 +16,7 @@
describe('{{cookiecutter.panel_func}}s service', function() {
var service;
beforeEach(module('horizon.app.core.openstack-service-api'));
beforeEach(module('horizon.dashboard.{{cookiecutter.panel_group}}.{{cookiecutter.panel}}s'));
beforeEach(inject(function($injector) {
service = $injector.get('horizon.dashboard.{{cookiecutter.panel_group}}.{{cookiecutter.panel}}s.service');
@@ -29,7 +30,7 @@
var result = service.getPromise({});
deferred.resolve({
data:{
items: [{id: 123, name: 'resource1'}]
items: [{id: '123abc', name: 'resource1'}]
}
});
$timeout.flush();
@@ -39,7 +40,7 @@
});
describe('urlFunction', function() {
it("get url", inject(function() {
it("get url", inject(function($injector) {
var detailRoute = $injector.get('horizon.app.core.detailRoute');
var result = service.urlFunction({id:"123abc"});
expect(result).toBe(detailRoute + "OS::{{cookiecutter.api_name}}::{{cookiecutter.panel_func}}/123abc");

View File

@@ -0,0 +1,23 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from openstack_dashboard.test import helpers as test
class {{cookiecutter.panel_func}}sClientTestCase(test.TestCase):
# NOTE(shu-mutow): the following should be removed after the end of Rocky.
use_mox = False
# Unit tests for Client API.
def test_me(self):
self.assertTrue(1 + 1 == 2)

View File

@@ -0,0 +1,23 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from openstack_dashboard.test import helpers as test
class {{cookiecutter.panel_func}}sRestTestCase(test.TestCase):
# NOTE(shu-mutow): the following should be removed after the end of Rocky.
use_mox = False
# Unit tests for REST API.
def test_me(self):
self.assertTrue(1 + 1 == 2)